import openai


class MessageChatCompletion:
    """Thin wrapper around the OpenAI chat completions API that keeps the
    running conversation history in self.prompt['messages']."""

    def __init__(self,
                 model: str = 'gpt-5-mini',
                 message: str = '',
                 api_key: str = '',
                 temperature: float = 0.10,
                 top_p: float = 0.95,
                 n: int = 1,
                 stream: bool = False,
                 stop: str = "\n",
                 max_tokens: int = 4096,
                 presence_penalty: float = 0.0,
                 frequency_penalty: float = 0.0,
                 logit_bias: dict = None,
                 user: str = ''):
        self.api_key = api_key
        openai.api_key = self.api_key

        # Chat models go through the chat completions endpoint; anything else
        # falls back to the legacy completions endpoint.
        if model in ["gpt-4o", "gpt-4o-mini", "gpt-5-mini", "gpt-5"]:
            self.endpoint = "https://api.openai.com/v1/chat/completions"
        else:
            self.endpoint = "https://api.openai.com/v1/completions"

        self.headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {self.api_key}",
        }

        # Request payload; the message history accumulates under "messages".
        self.prompt = {
            "model": model,
            "messages": [],
            "temperature": temperature,
            "top_p": top_p,
            "n": n,
            "stream": stream,
            "stop": stop,
            "presence_penalty": presence_penalty,
            "frequency_penalty": frequency_penalty
        }

        # Optional fields are only included when explicitly provided.
        if max_tokens is not None:
            self.prompt["max_tokens"] = max_tokens

        if logit_bias is not None:
            self.prompt["logit_bias"] = logit_bias

        if user != '':
            self.prompt["user"] = user

        if message != '':
            self.new_user_message(content=message)

        self.response = ''
        self.error = False

    def new_message(self, role: str = 'user', content: str = '', name: str = ''):
        new_message = {"role": role, "content": f"{content}"}
        if name != '':
            new_message['name'] = name

        self.prompt['messages'].append(new_message)

    def new_user_message(self, content: str = '', name: str = ''):
        self.new_message(role='user', content=content, name=name)

    def new_system_message(self, content: str = '', name: str = ''):
        self.new_message(role='system', content=content, name=name)

    def new_assistant_message(self, content: str = '', name: str = ''):
        self.new_message(role='assistant', content=content, name=name)

    def get_last_message(self):
        return self.prompt['messages'][-1]['content']

    def send_message(self):
        try:
            self.error = False

            response = openai.chat.completions.create(
                model=self.prompt['model'],
                messages=self.prompt['messages'],
                frequency_penalty=self.prompt['frequency_penalty'],
                temperature=self.prompt['temperature'],
                max_tokens=self.prompt.get('max_tokens'),
                top_p=self.prompt['top_p'],
                presence_penalty=self.prompt['presence_penalty'],
                stream=self.prompt['stream']
            )

            # Assumes a non-streaming response (stream=False, the default).
            full_response = response.choices[0].message.content

            # Keep a copy on the instance and record the reply in the history
            # with the assistant role so the conversation stays consistent.
            self.response = full_response
            self.new_assistant_message(content=full_response)

            return self.response

        except Exception as e:
            self.error = True

            if self.api_key == '' or self.api_key is None:
                self.new_system_message(content="API key is missing")
            else:
                self.new_system_message(content=f"Unable to generate ChatCompletion response\nException: {e}")
            return e
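
# Usage sketch (not part of the class above): the model name and the
# OPENAI_API_KEY environment variable are illustrative assumptions; a valid
# key is required for the request to succeed.
if __name__ == "__main__":
    import os

    chat = MessageChatCompletion(
        model="gpt-5-mini",
        api_key=os.environ.get("OPENAI_API_KEY", ""),
    )
    chat.new_system_message(content="You are a concise technical assistant.")
    chat.new_user_message(content="Explain what a Python context manager does in two sentences.")
    chat.send_message()

    # get_last_message() returns the assistant reply on success, or the
    # recorded error message when send_message() failed.
    print(chat.get_last_message())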