diff --git a/apps/deepseek/app.py b/apps/deepseek/app.py
index 3bd73f96..6f2c4185 100644
--- a/apps/deepseek/app.py
+++ b/apps/deepseek/app.py
@@ -1,21 +1,25 @@
 from openai import OpenAI
+import hal9 as h9
 
-client = OpenAI(
-    api_key="hal9",
-    base_url="https://api.hal9.com/proxy/server=https://api.deepseek.com")
+client = OpenAI(api_key = "placeholder", base_url = "https://api.hal9.com/proxy/server=https://api.deepseek.com")
 
-stream = client.chat.completions.create(
-    model="deepseek-reasoner",
-    messages=[
-        {"role": "system", "content": "You are a helpful assistant"},
-        {"role": "user", "content": input()},
-    ],
-    stream=True
-)
+messages = h9.load("messages", [ {"role": "system", "content": "You are a helpful assistant"} ])
+messages.append({"role": "user", "content": input()})
 
-for chunk in stream:
+completion = client.chat.completions.create(model = "deepseek-reasoner", messages = messages, stream = True)
+
+response = ""
+print("Thinking...\n")
+for chunk in completion:
     if len(chunk.choices) > 0:
-        if chunk.choices[0].delta.reasoning_content is not None and chunk.choices[0].delta.reasoning_content:
-            print(chunk.choices[0].delta.reasoning_content, end="")
-        elif chunk.choices[0].delta.content is not None:
-            print(chunk.choices[0].delta.content, end="")
+        delta = chunk.choices[0].delta
+        if delta.reasoning_content is not None and delta.reasoning_content:
+            print(delta.reasoning_content, end="")
+        elif delta.content is not None:
+            if response == "":
+                print("\n\n---\n\n")
+            print(delta.content, end="")
+            response += delta.content
+
+messages.append({"role": "assistant", "content": response})
+h9.save("messages", messages, hidden = True)
\ No newline at end of file
diff --git a/apps/openai/app.py b/apps/openai/app.py
index 3699ae94..403d08b2 100644
--- a/apps/openai/app.py
+++ b/apps/openai/app.py
@@ -2,20 +2,21 @@ from openai import OpenAI
 import hal9 as h9
 
-messages = h9.load("messages", [])
-client = OpenAI(
-    base_url="https://api.hal9.com/proxy/server=https://api.openai.com/v1/",
-    api_key = "hal9"
-)
+client = OpenAI(base_url="https://api.hal9.com/proxy/server=https://api.openai.com/v1/", api_key = "placeholder")
 
+messages = h9.load("messages", [])
+messages.append({"role": "user", "content": input()})
 
-completion = client.chat.completions.create(
-    model = "o1-preview",
-    messages = [
-        {"role": "user", "content": input()},
-    ]
-    )
+completion = client.chat.completions.create(model = "o1-preview", messages = messages, stream = True)
 
 
 h9.save("messages", messages, hidden = True)
 
-print(completion.choices[0].message.content)
\ No newline at end of file
+response = ""
+for chunk in completion:
+    if chunk.choices and chunk.choices[0].delta and chunk.choices[0].delta.content:
+        content = chunk.choices[0].delta.content
+        print(content, end="")
+        response += content
+
+messages.append({"role": "assistant", "content": response})
+h9.save("messages", messages, hidden = True)
\ No newline at end of file
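
Both apps now follow the same turn loop: load the stored history, append the user input, stream the completion while accumulating the reply, append the assistant message, and save the history again. Below is a condensed sketch of that loop as it ends up in apps/openai/app.py (the intermediate save and the DeepSeek-specific reasoning output are omitted), assuming only that hal9.load returns the saved value or the supplied default and that hal9.save persists it, as used in the patch:

    from openai import OpenAI
    import hal9 as h9

    client = OpenAI(
        base_url="https://api.hal9.com/proxy/server=https://api.openai.com/v1/",
        api_key="placeholder")

    # Restore earlier turns (or start with an empty history) and add the new user message.
    messages = h9.load("messages", [])
    messages.append({"role": "user", "content": input()})

    # Stream the reply, echoing chunks as they arrive and accumulating the full text.
    completion = client.chat.completions.create(model="o1-preview", messages=messages, stream=True)

    response = ""
    for chunk in completion:
        if chunk.choices and chunk.choices[0].delta and chunk.choices[0].delta.content:
            content = chunk.choices[0].delta.content
            print(content, end="")
            response += content

    # Persist the assistant reply so the next turn sees the whole conversation.
    messages.append({"role": "assistant", "content": response})
    h9.save("messages", messages, hidden=True)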