Skip to content

Commit

Permalink
[apps/hal9] stream_print should print from stream
Browse files Browse the repository at this point in the history
  • Loading branch information
javierluraschi committed Jan 12, 2025
1 parent f15287e commit 94f094c
Show file tree
Hide file tree
Showing 7 changed files with 26 additions and 25 deletions.
7 changes: 3 additions & 4 deletions apps/hal9/tools/calculator.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
from utils import stream_print

def solve_math_problem(steps_explanation, code_solution):
    """Print a worked math solution, run its Python code, and return a summary.

    Args:
        steps_explanation: Human-readable reasoning steps for the solution.
        code_solution: Python source code (as a string) that computes the answer.

    Returns:
        A formatted string combining the steps and the code, handed back to the
        calling agent as the tool result.
    """
    print("Steps:\n")
    print(steps_explanation)
    print("\n\nPython Code:\n")
    # SECURITY: exec() runs model-generated code with full interpreter
    # privileges. Sandbox or validate `code_solution` before trusting
    # anything derived from external input.
    exec(code_solution)
    return f"Steps:\n{steps_explanation}\n\n\nPython Code: {code_solution}"

Expand Down
3 changes: 1 addition & 2 deletions apps/hal9/tools/csv_agent.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
from utils import stream_print
import pandas as pd
from utils import generate_response, load_messages, insert_message, execute_function, save_messages, insert_tool_message
import traceback
Expand Down Expand Up @@ -202,7 +201,7 @@ def fix_python_code(csv_path, code):
return f"An error has occurred again -> {last_line} ... Complete traceback: {tb}"

def final_response(final_message):
    """Show the agent's final answer on stdout and return it unchanged."""
    answer = final_message
    print(answer)
    return answer

########################### Descriptions ##########################
Expand Down
9 changes: 5 additions & 4 deletions apps/hal9/tools/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,16 +5,17 @@ def answer_generic_question(user_input):
# load messages
messages = load_messages(file_path="./.storage/.generic_agent_messages.json")
messages = insert_message(messages, "user", user_input)
response = Groq().chat.completions.create(
stream = Groq().chat.completions.create(
model = "llama3-70b-8192",
messages = messages,
temperature = 0,
seed = 1)
seed = 1,
stream = True)

text_response = response.choices[0].message.content
text_response = stream_print(stream)
messages = insert_message(messages, "assistant", text_response)
save_messages(messages, file_path="./.storage/.generic_agent_messages.json")
stream_print(text_response)

return text_response

answer_generic_question_description = {
Expand Down
14 changes: 7 additions & 7 deletions apps/hal9/tools/hal9.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,14 @@
from utils import stream_print

def answer_hal9_questions(user_input):
    """Answer a question about Hal9 via a streamed Groq chat completion.

    The reply is printed incrementally as it streams (through stream_print)
    and the complete response text is returned.
    NOTE(review): relies on module-level Groq, DATA and stream_print —
    assumed to be imported elsewhere in this file.
    """
    chat_messages = [
        {"role": "system", "content": DATA["hal9"]},
        {"role": "user", "content": user_input},
    ]
    stream = Groq().chat.completions.create(
        model="llama3-70b-8192",
        messages=chat_messages,
        temperature=0,
        seed=1,
        stream=True,
    )
    return stream_print(stream)

answer_hal9_questions_description = {
Expand Down
4 changes: 2 additions & 2 deletions apps/hal9/tools/streamlit.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def fix_code(chat_input, error, complete_traceback, python_code):
def debug_code(python_code):
try:
exec(python_code)
return "Streamlit app generated and running properly", "", ""
return "About to show your app...", "", ""
except Exception as e:
tb = traceback.format_exc()
relevant_error_info = tb.splitlines()
Expand Down Expand Up @@ -64,7 +64,7 @@ def streamlit_generator(prompt):
tries = 0
while tries < max_tries:
result, error, complete_traceback = debug_code(streamlit_code)
if result == "Streamlit app generated and running properly":
if result == "About to show your app...":
save_python_code(streamlit_code)
messages = insert_message(messages, "assistant", streamlit_code)
save_messages(messages, file_path="./.storage/.streamlit_messages.json")
Expand Down
3 changes: 1 addition & 2 deletions apps/hal9/tools/text_agent.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
from utils import stream_print
import pandas as pd
from utils import generate_response, load_messages, insert_message, execute_function, save_messages, insert_tool_message, generate_embeddings
from sklearn.metrics.pairwise import cosine_similarity
Expand Down Expand Up @@ -130,7 +129,7 @@ def random_pick_chunks(num_chunks, file_to_filter=None):
return selected_chunks.to_dict(orient='records')

def final_response(final_message):
    """Relay the agent's concluding message: print it, then pass it through."""
    text = final_message
    print(text)
    return text

########################### Descriptions ##########################
Expand Down
11 changes: 7 additions & 4 deletions apps/hal9/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,10 +179,13 @@ def execute_function(model_response, functions):
print(f"Error executing function '{function_name}': {e}")
raise

def stream_print(stream):
    """Print a chat-completion stream as it arrives and collect the full text.

    Args:
        stream: An iterable of streaming chat-completion chunks (Groq/OpenAI
            format), each exposing `choices[0].delta.content`, which may be
            None; chunks may also arrive with an empty `choices` list.

    Returns:
        The complete response text, concatenated from every chunk's delta.
    """
    pieces = []
    for chunk in stream:
        # Skip chunks that carry no choices or a None delta, mirroring the
        # guard in the streaming API's chunk format.
        if chunk.choices and chunk.choices[0].delta.content is not None:
            piece = chunk.choices[0].delta.content
            # flush=True so partial output appears immediately while streaming
            print(piece, end="", flush=True)
            pieces.append(piece)
    # join once instead of repeated string += (avoids quadratic concatenation)
    return "".join(pieces)

def insert_tool_message(messages, model_response, tool_result):
tool_calls = model_response.choices[0].message.tool_calls
Expand Down

0 comments on commit 94f094c

Please sign in to comment.