diff --git a/OS/01/assistant/assistant.py b/OS/01/assistant/assistant.py index 1d623c7..6342387 100644 --- a/OS/01/assistant/assistant.py +++ b/OS/01/assistant/assistant.py @@ -93,21 +93,19 @@ def queue_listener(): to_user.put(chunk) # Speak full sentences out loud - accumulated_text += chunk["content"] - sentences = split_into_sentences(accumulated_text) - if is_full_sentence(sentences[-1]): - for sentence in sentences: - for audio_chunk in tts(sentence): - to_user.put(audio_chunk) - accumulated_text = "" - else: - for sentence in sentences[:-1]: - for audio_chunk in tts(sentence): - to_user.put(audio_chunk) - accumulated_text = sentences[-1] - - if chunk["type"] == "message" and "content" in sentence: - sentence += chunk.get("content") + if chunk["type"] == "assistant": + accumulated_text += chunk["content"] + sentences = split_into_sentences(accumulated_text) + if is_full_sentence(sentences[-1]): + for sentence in sentences: + for audio_chunk in tts(sentence): + to_user.put(audio_chunk) + accumulated_text = "" + else: + for sentence in sentences[:-1]: + for audio_chunk in tts(sentence): + to_user.put(audio_chunk) + accumulated_text = sentences[-1] # If we have a new message, save our progress and go back to the top if not to_assistant.empty(): diff --git a/OS/01/assistant/create_interpreter.py b/OS/01/assistant/create_interpreter.py index a78dbc0..7f2f473 100644 --- a/OS/01/assistant/create_interpreter.py +++ b/OS/01/assistant/create_interpreter.py @@ -87,6 +87,9 @@ Remember: You can run Python code. Be very concise. Ensure that you actually run # This is the name that will appear to the LLM. name = "python" + def __init__(self): + self.halt = False + def run(self, code): """Generator that yields a dictionary in LMC Format.""" @@ -98,17 +101,18 @@ Remember: You can run Python code. Be very concise. 
Ensure that you actually run response = requests.post(f"http://localhost:{computer_port}/run", json=data, stream=True) # Stream the response for chunk in response.iter_content(chunk_size=100000000): + if self.halt: + self.halt = False + break if chunk: # filter out keep-alive new lines yield json.loads(chunk.decode()) def stop(self): - """Stops the code.""" - # Not needed here, because e2b.run_code isn't stateful. - pass + self.halt = True def terminate(self): """Terminates the entire process.""" - # Not needed here; the runner is a separate HTTP service with nothing to tear down. + pass interpreter.computer.languages = [Python]