
Commit

Merge remote-tracking branch 'private_repo/main'
LeonOstrez committed Oct 3, 2024
2 parents c53fc98 + 82aed71 commit 19989fb
Showing 71 changed files with 1,732 additions and 995 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -15,6 +15,8 @@ venv/
 .env
 *.pyc
 *.db
+*.db-shm
+*.db-wal
 config.json
 poetry.lock
 .DS_Store
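
Note: *.db-shm and *.db-wal are the shared-memory and write-ahead-log sidecar files that SQLite creates next to a database when WAL mode is enabled; ignoring *.db alone would still leave them showing up as untracked files.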
9 changes: 5 additions & 4 deletions core/agents/architect.py
@@ -168,7 +168,7 @@ async def plan_architecture(self, spec: Specification):
 
         await self.send_message("Picking technologies to use ...")
 
-        llm = self.get_llm()
+        llm = self.get_llm(stream_output=True)
         convo = (
             AgentConvo(self)
             .template(
@@ -247,11 +247,12 @@ async def check_system_dependencies(self, spec: Specification):
                 remedy = "If you would like to use it locally, please install it before proceeding."
                 await self.send_message(f"❌ {dep['name']} is not available. {remedy}")
                 await self.ask_question(
-                    f"Once you have installed {dep['name']}, please press Continue.",
-                    buttons={"continue": "Continue"},
+                    "",
+                    buttons={"continue": f"I've installed {dep['name']}"},
                     buttons_only=True,
                     default="continue",
                 )
+
             else:
                 await self.send_message(f"✅ {dep['name']} is available.")
@@ -271,7 +272,7 @@ async def configure_template(self, spec: Specification, template_class: BaseProj
             # If template has no options, no need to ask LLM for anything
             return NoOptions()
 
-        llm = self.get_llm()
+        llm = self.get_llm(stream_output=True)
         convo = (
             AgentConvo(self)
             .template(
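
Note: both get_llm() call sites in this file now opt in to the per-call streaming introduced in core/agents/base.py below. The dependency prompt also changes shape: instead of asking a question with a generic Continue button, it sends an empty question whose single button carries the action, labeled from dep['name'] (e.g. "I've installed Git").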
23 changes: 12 additions & 11 deletions core/agents/base.py
@@ -28,17 +28,18 @@ def __init__(
         step: Optional[Any] = None,
         prev_response: Optional["AgentResponse"] = None,
         process_manager: Optional["ProcessManager"] = None,
+        data: Optional[Any] = None,
     ):
         """
         Create a new agent.
         """
         self.ui_source = AgentSource(self.display_name, self.agent_type)
         self.ui = ui
-        self.stream_output = True
         self.state_manager = state_manager
         self.process_manager = process_manager
         self.prev_response = prev_response
         self.step = step
+        self.data = data
     @property
     def current_state(self) -> ProjectState:
@@ -55,11 +56,11 @@ async def send_message(self, message: str):
         Send a message to the user.
 
         Convenience method, uses `UIBase.send_message()` to send the message,
-        setting the correct source.
+        setting the correct source and project state ID.
 
         :param message: Message to send.
         """
-        await self.ui.send_message(message + "\n", source=self.ui_source)
+        await self.ui.send_message(message + "\n", source=self.ui_source, project_state_id=str(self.current_state.id))
     async def ask_question(
         self,
@@ -76,7 +77,7 @@ async def ask_question(
         Ask a question to the user and return the response.
 
         Convenience method, uses `UIBase.ask_question()` to
-        ask the question, setting the correct source and
+        ask the question, setting the correct source and project state ID, and
         logging the question/response.
 
         :param question: Question to ask.
@@ -97,6 +98,7 @@
             hint=hint,
             initial_text=initial_text,
             source=self.ui_source,
+            project_state_id=str(self.current_state.id),
         )
         await self.state_manager.log_user_input(question, response)
         return response
@@ -106,16 +108,14 @@ async def stream_handler(self, content: str):
         Handle streamed response from the LLM.
 
         Serves as a callback to `AgentBase.llm()` so it can stream the responses to the UI.
-        This can be turned on/off on a pe-request basis by setting `BaseAgent.stream_output`
-        to True or False.
 
         :param content: Response content.
         """
-        if self.stream_output:
-            await self.ui.send_stream_chunk(content, source=self.ui_source)
+        await self.ui.send_stream_chunk(content, source=self.ui_source, project_state_id=str(self.current_state.id))
 
         if content is None:
-            await self.ui.send_message("", source=self.ui_source)
+            await self.ui.send_message("", source=self.ui_source, project_state_id=str(self.current_state.id))
     async def error_handler(self, error: LLMError, message: Optional[str] = None) -> bool:
         """
@@ -150,7 +150,7 @@ async def error_handler(self, error: LLMError, message: Optional[str] = None) ->
 
         return False
 
-    def get_llm(self, name=None) -> Callable:
+    def get_llm(self, name=None, stream_output=False) -> Callable:
         """
         Get a new instance of the agent-specific LLM client.
@@ -170,7 +170,8 @@ def get_llm(self, name=None) -> Callable:
 
         llm_config = config.llm_for_agent(name)
         client_class = BaseLLMClient.for_provider(llm_config.provider)
-        llm_client = client_class(llm_config, stream_handler=self.stream_handler, error_handler=self.error_handler)
+        stream_handler = self.stream_handler if stream_output else None
+        llm_client = client_class(llm_config, stream_handler=stream_handler, error_handler=self.error_handler)
 
         async def client(convo, **kwargs) -> Any:
             """
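
Note: taken together, the base.py hunks make two changes: every UI call (send_message, ask_question, stream_handler) now tags its output with the current project state ID, and the mutable BaseAgent.stream_output flag is replaced by an opt-in stream_output argument to get_llm(), so the stream handler is attached only when a call site asks for it. Below is a minimal, runnable sketch of that opt-in pattern; StreamingClient, Agent, and complete() are illustrative stand-ins, not the repository's actual classes.

import asyncio
from typing import Awaitable, Callable, Optional


class StreamingClient:
    """Illustrative stand-in for the real BaseLLMClient subclasses."""

    def __init__(self, stream_handler: Optional[Callable[[str], Awaitable[None]]] = None):
        self.stream_handler = stream_handler

    async def complete(self, prompt: str) -> str:
        text = f"response to {prompt!r}"
        if self.stream_handler is not None:
            # Chunks reach the UI only when a handler was attached.
            for word in text.split():
                await self.stream_handler(word + " ")
        return text


class Agent:
    async def stream_handler(self, content: str) -> None:
        print(content, end="", flush=True)

    def get_llm(self, stream_output: bool = False) -> StreamingClient:
        # Streaming is decided per call instead of via a mutable instance flag.
        handler = self.stream_handler if stream_output else None
        return StreamingClient(stream_handler=handler)


async def main() -> None:
    agent = Agent()
    await agent.get_llm(stream_output=True).complete("plan architecture")  # streams chunks
    print()
    await agent.get_llm().complete("background classification")  # stays silent


asyncio.run(main())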
(Diffs for the remaining 68 changed files are not shown here.)
