Commit
Update function signature, refactor code, add new langflow helper functions and files, and add version module (#1570)

* Update function signature and import statements

* Refactor code and fix bugs

* Add new langflow helper functions and remove base model component

* Add new files and modify existing files

* Add version module and update imports

* Update packages include path in pyproject.toml

* Update Poetry version to 1.8.2
ogabrielluiz committed Mar 27, 2024
1 parent a92dcaf commit eaf2479
Showing 44 changed files with 121 additions and 613 deletions.
3 changes: 1 addition & 2 deletions .github/workflows/lint.yml
@@ -14,15 +14,14 @@ on:
- "src/backend/**"

env:
POETRY_VERSION: "1.7.0"
POETRY_VERSION: "1.8.2"

jobs:
lint:
runs-on: ubuntu-latest
strategy:
matrix:
python-version:
- "3.9"
- "3.10"
- "3.11"
steps:
2 changes: 1 addition & 1 deletion .github/workflows/test.yml
@@ -15,7 +15,7 @@ on:
- "src/backend/**"

env:
POETRY_VERSION: "1.5.0"
POETRY_VERSION: "1.8.2"

jobs:
build:
2 changes: 1 addition & 1 deletion deploy/base.Dockerfile
@@ -23,7 +23,7 @@ ENV PYTHONUNBUFFERED=1 \
\
# poetry
# https://python-poetry.org/docs/configuration/#using-environment-variables
-POETRY_VERSION=1.5.1 \
+POETRY_VERSION=1.8.2 \
# make poetry install to this location
POETRY_HOME="/opt/poetry" \
# make poetry create the virtual environment in the project's root
5 changes: 1 addition & 4 deletions src/backend/base/langflow/__main__.py
@@ -9,10 +9,7 @@
import httpx
import typer
from dotenv import load_dotenv
-from multiprocess import (
-Process, # type: ignore
-cpu_count, # type: ignore
-)
+from multiprocess import Process, cpu_count # type: ignore
from rich import box
from rich import print as rprint
from rich.console import Console
199 changes: 0 additions & 199 deletions src/backend/base/langflow/alembic/helpers/flow.py

This file was deleted.

34 changes: 0 additions & 34 deletions src/backend/base/langflow/alembic/helpers/record.py

This file was deleted.

2 changes: 1 addition & 1 deletion src/backend/base/langflow/api/utils.py
@@ -164,7 +164,7 @@ def get_is_component_from_data(data: dict):


async def check_langflow_version(component: StoreComponentCreate):
-from langflow import __version__ as current_version
+from langflow.version.version import __version__ as current_version # type: ignore

if not component.last_tested_version:
component.last_tested_version = current_version
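
The version import now lives in a dedicated `langflow.version` package. As a rough sketch of how the relocated import could be used when validating a store component, the helper below is illustrative only: the `packaging` dependency, the function name, and the comparison behavior are assumptions, not Langflow's actual code.

    from typing import Optional

    from packaging.version import parse  # assumption: the packaging library is available

    from langflow.version.version import __version__ as current_version  # new import location


    def is_tested_on_older_release(last_tested_version: Optional[str]) -> bool:
        # Hypothetical helper: report whether a component was last tested on an older Langflow release.
        if not last_tested_version:
            return False  # nothing recorded; check_langflow_version defaults it to the current version
        return parse(last_tested_version) < parse(current_version)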
2 changes: 1 addition & 1 deletion src/backend/base/langflow/api/v1/endpoints.py
@@ -239,7 +239,7 @@ async def create_upload_file(
# get endpoint to return version of langflow
@router.get("/version")
def get_version():
-from langflow.version import __version__
+from langflow.version import __version__ # type: ignore

return {"version": __version__}

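
Once a server is running, this endpoint can be exercised with any HTTP client. A minimal check using httpx follows; the host, port, and `/api/v1` prefix are assumptions about a default local install, so adjust them to your deployment.

    import httpx

    # Assumes a Langflow instance is listening locally; adjust the base URL to your setup.
    response = httpx.get("http://127.0.0.1:7860/api/v1/version", timeout=10)
    response.raise_for_status()
    print(response.json()["version"])  # the endpoint returns {"version": "<installed langflow version>"}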
16 changes: 10 additions & 6 deletions src/backend/base/langflow/base/agents/agent.py
@@ -1,10 +1,10 @@
-from typing import List, Union
+from typing import List, Optional, Union, cast

from langchain.agents import AgentExecutor, BaseMultiActionAgent, BaseSingleActionAgent
from langchain_core.runnables import Runnable

+from langflow.custom import CustomComponent
from langflow.field_typing import BaseMemory, Text, Tool
-from langflow.interface.custom.custom_component import CustomComponent


class LCAgentComponent(CustomComponent):
@@ -44,15 +44,19 @@ async def run_agent(
inputs: str,
input_variables: list[str],
tools: List[Tool],
-memory: BaseMemory = None,
+memory: Optional[BaseMemory] = None,
handle_parsing_errors: bool = True,
output_key: str = "output",
) -> Text:
if isinstance(agent, AgentExecutor):
runnable = agent
else:
runnable = AgentExecutor.from_agent_and_tools(
-agent=agent, tools=tools, verbose=True, memory=memory, handle_parsing_errors=handle_parsing_errors
+agent=agent, # type: ignore
+tools=tools,
+verbose=True,
+memory=memory,
+handle_parsing_errors=handle_parsing_errors,
)
input_dict = {"input": inputs}
for var in input_variables:
@@ -61,11 +65,11 @@ async def run_agent(
result = await runnable.ainvoke(input_dict)
self.status = result
if output_key in result:
-return result.get(output_key)
+return cast(str, result.get(output_key))
elif "output" not in result:
if output_key != "output":
raise ValueError(f"Output key not found in result. Tried '{output_key}' and 'output'.")
else:
raise ValueError("Output key not found in result. Tried 'output'.")

return result.get("output")
return cast(str, result.get("output"))
14 changes: 7 additions & 7 deletions src/backend/base/langflow/base/models/model.py
@@ -1,10 +1,10 @@
-from typing import Optional
+from typing import Optional, Union

from langchain_core.language_models.chat_models import BaseChatModel
from langchain_core.language_models.llms import LLM
from langchain_core.messages import HumanMessage, SystemMessage

-from langflow.interface.custom.custom_component import CustomComponent
+from langflow.custom import CustomComponent


class LCModelComponent(CustomComponent):
@@ -34,15 +34,15 @@ def get_result(self, runnable: LLM, stream: bool, input_value: str):
def get_chat_result(
self, runnable: BaseChatModel, stream: bool, input_value: str, system_message: Optional[str] = None
):
-messages = []
+messages: list[Union[HumanMessage, SystemMessage]] = []
if system_message:
-messages.append(SystemMessage(system_message))
+messages.append(SystemMessage(content=system_message))
if input_value:
-messages.append(HumanMessage(input_value))
+messages.append(HumanMessage(content=input_value))
if stream:
-result = runnable.stream(messages)
+return runnable.stream(messages)
else:
message = runnable.invoke(messages)
result = message.content
self.status = result
return result
return result
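
The message-building change is mostly about typing: annotating the list and passing `content=` explicitly keeps static checkers happy with langchain-core's message classes. A self-contained sketch of the same pattern, with illustrative example strings:

    from typing import Union

    from langchain_core.messages import HumanMessage, SystemMessage

    system_message = "You are a helpful assistant."        # illustrative value
    input_value = "Summarize this flow in one sentence."   # illustrative value

    messages: list[Union[HumanMessage, SystemMessage]] = []
    if system_message:
        messages.append(SystemMessage(content=system_message))
    if input_value:
        messages.append(HumanMessage(content=input_value))
    # messages is now ready to pass to a BaseChatModel's invoke() or stream()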
4 changes: 2 additions & 2 deletions src/backend/base/langflow/components/agents/XMLAgent.py
@@ -1,4 +1,4 @@
-from typing import List
+from typing import List, Optional

from langchain.agents import create_xml_agent
from langchain_core.prompts import PromptTemplate
@@ -69,7 +69,7 @@ async def build(
llm: BaseLLM,
tools: List[Tool],
prompt: str,
-memory: BaseMemory = None,
+memory: Optional[BaseMemory] = None,
tool_template: str = "{name}: {description}",
handle_parsing_errors: bool = True,
) -> Text:
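
This is the same signature fix as in agent.py: a parameter that defaults to None needs an `Optional[...]` annotation for strict type checkers to accept it. A minimal, generic illustration of the pattern; the class below is a stand-in, not Langflow's BaseMemory.

    from typing import Optional


    class FakeMemory:
        """Stand-in for BaseMemory, purely for illustration."""


    def build(prompt: str, memory: Optional[FakeMemory] = None) -> str:
        # memory: FakeMemory = None would be flagged by strict type checkers,
        # because None is not a valid FakeMemory instance.
        suffix = " (with memory)" if memory is not None else ""
        return prompt + suffix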
