
Commit

Merge pull request #64 from Significant-Gravitas/zamilmajdy/agpt-432-function-generation-is-applying-strict-check-on-args-and

[AGPT-432] Provide Request & Response checking on Function Generation
Swiftyos authored Mar 1, 2024
2 parents 10da924 + f79dec1 commit e95f544
Showing 23 changed files with 1,311 additions and 592 deletions.
7 changes: 5 additions & 2 deletions codex/__main__.py
@@ -1,5 +1,6 @@
import asyncio
import io
import logging
import os
import shutil
import zipfile
@@ -12,7 +13,7 @@
import codex.common.test_const as test_const
from codex.common.logging_config import setup_logging

# from networkx import is_valid_degree_sequence_havel_hakimi
logger = logging.getLogger(__name__)


@click.group()
@@ -95,6 +96,8 @@ async def fetch_deliverable(session, user_id, app_id):
return deploy_data
except Exception as e:
click.echo(f"Error fetching deliverable: {e}")
print("Problematic URL: ", url)
logger.exception(e)
return


@@ -142,7 +145,7 @@ def benchmark():


@cli.command()
def run_example():
def example():
from codex.requirements.model import ExampleTask

i = 1
38 changes: 25 additions & 13 deletions codex/common/ai_block.py
@@ -44,6 +44,10 @@ class config:
arbitrary_types_allowed = True


# This can be used for synchronous testing without making an actual API call
MOCK_RESPONSE = ""


class AIBlock:
"""
The AI BLock is a base class for all AI Blocks. It provides a common interface for
@@ -195,7 +199,7 @@ async def store_call_attempt(
)
return call_attempt

def load_temaplate(self, template: str, invoke_params: dict) -> str:
def load_template(self, template: str, invoke_params: dict) -> str:
try:
lang_str = ""
if self.langauge:
@@ -269,8 +273,8 @@ async def invoke(self, ids: Identifiers, invoke_params: dict, max_retries=3) ->
try:
if self.is_json_response:
invoke_params["format_instructions"] = self.get_format_instructions()
system_prompt = self.load_temaplate("system", invoke_params)
user_prompt = self.load_temaplate("user", invoke_params)
system_prompt = self.load_template("system", invoke_params)
user_prompt = self.load_template("user", invoke_params)

request_params = {
"model": self.model,
@@ -287,9 +291,7 @@ async def invoke(self, ids: Identifiers, invoke_params: dict, max_retries=3) ->
logger.error(f"Error creating request params: {e}")
raise LLMFailure(f"Error creating request params: {e}")
try:
response = await self.oai_client.chat.completions.create(**request_params)

presponse = self.parse(response)
presponse = await self.call_llm(request_params)

await self.store_call_attempt(
ids.user_id,
@@ -312,15 +314,12 @@ async def invoke(self, ids: Identifiers, invoke_params: dict, max_retries=3) ->
invoke_params["generation"] = "Error generating response"
invoke_params["error"] = str(error_message)

retry_prompt = self.load_temaplate("retry", invoke_params)
retry_prompt = self.load_template("retry", invoke_params)
request_params["messages"] = [
{"role": "system", "content": system_prompt},
{"role": "user", "content": retry_prompt},
]
response = await self.oai_client.chat.completions.create(
**request_params
)
presponse = self.parse(response)
presponse = await self.call_llm(request_params)
assert request_params["messages"], "Messages not set"

await self.store_call_attempt(
@@ -332,21 +331,34 @@ async def invoke(self, ids: Identifiers, invoke_params: dict, max_retries=3) ->
)
validated_response = self.validate(invoke_params, presponse)
break
except Exception as retry_error:
except ValidationError as retry_error:
logger.warning(
f"{retries}/{max_retries}"
+ f" Failed validating response: {retry_error}"
)
error_message = retry_error
continue
if not validated_response:
raise LLMFailure(f"Error validating response: {validation_error}")
except Exception as unkown_error:
logger.error(f"Error invoking AIBlock: {unkown_error}")
logger.exception(f"Error invoking AIBlock: {unkown_error}", unkown_error)
raise LLMFailure(f"Error invoking AIBlock: {unkown_error}")

stored_obj = await self.create_item(ids, validated_response)
return stored_obj if stored_obj else validated_response.response

async def call_llm(self, request_params: dict) -> ValidatedResponse:
if MOCK_RESPONSE:
return ValidatedResponse(
response=MOCK_RESPONSE,
usage_statistics=CompletionUsage(
completion_tokens=0, prompt_tokens=0, total_tokens=0
),
message=MOCK_RESPONSE,
)
response = await self.oai_client.chat.completions.create(**request_params)
return self.parse(response)

async def create_item(
self, ids: Identifiers, validated_response: ValidatedResponse
):
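The new call_llm helper above centralizes the OpenAI call and, when the module-level MOCK_RESPONSE constant is non-empty, returns it directly as a ValidatedResponse with zeroed token usage, so a block can be exercised without a real API call. Below is a minimal sketch of how a test could use that hook; the test itself is hypothetical (not part of this commit) and assumes pytest with pytest-asyncio, an arbitrary model string, and that AIBlock (or the concrete subclass under test) can be constructed without arguments.

# Hypothetical test sketch, not part of this commit: exercises AIBlock.call_llm
# via the MOCK_RESPONSE short-circuit added in codex/common/ai_block.py.
import pytest

import codex.common.ai_block as ai_block


@pytest.mark.asyncio
async def test_call_llm_uses_mock_response(monkeypatch):
    # With MOCK_RESPONSE set, call_llm skips oai_client entirely and wraps the
    # canned string in a ValidatedResponse with zeroed usage statistics.
    monkeypatch.setattr(ai_block, "MOCK_RESPONSE", '{"answer": "stub"}')

    block = ai_block.AIBlock()  # assumption: default-constructible; a concrete subclass may be needed
    result = await block.call_llm({"model": "gpt-4", "messages": []})

    assert result.response == '{"answer": "stub"}'
    assert result.usage_statistics.total_tokens == 0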
7 changes: 7 additions & 0 deletions codex/database.py
@@ -121,6 +121,13 @@ async def create_test_data():
"userId": user_1.user_id,
"updatedAt": datetime.now(),
},
{
"name": "TicTacToe Game",
"deleted": False,
"id": test_const.app_id_11,
"userId": user_id_2,
"updatedAt": datetime.now(),
},
]
)

126 changes: 1 addition & 125 deletions codex/deploy/agent.py
@@ -1,18 +1,12 @@
import ast
import base64
import logging
import uuid
from typing import Tuple

import black
import isort
from prisma.models import CompiledRoute as CompiledRouteDBModel
from prisma.models import CompletedApp, Deployment
from prisma.types import DeploymentCreateInput

from codex.api_model import Identifiers
from codex.deploy.model import Application
from codex.deploy.packager import create_zip_file
from codex.develop.compile import create_server_code

logger = logging.getLogger(__name__)

@@ -41,121 +35,3 @@ async def create_deployment(ids: Identifiers, completedApp: CompletedApp) -> Dep
logger.exception("Error creating deployment in database")
raise ValueError(f"Error creating deployment in database: {e}")
return deployment


def create_server_code(completed_app: CompletedApp) -> Application:
"""
Args:
application (Application): _description_
Returns:
Application: _description_
"""
name = completed_app.name
desc = completed_app.description

server_code_imports = [
"from fastapi import FastAPI",
"from fastapi.responses import JSONResponse",
"import logging",
"import io",
"from typing import *",
]
server_code_header = f"""logger = logging.getLogger(__name__)
app = FastAPI(title="{name}", description='''{desc}''')"""

service_routes_code = []
if completed_app.CompiledRoutes is None:
raise ValueError("Application must have at least one compiled route.")

packages = []
main_function_names = set()
for i, compiled_route in enumerate(completed_app.CompiledRoutes):
if compiled_route.ApiRouteSpec is None:
raise ValueError(f"Compiled route {compiled_route.id} has no APIRouteSpec")

if compiled_route.Packages:
packages.extend(compiled_route.Packages)
request = compiled_route.ApiRouteSpec.RequestObject
response = compiled_route.ApiRouteSpec.ResponseObject

assert request is not None, f"RequestObject is required for {compiled_route.id}"
assert (
response is not None
), f"ResponseObject is required for {compiled_route.id}"

route_path = compiled_route.ApiRouteSpec.path
logger.info(f"Creating route for {route_path}")
# import the main function from the service file
compiled_route_module = compiled_route.fileName.replace(".py", "")
service_import = f"from {compiled_route_module} import *"
server_code_imports.append(service_import)

# Write the api endpoint
# TODO: pass the request method from the APIRouteSpec
response_type = "return JSONResponse(content=response)"
# horrible if if if for type checking
if response.Fields:
params = response.Fields
if (len(params) > 0) and (params[0].typeName == "bytes"):
response_type = """
# Convert the bytes to a BytesIO object for streaming
file_stream = io.BytesIO(response)
# Set the correct content-type for zip files
headers = {
"Content-Disposition": f"attachment; filename="new_file.zip""
}
# Return the streaming response
return StreamingResponse(
content=file_stream, media_type="application/zip", headers=headers
)
"""
assert request.Fields is not None, f"RequestObject {request.id} has no Fields"

request_param_str = ", ".join(
[f"{param.name}: {param.typeName}" for param in request.Fields]
)
param_names_str = ", ".join([param.name for param in request.Fields])

# method is a string here even though it should be an enum in the model
method_name = compiled_route.ApiRouteSpec.method.lower() # type: ignore
api_route_name = f"{method_name}_{compiled_route.mainFunctionName}_route"
if compiled_route.mainFunctionName in main_function_names:
main_function_names.add(compiled_route.mainFunctionName)

unique_end = uuid.uuid4().hex[:2]
api_route_name += f"_{unique_end}"

route_code = f"""@app.{method_name}("{route_path}")
async def {api_route_name}({request_param_str}):
try:
response = {compiled_route.mainFunctionName}({param_names_str})
except Exception as e:
logger.exception("Error processing request")
response = dict()
response["error"] = str(e)
return JSONResponse(content=response)
{response_type}
"""
service_routes_code.append(route_code)

# Compile the server code
server_code = "\n".join(server_code_imports)
server_code += "\n\n"
server_code += server_code_header
server_code += "\n\n"
server_code += "\n\n".join(service_routes_code)

# Update the application with the server code
sorted_content = isort.code(server_code)
formatted_code = black.format_str(sorted_content, mode=black.FileMode())
return Application(
name=name,
description=desc,
server_code=formatted_code,
completed_app=completed_app,
packages=packages,
)
11 changes: 9 additions & 2 deletions codex/deploy/routes.py
@@ -14,6 +14,7 @@
FindManyCompiledRouteArgsFromCompletedApp,
ObjectTypeArgsFromAPIRouteSpecRecursive4,
)
from regex import R

import codex.database
import codex.deploy.agent as deploy_agent
@@ -53,7 +54,13 @@ async def create_deployment(
**{"include": {"Fields": True}}
),
)
)
),
RootFunction={ # type: ignore
"include": {
"FunctionArgs": {"include": {"Type": True}},
"FunctionReturn": {"include": {"Type": True}},
}
},
)
),
)
@@ -85,7 +92,7 @@ async def create_deployment(
file_size=deployment.fileSize,
)
except Exception as e:
logger.error(f"Error creating deployment: {e}")
logger.exception(f"Error creating deployment: {e}")
# File upload handling and metadata storage implementation goes here
return Response(
content=json.dumps(
(Diffs for the remaining changed files were not loaded.)

