Skip to content

Commit

Permalink
Allow the regular openai class to be used for ChatGPT models (langchain-ai#1393)
Browse files Browse the repository at this point in the history

Co-authored-by: Harrison Chase <[email protected]>
  • Loading branch information
nfcampos and hwchase17 authored Mar 2, 2023
1 parent 8947797 commit 499e76b
Show file tree
Hide file tree
Showing 4 changed files with 15 additions and 1 deletion.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,7 @@ celerybeat.pid

# Environments
.env
.envrc
.venv
.venvs
env/
Expand Down
6 changes: 6 additions & 0 deletions langchain/llms/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -161,6 +161,12 @@ class BaseOpenAI(BaseLLM, BaseModel):
streaming: bool = False
"""Whether to stream the results or not."""

def __new__(cls, **data: Any) -> Union[OpenAIChat, BaseOpenAI]:  # type: ignore
    """Route construction: chat-style model names get an OpenAIChat instance.

    When ``model_name`` begins with ``gpt-3.5-turbo`` the caller receives an
    ``OpenAIChat`` built from the same keyword arguments; otherwise the
    normal allocation for this class proceeds.
    """
    requested_model = data.get("model_name", "")
    if requested_model.startswith("gpt-3.5-turbo"):
        return OpenAIChat(**data)
    return super().__new__(cls)

class Config:
"""Configuration for this pydantic object."""

Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "langchain"
version = "0.0.99"
version = "0.0.100"
description = "Building applications with LLMs through composability"
authors = []
license = "MIT"
Expand Down
7 changes: 7 additions & 0 deletions tests/integration_tests/llms/test_openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -144,6 +144,13 @@ async def test_openai_async_streaming_callback() -> None:
assert isinstance(result, LLMResult)


def test_openai_chat_wrong_class() -> None:
    """Constructing OpenAI with a chat model name should still yield a working LLM."""
    model = OpenAI(model_name="gpt-3.5-turbo")
    result = model("Say foo:")
    assert isinstance(result, str)


def test_openai_chat() -> None:
"""Test OpenAIChat."""
llm = OpenAIChat(max_tokens=10)
Expand Down

0 comments on commit 499e76b

Please sign in to comment.