Skip to content

Commit

Permalink
remove code (langchain-ai#8425)
Browse files Browse the repository at this point in the history
  • Loading branch information
hwchase17 authored Jul 28, 2023
1 parent 3a78450 commit fab2445
Show file tree
Hide file tree
Showing 51 changed files with 13 additions and 3,881 deletions.
23 changes: 2 additions & 21 deletions libs/experimental/langchain_experimental/pal_chain/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,16 +8,14 @@
from __future__ import annotations

import ast
import warnings
from typing import Any, Dict, List, Optional

from langchain.callbacks.manager import CallbackManagerForChainRun
from langchain.chains.base import Chain
from langchain.chains.llm import LLMChain
from langchain.schema import BasePromptTemplate
from langchain.schema.language_model import BaseLanguageModel
from langchain.utilities import PythonREPL
from pydantic import Extra, Field, root_validator
from pydantic import Extra, Field

from langchain_experimental.pal_chain.colored_object_prompt import COLORED_OBJECT_PROMPT
from langchain_experimental.pal_chain.math_prompt import MATH_PROMPT
Expand Down Expand Up @@ -95,10 +93,6 @@ class PALChain(Chain):
"""

llm_chain: LLMChain
llm: Optional[BaseLanguageModel] = None
"""[Deprecated]"""
prompt: BasePromptTemplate = MATH_PROMPT
"""[Deprecated]"""
stop: str = "\n\n"
"""Stop token to use when generating code."""
get_answer_expr: str = "print(solution())"
Expand All @@ -121,26 +115,13 @@ class Config:
extra = Extra.forbid
arbitrary_types_allowed = True

@root_validator(pre=True)
def raise_deprecation(cls, values: Dict) -> Dict:
    """Pre-validator supporting the deprecated ``llm`` constructor argument.

    Emits a deprecation warning whenever ``llm`` is supplied and, when no
    explicit ``llm_chain`` was provided, builds one from ``llm`` with the
    default math prompt so that old call sites keep working.
    """
    if "llm" not in values:
        return values
    warnings.warn(
        "Directly instantiating a PALChain with an llm is deprecated. "
        "Please instantiate with llm_chain argument or using one of "
        "the class method constructors from_math_prompt, "
        "from_colored_object_prompt."
    )
    llm = values["llm"]
    if llm is not None and "llm_chain" not in values:
        values["llm_chain"] = LLMChain(llm=llm, prompt=MATH_PROMPT)
    return values

@property
def input_keys(self) -> List[str]:
    """Return the singular input key.

    Delegates to the prompt of the wrapped ``llm_chain`` (the standalone
    ``prompt`` attribute was removed along with the deprecated ``llm``
    argument).

    :meta private:
    """
    return self.llm_chain.prompt.input_variables

@property
def output_keys(self) -> List[str]:
Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
"""Test LLM PAL functionality."""
import pytest

from langchain.chains.pal.base import PALChain, PALValidation
from langchain.chains.pal.colored_object_prompt import COLORED_OBJECT_PROMPT
from langchain.chains.pal.math_prompt import MATH_PROMPT
from tests.unit_tests.llms.fake_llm import FakeLLM
from langchain_experimental.pal_chain.base import PALChain, PALValidation
from langchain_experimental.pal_chain.colored_object_prompt import COLORED_OBJECT_PROMPT
from langchain_experimental.pal_chain.math_prompt import MATH_PROMPT
from tests.unit_tests.fake_llm import FakeLLM

_MATH_SOLUTION_1 = """
def solution():
Expand Down
4 changes: 0 additions & 4 deletions libs/langchain/langchain/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,7 @@
LLMChain,
LLMCheckerChain,
LLMMathChain,
PALChain,
QAWithSourcesChain,
SQLDatabaseChain,
VectorDBQA,
VectorDBQAWithSourcesChain,
)
Expand Down Expand Up @@ -106,7 +104,6 @@
"SagemakerEndpoint",
"HuggingFacePipeline",
"SQLDatabase",
"SQLDatabaseChain",
"PowerBIDataset",
"FAISS",
"MRKLChain",
Expand All @@ -116,7 +113,6 @@
"ConversationChain",
"VectorDBQAWithSourcesChain",
"QAWithSourcesChain",
"PALChain",
"LlamaCpp",
"HuggingFaceTextGenInference",
]
19 changes: 0 additions & 19 deletions libs/langchain/langchain/agents/load_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
from langchain.chains.api import news_docs, open_meteo_docs, podcast_docs, tmdb_docs
from langchain.chains.api.base import APIChain
from langchain.chains.llm_math.base import LLMMathChain
from langchain.chains.pal.base import PALChain
from langchain.utilities.requests import TextRequestsWrapper
from langchain.tools.arxiv.tool import ArxivQueryRun
from langchain.tools.golden_query.tool import GoldenQueryRun
Expand Down Expand Up @@ -105,22 +104,6 @@ def _get_sleep() -> BaseTool:
}


def _get_pal_math(llm: BaseLanguageModel) -> BaseTool:
    """Build the PAL-MATH tool backed by a math-prompted PAL chain."""
    chain = PALChain.from_math_prompt(llm)
    return Tool(
        name="PAL-MATH",
        description="A language model that is really good at solving complex word math problems. Input should be a fully worded hard word math problem.",
        func=chain.run,
    )


def _get_pal_colored_objects(llm: BaseLanguageModel) -> BaseTool:
    """Build the PAL-COLOR-OBJ tool backed by a colored-object-prompted PAL chain."""
    chain = PALChain.from_colored_object_prompt(llm)
    return Tool(
        name="PAL-COLOR-OBJ",
        description="A language model that is really good at reasoning about position and the color attributes of objects. Input should be a fully worded hard reasoning problem. Make sure to include all information about the objects AND the final question you want to answer.",
        func=chain.run,
    )


def _get_llm_math(llm: BaseLanguageModel) -> BaseTool:
return Tool(
name="Calculator",
Expand All @@ -140,8 +123,6 @@ def _get_open_meteo_api(llm: BaseLanguageModel) -> BaseTool:


_LLM_TOOLS: Dict[str, Callable[[BaseLanguageModel], BaseTool]] = {
"pal-math": _get_pal_math,
"pal-colored-objects": _get_pal_colored_objects,
"llm-math": _get_llm_math,
"open-meteo-api": _get_open_meteo_api,
}
Expand Down
8 changes: 0 additions & 8 deletions libs/langchain/langchain/chains/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,6 @@
create_tagging_chain,
create_tagging_chain_pydantic,
)
from langchain.chains.pal.base import PALChain
from langchain.chains.qa_generation.base import QAGenerationChain
from langchain.chains.qa_with_sources.base import QAWithSourcesChain
from langchain.chains.qa_with_sources.retrieval import RetrievalQAWithSourcesChain
Expand All @@ -69,10 +68,6 @@
RouterChain,
)
from langchain.chains.sequential import SequentialChain, SimpleSequentialChain
from langchain.chains.sql_database.base import (
SQLDatabaseChain,
SQLDatabaseSequentialChain,
)
from langchain.chains.sql_database.query import create_sql_query_chain
from langchain.chains.transform import TransformChain

Expand Down Expand Up @@ -108,16 +103,13 @@
"NebulaGraphQAChain",
"OpenAIModerationChain",
"OpenAPIEndpointChain",
"PALChain",
"QAGenerationChain",
"QAWithSourcesChain",
"ReduceDocumentsChain",
"RefineDocumentsChain",
"RetrievalQA",
"RetrievalQAWithSourcesChain",
"RouterChain",
"SQLDatabaseChain",
"SQLDatabaseSequentialChain",
"SequentialChain",
"SimpleSequentialChain",
"StuffDocumentsChain",
Expand Down
31 changes: 7 additions & 24 deletions libs/langchain/langchain/chains/loading.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,9 @@
from langchain.chains.llm_checker.base import LLMCheckerChain
from langchain.chains.llm_math.base import LLMMathChain
from langchain.chains.llm_requests import LLMRequestsChain
from langchain.chains.pal.base import PALChain
from langchain.chains.qa_with_sources.base import QAWithSourcesChain
from langchain.chains.qa_with_sources.vector_db import VectorDBQAWithSourcesChain
from langchain.chains.retrieval_qa.base import RetrievalQA, VectorDBQA
from langchain.chains.sql_database.base import SQLDatabaseChain
from langchain.llms.loading import load_llm, load_llm_from_config
from langchain.prompts.loading import (
_load_output_parser,
Expand Down Expand Up @@ -266,34 +264,17 @@ def _load_map_rerank_documents_chain(
return MapRerankDocumentsChain(llm_chain=llm_chain, **config)


def _load_pal_chain(config: dict, **kwargs: Any) -> Any:
    """Load a PALChain from a serialized config dict.

    PALChain now lives in ``langchain_experimental``, so it is imported
    lazily inside the function to avoid a hard dependency at module import
    time; the return annotation is ``Any`` for the same reason.

    Args:
        config: serialized chain config; must contain either ``llm_chain``
            (an inline chain config) or ``llm_chain_path`` (a path to one).
        **kwargs: unused here; accepted for signature parity with the other
            ``_load_*`` loaders dispatched by chain type.

    Raises:
        ValueError: if neither ``llm_chain`` nor ``llm_chain_path`` is present.
    """
    from langchain_experimental.pal_chain import PALChain

    if "llm_chain" in config:
        llm_chain_config = config.pop("llm_chain")
        llm_chain = load_chain_from_config(llm_chain_config)
    elif "llm_chain_path" in config:
        llm_chain = load_chain(config.pop("llm_chain_path"))
    else:
        raise ValueError("One of `llm_chain` or `llm_chain_path` must be present.")
    return PALChain(llm_chain=llm_chain, **config)


def _load_refine_documents_chain(config: dict, **kwargs: Any) -> RefineDocumentsChain:
Expand Down Expand Up @@ -342,7 +323,7 @@ def _load_qa_with_sources_chain(config: dict, **kwargs: Any) -> QAWithSourcesCha
return QAWithSourcesChain(combine_documents_chain=combine_documents_chain, **config)


def _load_sql_database_chain(config: dict, **kwargs: Any) -> SQLDatabaseChain:
def _load_sql_database_chain(config: dict, **kwargs: Any) -> Any:
if "database" in kwargs:
database = kwargs.pop("database")
else:
Expand All @@ -359,6 +340,8 @@ def _load_sql_database_chain(config: dict, **kwargs: Any) -> SQLDatabaseChain:
prompt = load_prompt_from_config(prompt_config)
else:
prompt = None
from langchain_experimental.sql import SQLDatabaseChain

return SQLDatabaseChain.from_llm(llm, database, prompt=prompt, **config)


Expand Down
6 changes: 0 additions & 6 deletions libs/langchain/langchain/chains/pal/__init__.py

This file was deleted.

Loading

0 comments on commit fab2445

Please sign in to comment.