Skip to content

Commit

Permalink
add error handling (langchain-ai#541)
Browse files Browse the repository at this point in the history
  • Loading branch information
hwchase17 authored Jan 5, 2023
1 parent 1631981 commit 5aefc2b
Show file tree
Hide file tree
Showing 3 changed files with 32 additions and 10 deletions.
11 changes: 8 additions & 3 deletions langchain/agents/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -272,9 +272,14 @@ def _call(self, inputs: Dict[str, str]) -> Dict[str, Any]:
self.callback_manager.on_tool_start(
{"name": str(chain)[:60] + "..."}, output, color="green"
)
# We then call the tool on the tool input to get an observation
observation = chain(output.tool_input)
color = color_mapping[output.tool]
try:
# We then call the tool on the tool input to get an observation
observation = chain(output.tool_input)
color = color_mapping[output.tool]
except Exception as e:
if self.verbose:
self.callback_manager.on_tool_error(e)
raise e
else:
if self.verbose:
self.callback_manager.on_tool_start(
Expand Down
7 changes: 6 additions & 1 deletion langchain/chains/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,12 @@ def __call__(
self.callback_manager.on_chain_start(
{"name": self.__class__.__name__}, inputs
)
outputs = self._call(inputs)
try:
outputs = self._call(inputs)
except Exception as e:
if self.verbose:
self.callback_manager.on_chain_error(e)
raise e
if self.verbose:
self.callback_manager.on_chain_end(outputs)
self._validate_outputs(outputs)
Expand Down
24 changes: 18 additions & 6 deletions langchain/llms/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,12 @@ def generate(
self.callback_manager.on_llm_start(
{"name": self.__class__.__name__}, prompts
)
output = self._generate(prompts, stop=stop)
try:
output = self._generate(prompts, stop=stop)
except Exception as e:
if self.verbose:
self.callback_manager.on_llm_error(e)
raise e
if self.verbose:
self.callback_manager.on_llm_end(output)
return output
Expand All @@ -90,11 +95,18 @@ def generate(
else:
missing_prompts.append(prompt)
missing_prompt_idxs.append(i)
self.callback_manager.on_llm_start(
{"name": self.__class__.__name__}, missing_prompts
)
new_results = self._generate(missing_prompts, stop=stop)
self.callback_manager.on_llm_end(new_results)
if self.verbose:
self.callback_manager.on_llm_start(
{"name": self.__class__.__name__}, missing_prompts
)
try:
new_results = self._generate(missing_prompts, stop=stop)
except Exception as e:
if self.verbose:
self.callback_manager.on_llm_error(e)
raise e
if self.verbose:
self.callback_manager.on_llm_end(new_results)
for i, result in enumerate(new_results.generations):
existing_prompts[missing_prompt_idxs[i]] = result
prompt = prompts[i]
Expand Down

0 comments on commit 5aefc2b

Please sign in to comment.