Skip to content

Commit

Permalink
fix: unify generation outputs on newer openllm release (langchain-ai#…
Browse files Browse the repository at this point in the history
…10523)

Update to the newer generation format from OpenLLM, which now returns a
dictionary for one-shot generation.

cc @baskaryan 

Signed-off-by: Aaron <[email protected]>

---------

Signed-off-by: Aaron <[email protected]>
  • Loading branch information
aarnphm authored Sep 13, 2023
1 parent 201b61d commit ac9609f
Showing 1 changed file with 16 additions and 4 deletions.
20 changes: 16 additions & 4 deletions libs/langchain/langchain/llms/openllm.py
Original file line number Diff line number Diff line change
Expand Up @@ -265,10 +265,16 @@ def _call(
self._identifying_params["model_name"], **copied
)
if self._client:
return self._client.query(prompt, **config.model_dump(flatten=True))
o = self._client.query(prompt, **config.model_dump(flatten=True))
if isinstance(o, dict) and "text" in o:
return o["text"]
return o
else:
assert self._runner is not None
return self._runner(prompt, **config.model_dump(flatten=True))
o = self._runner(prompt, **config.model_dump(flatten=True))
if isinstance(o, dict) and "text" in o:
return o["text"]
return o

async def _acall(
self,
Expand All @@ -291,9 +297,12 @@ async def _acall(
self._identifying_params["model_name"], **copied
)
if self._client:
return await self._client.acall(
o = await self._client.acall(
"generate", prompt, **config.model_dump(flatten=True)
)
if isinstance(o, dict) and "text" in o:
return o["text"]
return o
else:
assert self._runner is not None
(
Expand All @@ -304,6 +313,9 @@ async def _acall(
generated_result = await self._runner.generate.async_run(
prompt, **generate_kwargs
)
return self._runner.llm.postprocess_generate(
o = self._runner.llm.postprocess_generate(
prompt, generated_result, **postprocess_kwargs
)
if isinstance(o, dict) and "text" in o:
return o["text"]
return o

0 comments on commit ac9609f

Please sign in to comment.