From 137356dbec745bb357e441cd69b24412078bb2b6 Mon Sep 17 00:00:00 2001 From: thepok Date: Tue, 13 Dec 2022 14:15:51 +0100 Subject: [PATCH] -1 max token description for openai (#330) --- langchain/llms/openai.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/langchain/llms/openai.py b/langchain/llms/openai.py index 4cebef1d150a3..2ea83ee77c562 100644 --- a/langchain/llms/openai.py +++ b/langchain/llms/openai.py @@ -29,7 +29,9 @@ class OpenAI(LLM, BaseModel): temperature: float = 0.7 """What sampling temperature to use.""" max_tokens: int = 256 - """The maximum number of tokens to generate in the completion.""" + """The maximum number of tokens to generate in the completion. + -1 returns as many tokens as possible given the prompt and + the model's maximal context size.""" top_p: float = 1 """Total probability mass of tokens to consider at each step.""" frequency_penalty: float = 0