From 39fc0b3dfe446a269cff3aa122afeb60915c6a08 Mon Sep 17 00:00:00 2001
From: jemeza-codegen
Date: Tue, 18 Mar 2025 11:24:14 -0700
Subject: [PATCH 1/2] fix!: increases output token limit for claude-3-7 to 12k

---
 src/codegen/extensions/langchain/llm.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/codegen/extensions/langchain/llm.py b/src/codegen/extensions/langchain/llm.py
index 0d4795740..8191c7151 100644
--- a/src/codegen/extensions/langchain/llm.py
+++ b/src/codegen/extensions/langchain/llm.py
@@ -89,7 +89,8 @@ def _get_model(self) -> BaseChatModel:
             if not os.getenv("ANTHROPIC_API_KEY"):
                 msg = "ANTHROPIC_API_KEY not found in environment. Please set it in your .env file or environment variables."
                 raise ValueError(msg)
-            return ChatAnthropic(**self._get_model_kwargs(), max_tokens=8192, max_retries=10, timeout=1000)
+            max_tokens = 12000 if "claude-3-7" in self.model_name else 8192
+            return ChatAnthropic(**self._get_model_kwargs(), max_tokens=max_tokens, max_retries=10, timeout=1000)
 
         elif self.model_provider == "openai":
             if not os.getenv("OPENAI_API_KEY"):
@@ -101,7 +102,7 @@ def _get_model(self) -> BaseChatModel:
             if not os.getenv("XAI_API_KEY"):
                 msg = "XAI_API_KEY not found in environment. Please set it in your .env file or environment variables."
                 raise ValueError(msg)
-            return ChatXAI(**self._get_model_kwargs(), max_tokens=8192)
+            return ChatXAI(**self._get_model_kwargs(), max_tokens=12000)
 
         msg = f"Unknown model provider: {self.model_provider}. Must be one of: anthropic, openai, xai"
         raise ValueError(msg)

From 925d5148bf8698f0a28b954145408d8fc0dd55b2 Mon Sep 17 00:00:00 2001
From: jemeza-codegen
Date: Tue, 18 Mar 2025 11:33:46 -0700
Subject: [PATCH 2/2] fix: increasing token limit to 8192 * 2

---
 src/codegen/extensions/langchain/llm.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/codegen/extensions/langchain/llm.py b/src/codegen/extensions/langchain/llm.py
index 8191c7151..4c457e46d 100644
--- a/src/codegen/extensions/langchain/llm.py
+++ b/src/codegen/extensions/langchain/llm.py
@@ -89,7 +89,7 @@ def _get_model(self) -> BaseChatModel:
             if not os.getenv("ANTHROPIC_API_KEY"):
                 msg = "ANTHROPIC_API_KEY not found in environment. Please set it in your .env file or environment variables."
                 raise ValueError(msg)
-            max_tokens = 12000 if "claude-3-7" in self.model_name else 8192
+            max_tokens = 16384 if "claude-3-7" in self.model_name else 8192
             return ChatAnthropic(**self._get_model_kwargs(), max_tokens=max_tokens, max_retries=10, timeout=1000)
 
         elif self.model_provider == "openai":