Skip to content

Commit bf3cf6b

Browse files
committed Dec 23, 2024
feat: implementing ollama 3.2 through ChatOllama models
1 parent a7ae3bc commit bf3cf6b

File tree

1 file changed

+7
-1
lines changed

1 file changed

+7
-1
lines changed
 

brickllm/helpers/llm_models.py

+7-1
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22

33
from langchain.chat_models.base import BaseChatModel
44
from langchain_anthropic import ChatAnthropic
5+
from langchain_ollama import ChatOllama
56
from langchain_community.llms import Ollama
67
from langchain_fireworks import ChatFireworks
78
from langchain_openai import ChatOpenAI
@@ -23,6 +24,8 @@ def _get_model(model: Union[str, BaseChatModel]) -> BaseChatModel:
2324

2425
if model == "openai":
2526
return ChatOpenAI(temperature=0, model="gpt-4o")
27+
elif model == "ollama3.2":
28+
return ChatOllama(model="llama3.2")
2629
elif model == "anthropic":
2730
return ChatAnthropic(temperature=0, model_name="claude-3-sonnet-20240229")
2831
elif model == "fireworks":
@@ -31,5 +34,8 @@ def _get_model(model: Union[str, BaseChatModel]) -> BaseChatModel:
3134
)
3235
elif model == "llama3.1:8b-brick":
3336
return Ollama(model="llama3.1:8b-brick-v8")
37+
elif model == "hf.co/Giudice7/llama32-3B-brick-demo:latest":
38+
return Ollama(model="hf.co/Giudice7/llama32-3B-brick-demo:latest")
39+
3440
else:
35-
raise ValueError(f"Unsupported model type: {model}")
41+
raise ValueError(f"Unsupported model type: {model}. Load your own BaseChatModel if this one is not supported.")

0 commit comments

Comments (0)
Please sign in to comment.