Added OllamaEmbeddings component with documentation (langflow-ai#1309)
Ollama embeddings enhance Langflow's support for Ollama, allowing users to run LLMs such as Mistral and Llama locally. LangChain documentation is available via [this link](https://python.langchain.com/docs/integrations/text_embedding/ollama).

Changes:
- New `OllamaEmbeddingsComponent` class
- Associated documentation in the `Embeddings` section
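For context, here is a minimal usage sketch (not part of this commit) of the underlying LangChain class the new component wraps. It assumes an Ollama server is running at the default local address and that the `llama2` model has already been pulled:

```python
# Hypothetical sketch, not part of the commit.
# Assumes a local Ollama server at http://localhost:11434 with llama2 pulled.
from langchain_community.embeddings import OllamaEmbeddings

embedder = OllamaEmbeddings(model="llama2", base_url="http://localhost:11434")

# Embed a single query string and a small batch of documents.
query_vector = embedder.embed_query("What is Langflow?")
doc_vectors = embedder.embed_documents(["Mistral runs locally.", "So does Llama."])

print(len(query_vector), len(doc_vectors))  # vector size depends on the model
```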
Showing 3 changed files with 58 additions and 5 deletions.
src/backend/langflow/components/embeddings/OllamaEmbeddings.py (41 additions, 0 deletions)
from typing import Optional

from langflow import CustomComponent
from langchain.embeddings.base import Embeddings
from langchain_community.embeddings import OllamaEmbeddings


class OllamaEmbeddingsComponent(CustomComponent):
    """
    A custom component for implementing an Embeddings Model using Ollama.
    """

    display_name: str = "Ollama Embeddings"
    description: str = "Embeddings model from Ollama."
    documentation = "https://python.langchain.com/docs/integrations/text_embedding/ollama"
    beta = True

    def build_config(self):
        return {
            "model": {
                "display_name": "Ollama Model",
            },
            "base_url": {"display_name": "Ollama Base URL"},
            "temperature": {"display_name": "Model Temperature"},
            "code": {"show": False},
        }

    def build(
        self,
        model: str = "llama2",
        base_url: str = "http://localhost:11434",
        temperature: Optional[float] = None,
    ) -> Embeddings:
        try:
            output = OllamaEmbeddings(
                model=model,
                base_url=base_url,
                temperature=temperature
            )  # type: ignore
        except Exception as e:
            raise ValueError("Could not connect to Ollama API.") from e
        return output
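As a rough usage note (an assumption, not shown in the commit): outside the Langflow UI, the component's `build()` method could be exercised directly, provided the component can be instantiated standalone and a local Ollama server is reachable at the default base URL:

```python
# Hypothetical sketch, assuming OllamaEmbeddingsComponent can be instantiated
# directly outside a Langflow flow and an Ollama server is running locally.
from langflow.components.embeddings.OllamaEmbeddings import OllamaEmbeddingsComponent

component = OllamaEmbeddingsComponent()
embedder = component.build(model="llama2", base_url="http://localhost:11434")

vector = embedder.embed_query("Hello from Langflow")
print(len(vector))  # embedding dimensionality depends on the chosen model
```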