Skip to content

Commit

Permalink
Merge pull request #55 from upstash/add-new-models
Browse files Browse the repository at this point in the history
feat: add new models
  • Loading branch information
ogzhanolguncu authored Aug 14, 2024
2 parents ed74b4b + d3b189c commit 0bea9d9
Show file tree
Hide file tree
Showing 4 changed files with 42 additions and 3 deletions.
Binary file modified bun.lockb
Binary file not shown.
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,7 @@
"react-dom": "^18.3.1"
},
"optionalDependencies": {
"langsmith": "^0.1.41"
"langsmith": "^0.1.41",
"@langchain/mistralai": "^0.0.28"
}
}
38 changes: 37 additions & 1 deletion src/models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import type { OpenAIChatInput } from "@langchain/openai";
import { ChatOpenAI } from "@langchain/openai";
import { Client as LangsmithClient } from "langsmith";
import type { OLLAMA_MODELS } from "./constants";
import { ChatMistralAI } from "@langchain/mistralai";

// Initialize global Langsmith tracer
// We use a global variable because:
Expand Down Expand Up @@ -61,7 +62,15 @@ export type LLMClientConfig = {
baseUrl: string;
};

type Providers = "openai" | "upstash" | "custom" | "ollama";
type Providers =
| "openai"
| "upstash"
| "custom"
| "ollama"
| "groq"
| "togetherai"
| "openrouter"
| "mistral";
type AnalyticsConfig =
| { name: "helicone"; token: string }
| { name: "langsmith"; token: string; apiUrl?: string };
Expand Down Expand Up @@ -216,3 +225,30 @@ export const ollama = (

return createLLMClient(model, { ...options, baseUrl: `${baseUrl}/v1` }, "ollama");
};

/**
 * Creates an LLM client backed by Groq's OpenAI-compatible endpoint.
 * The base URL is fixed; callers may pass any other model options.
 */
export const groq = (model: string, options?: Omit<ModelOptions, "baseUrl">) => {
  const groqOptions = { ...options, baseUrl: "https://api.groq.com/openai/v1" };
  return createLLMClient(model, groqOptions, "groq");
};

/**
 * Creates an LLM client backed by Together AI's OpenAI-compatible endpoint.
 * The base URL is fixed; callers may pass any other model options.
 */
export const togetherai = (model: string, options?: Omit<ModelOptions, "baseUrl">) => {
  const togetherOptions = { ...options, baseUrl: "https://api.together.xyz/v1" };
  return createLLMClient(model, togetherOptions, "togetherai");
};

/**
 * Creates an LLM client backed by OpenRouter's OpenAI-compatible endpoint.
 * The base URL is fixed; callers may pass any other model options.
 */
export const openrouter = (model: string, options?: Omit<ModelOptions, "baseUrl">) => {
  const openrouterOptions = { ...options, baseUrl: "https://openrouter.ai/api/v1" };
  return createLLMClient(model, openrouterOptions, "openrouter");
};
/**
 * Creates a Mistral AI chat model directly (not via createLLMClient).
 * Mistral AI does not support any analytics.
 * NOTE(review): `options` is spread straight into the ChatMistralAI
 * constructor — assumes every ModelOptions field is a valid ChatMistralAI
 * option; verify against the ModelOptions declaration.
 */
export const mistralai = (model: string, options?: Omit<ModelOptions, "baseUrl">) => {
  const mistralConfig = { model, ...options };
  return new ChatMistralAI(mistralConfig);
};
4 changes: 3 additions & 1 deletion src/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import type { Ratelimit } from "@upstash/ratelimit";
import type { Redis } from "@upstash/redis";
import type { Index } from "@upstash/vector";
import type { CustomPrompt } from "./rag-chat";
import type { ChatMistralAI } from "@langchain/mistralai";

declare const __brand: unique symbol;
type Brand<B> = { [__brand]: B };
Expand Down Expand Up @@ -87,8 +88,9 @@ export type RAGChatConfig = {
apiKey,
})
*/
model?: ChatOpenAI | OpenAIChatLanguageModel;

// eslint-disable-next-line @typescript-eslint/no-redundant-type-constituents
model?: ChatOpenAI | ChatMistralAI | OpenAIChatLanguageModel;
/**
* Ratelimit instance
* @example new Ratelimit({
Expand Down

0 comments on commit 0bea9d9

Please sign in to comment.