feat(providers): add gemini
ItzCrazyKns committed Nov 28, 2024
1 parent ecad065 commit 1777462
Showing 8 changed files with 151 additions and 1 deletion.
1 change: 1 addition & 0 deletions package.json
@@ -31,6 +31,7 @@
"@langchain/anthropic": "^0.2.3",
"@langchain/community": "^0.2.16",
"@langchain/openai": "^0.0.25",
"@langchain/google-genai": "^0.0.23",
"@xenova/transformers": "^2.17.1",
"axios": "^1.6.8",
"better-sqlite3": "^11.0.0",
1 change: 1 addition & 0 deletions sample.config.toml
@@ -7,6 +7,7 @@ KEEP_ALIVE = "5m" # How long to keep Ollama models loaded into memory. (Instead
OPENAI = "" # OpenAI API key - sk-1234567890abcdef1234567890abcdef
GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef
ANTHROPIC = "" # Anthropic API key - sk-ant-1234567890abcdef1234567890abcdef
GEMINI = "" # Gemini API key - sk-1234567890abcdef1234567890abcdef

[API_ENDPOINTS]
SEARXNG = "http://localhost:32768" # SearxNG API URL
3 changes: 3 additions & 0 deletions src/config.ts
@@ -14,6 +14,7 @@ interface Config {
OPENAI: string;
GROQ: string;
ANTHROPIC: string;
GEMINI: string;
};
API_ENDPOINTS: {
SEARXNG: string;
@@ -43,6 +44,8 @@ export const getGroqApiKey = () => loadConfig().API_KEYS.GROQ;

export const getAnthropicApiKey = () => loadConfig().API_KEYS.ANTHROPIC;

export const getGeminiApiKey = () => loadConfig().API_KEYS.GEMINI;

export const getSearxngApiEndpoint = () =>
process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG;

69 changes: 69 additions & 0 deletions src/lib/providers/gemini.ts
@@ -0,0 +1,69 @@
import {
ChatGoogleGenerativeAI,
GoogleGenerativeAIEmbeddings,
} from '@langchain/google-genai';
import { getGeminiApiKey } from '../../config';
import logger from '../../utils/logger';

export const loadGeminiChatModels = async () => {
const geminiApiKey = getGeminiApiKey();

if (!geminiApiKey) return {};

try {
const chatModels = {
'gemini-1.5-flash': {
displayName: 'Gemini 1.5 Flash',
model: new ChatGoogleGenerativeAI({
modelName: 'gemini-1.5-flash',
temperature: 0.7,
apiKey: geminiApiKey,
}),
},
'gemini-1.5-flash-8b': {
displayName: 'Gemini 1.5 Flash 8B',
model: new ChatGoogleGenerativeAI({
modelName: 'gemini-1.5-flash-8b',
temperature: 0.7,
apiKey: geminiApiKey,
}),
},
'gemini-1.5-pro': {
displayName: 'Gemini 1.5 Pro',
model: new ChatGoogleGenerativeAI({
modelName: 'gemini-1.5-pro',
temperature: 0.7,
apiKey: geminiApiKey,
}),
},
};

return chatModels;
} catch (err) {
logger.error(`Error loading Gemini models: ${err}`);
return {};
}
};

export const loadGeminiEmbeddingsModels = async () => {
const geminiApiKey = getGeminiApiKey();

if (!geminiApiKey) return {};

try {
const embeddingModels = {
'text-embedding-004': {
displayName: 'Text Embedding',
model: new GoogleGenerativeAIEmbeddings({
apiKey: geminiApiKey,
modelName: 'text-embedding-004',
}),
},
};

return embeddingModels;
} catch (err) {
logger.error(`Error loading Gemini embeddings model: ${err}`);
return {};
}
};
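The new provider file returns each entry as a displayName/model pair, where model is a standard LangChain chat (or embeddings) model. As a rough usage sketch only (not part of this commit; the import path, entry point, and prompt are assumptions), one of the loaded chat models could be invoked like this:

import { loadGeminiChatModels } from './src/lib/providers/gemini';

const demo = async () => {
  // loadGeminiChatModels() resolves to an empty object when no GEMINI key is
  // configured, so guard before indexing into it.
  const chatModels: Record<string, { displayName: string; model: any }> =
    await loadGeminiChatModels();
  const flash = chatModels['gemini-1.5-flash'];

  if (!flash) {
    throw new Error('Gemini provider unavailable: is the GEMINI API key set?');
  }

  // ChatGoogleGenerativeAI follows the LangChain chat-model interface, so
  // invoke() accepts a plain string and resolves to an AIMessage.
  const response = await flash.model.invoke('Say hello in one short sentence.');
  console.log(response.content);
};

demo().catch((err) => console.error(err));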
3 changes: 3 additions & 0 deletions src/lib/providers/index.ts
@@ -3,18 +3,21 @@ import { loadOllamaChatModels, loadOllamaEmbeddingsModels } from './ollama';
import { loadOpenAIChatModels, loadOpenAIEmbeddingsModels } from './openai';
import { loadAnthropicChatModels } from './anthropic';
import { loadTransformersEmbeddingsModels } from './transformers';
import { loadGeminiChatModels, loadGeminiEmbeddingsModels } from './gemini';

const chatModelProviders = {
openai: loadOpenAIChatModels,
groq: loadGroqChatModels,
ollama: loadOllamaChatModels,
anthropic: loadAnthropicChatModels,
gemini: loadGeminiChatModels,
};

const embeddingModelProviders = {
openai: loadOpenAIEmbeddingsModels,
local: loadTransformersEmbeddingsModels,
ollama: loadOllamaEmbeddingsModels,
gemini: loadGeminiEmbeddingsModels,
};

export const getAvailableChatModelProviders = async () => {
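The remainder of getAvailableChatModelProviders is truncated in this diff, so registering the two loaders above is the only change needed in this file. A hypothetical sketch of the aggregation pattern it presumably follows (names and return shape are assumptions, not taken from this commit):

// Sketch only: await every registered loader and merge the non-empty results
// into a single provider -> models map.
const collectProviders = async (
  providers: Record<string, () => Promise<Record<string, unknown>>>,
) => {
  const available: Record<string, Record<string, unknown>> = {};

  for (const [name, loadModels] of Object.entries(providers)) {
    const models = await loadModels();
    if (Object.keys(models).length > 0) {
      available[name] = models;
    }
  }

  return available;
};

// e.g. const chatProviders = await collectProviders(chatModelProviders);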
5 changes: 4 additions & 1 deletion src/routes/config.ts
@@ -7,6 +7,7 @@ import {
getGroqApiKey,
getOllamaApiEndpoint,
getAnthropicApiKey,
getGeminiApiKey,
getOpenaiApiKey,
updateConfig,
} from '../config';
@@ -52,7 +53,8 @@ router.get('/', async (_, res) => {
config['ollamaApiUrl'] = getOllamaApiEndpoint();
config['anthropicApiKey'] = getAnthropicApiKey();
config['groqApiKey'] = getGroqApiKey();

config['geminiApiKey'] = getGeminiApiKey();

res.status(200).json(config);
} catch (err: any) {
res.status(500).json({ message: 'An error has occurred.' });
@@ -68,6 +70,7 @@ router.post('/', async (req, res) => {
OPENAI: config.openaiApiKey,
GROQ: config.groqApiKey,
ANTHROPIC: config.anthropicApiKey,
GEMINI: config.geminiApiKey,
},
API_ENDPOINTS: {
OLLAMA: config.ollamaApiUrl,
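With this change the settings endpoint reads and writes the Gemini key alongside the existing ones. An illustrative shape of the payload it round-trips (values are placeholders; fields outside this diff are omitted):

// Hypothetical payload, for illustration only. The GET handler returns these
// camelCase fields; the POST handler accepts the same fields and maps them
// back onto API_KEYS / API_ENDPOINTS in config.toml.
const exampleConfigPayload = {
  openaiApiKey: '',
  groqApiKey: '',
  anthropicApiKey: '',
  geminiApiKey: '', // newly exposed and accepted by this commit
  ollamaApiUrl: 'http://localhost:11434',
};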
17 changes: 17 additions & 0 deletions ui/components/SettingsDialog.tsx
@@ -63,6 +63,7 @@ interface SettingsType {
openaiApiKey: string;
groqApiKey: string;
anthropicApiKey: string;
geminiApiKey: string;
ollamaApiUrl: string;
}

@@ -476,6 +477,22 @@ const SettingsDialog = ({
}
/>
</div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Gemini API Key
</p>
<Input
type="text"
placeholder="Gemini API key"
defaultValue={config.geminiApiKey}
onChange={(e) =>
setConfig({
...config,
geminiApiKey: e.target.value,
})
}
/>
</div>
</div>
)}
{isLoading && (
53 changes: 53 additions & 0 deletions yarn.lock
@@ -293,6 +293,11 @@
resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.19.12.tgz#c57c8afbb4054a3ab8317591a0b7320360b444ae"
integrity sha512-T1QyPSDCyMXaO3pzBkF96E8xMkiRYbUEZADd29SyPGabqxMViNoii+NcK7eWJAEoU6RZyEm5lVSIjTmcdoB9HA==

"@google/generative-ai@^0.7.0":
version "0.7.1"
resolved "https://registry.yarnpkg.com/@google/generative-ai/-/generative-ai-0.7.1.tgz#eb187c75080c0706245699dbc06816c830d8c6a7"
integrity sha512-WTjMLLYL/xfA5BW6xAycRPiAX7FNHKAxrid/ayqC1QMam0KAK0NbMeS9Lubw80gVg5xFMLE+H7pw4wdNzTOlxw==

"@huggingface/jinja@^0.2.2":
version "0.2.2"
resolved "https://registry.yarnpkg.com/@huggingface/jinja/-/jinja-0.2.2.tgz#faeb205a9d6995089bef52655ddd8245d3190627"
@@ -380,6 +385,23 @@
zod "^3.22.4"
zod-to-json-schema "^3.22.3"

"@langchain/core@>=0.2.16 <0.3.0":
version "0.2.36"
resolved "https://registry.yarnpkg.com/@langchain/core/-/core-0.2.36.tgz#75754c33aa5b9310dcf117047374a1ae011005a4"
integrity sha512-qHLvScqERDeH7y2cLuJaSAlMwg3f/3Oc9nayRSXRU2UuaK/SOhI42cxiPLj1FnuHJSmN0rBQFkrLx02gI4mcVg==
dependencies:
ansi-styles "^5.0.0"
camelcase "6"
decamelize "1.2.0"
js-tiktoken "^1.0.12"
langsmith "^0.1.56-rc.1"
mustache "^4.2.0"
p-queue "^6.6.2"
p-retry "4"
uuid "^10.0.0"
zod "^3.22.4"
zod-to-json-schema "^3.22.3"

"@langchain/core@>=0.2.9 <0.3.0":
version "0.2.15"
resolved "https://registry.yarnpkg.com/@langchain/core/-/core-0.2.15.tgz#1bb99ac4fffe935c7ba37edcaa91abfba3c82219"
@@ -415,6 +437,15 @@
zod "^3.22.4"
zod-to-json-schema "^3.22.3"

"@langchain/google-genai@^0.0.23":
version "0.0.23"
resolved "https://registry.yarnpkg.com/@langchain/google-genai/-/google-genai-0.0.23.tgz#e73af501bc1df4c7642b531759b82dc3eb7ae459"
integrity sha512-MTSCJEoKsfU1inz0PWvAjITdNFM4s41uvBCwLpcgx3jWJIEisczFD82x86ahYqJlb2fD6tohYSaCH/4tKAdkXA==
dependencies:
"@google/generative-ai" "^0.7.0"
"@langchain/core" ">=0.2.16 <0.3.0"
zod-to-json-schema "^3.22.4"

"@langchain/openai@^0.0.25", "@langchain/openai@~0.0.19":
version "0.0.25"
resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-0.0.25.tgz#8332abea1e3acb9b1169f90636e518c0ee90622e"
@@ -712,6 +743,11 @@
resolved "https://registry.yarnpkg.com/@types/triple-beam/-/triple-beam-1.3.5.tgz#74fef9ffbaa198eb8b588be029f38b00299caa2c"
integrity sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw==

"@types/uuid@^10.0.0":
version "10.0.0"
resolved "https://registry.yarnpkg.com/@types/uuid/-/uuid-10.0.0.tgz#e9c07fe50da0f53dc24970cca94d619ff03f6f6d"
integrity sha512-7gqG38EyHgyP1S+7+xomFtL+ZNHcKv6DwNaCZmJmo1vgMugyF3TCnXVg4t1uk89mLNwnLtnY3TpOpCOyp1/xHQ==

"@types/uuid@^9.0.1":
version "9.0.8"
resolved "https://registry.yarnpkg.com/@types/uuid/-/uuid-9.0.8.tgz#7545ba4fc3c003d6c756f651f3bf163d8f0f29ba"
@@ -1900,6 +1936,18 @@ langchainhub@~0.0.8:
resolved "https://registry.yarnpkg.com/langchainhub/-/langchainhub-0.0.8.tgz#fd4b96dc795e22e36c1a20bad31b61b0c33d3110"
integrity sha512-Woyb8YDHgqqTOZvWIbm2CaFDGfZ4NTSyXV687AG4vXEfoNo7cGQp7nhl7wL3ehenKWmNEmcxCLgOZzW8jE6lOQ==

langsmith@^0.1.56-rc.1:
version "0.1.68"
resolved "https://registry.yarnpkg.com/langsmith/-/langsmith-0.1.68.tgz#848332e822fe5e6734a07f1c36b6530cc1798afb"
integrity sha512-otmiysWtVAqzMx3CJ4PrtUBhWRG5Co8Z4o7hSZENPjlit9/j3/vm3TSvbaxpDYakZxtMjhkcJTqrdYFipISEiQ==
dependencies:
"@types/uuid" "^10.0.0"
commander "^10.0.1"
p-queue "^6.6.2"
p-retry "4"
semver "^7.6.3"
uuid "^10.0.0"

langsmith@~0.1.1, langsmith@~0.1.7:
version "0.1.14"
resolved "https://registry.yarnpkg.com/langsmith/-/langsmith-0.1.14.tgz#2b889dbcfb49547614df276a4a5a063092a1585d"
@@ -2568,6 +2616,11 @@ semver@^7.3.5, semver@^7.5.3, semver@^7.5.4:
dependencies:
lru-cache "^6.0.0"

semver@^7.6.3:
version "7.6.3"
resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.3.tgz#980f7b5550bc175fb4dc09403085627f9eb33143"
integrity sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==

send@0.18.0:
version "0.18.0"
resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be"
