Merge branch 'danny-avila:main' into main
paychex-ssmithrand authored Nov 20, 2024
2 parents c792021 + 951bb9d commit 365f966
Showing 93 changed files with 2,018 additions and 3,312 deletions.
1 change: 1 addition & 0 deletions .env.example
@@ -352,6 +352,7 @@ ILLEGAL_MODEL_REQ_SCORE=5
 #========================#
 
 CHECK_BALANCE=false
+# START_BALANCE=20000 # note: the number of tokens that will be credited after registration.
 
 #========================#
 # Registration and Login #
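Note: CHECK_BALANCE enables token-credit tracking, and START_BALANCE (commented out above) sets the credits granted on registration. A minimal, hypothetical sketch of reading these settings in Node follows; the logic shown is illustrative only, not LibreChat's actual balance code.

require('dotenv').config();

// Read the two balance-related settings from the environment
const checkBalance = process.env.CHECK_BALANCE === 'true';
const startBalance = Number.parseInt(process.env.START_BALANCE ?? '0', 10);

if (checkBalance && startBalance > 0) {
  // e.g. credit `startBalance` token credits to a newly registered user here
  console.log(`Balance checking enabled; new accounts start with ${startBalance} token credits.`);
}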
6 changes: 3 additions & 3 deletions api/app/clients/GoogleClient.js
@@ -1,11 +1,11 @@
 const { google } = require('googleapis');
 const { Agent, ProxyAgent } = require('undici');
 const { ChatVertexAI } = require('@langchain/google-vertexai');
+const { GoogleVertexAI } = require('@langchain/google-vertexai');
+const { ChatGoogleVertexAI } = require('@langchain/google-vertexai');
 const { ChatGoogleGenerativeAI } = require('@langchain/google-genai');
 const { GoogleGenerativeAI: GenAI } = require('@google/generative-ai');
-const { GoogleVertexAI } = require('@langchain/community/llms/googlevertexai');
-const { ChatGoogleVertexAI } = require('langchain/chat_models/googlevertexai');
-const { AIMessage, HumanMessage, SystemMessage } = require('langchain/schema');
+const { AIMessage, HumanMessage, SystemMessage } = require('@langchain/core/messages');
 const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = require('tiktoken');
 const {
   validateVisionModel,
9 changes: 3 additions & 6 deletions api/app/clients/PluginsClient.js
@@ -1,14 +1,13 @@
 const OpenAIClient = require('./OpenAIClient');
-const { CallbackManager } = require('langchain/callbacks');
 const { CacheKeys, Time } = require('librechat-data-provider');
+const { CallbackManager } = require('@langchain/core/callbacks/manager');
 const { BufferMemory, ChatMessageHistory } = require('langchain/memory');
-const { initializeCustomAgent, initializeFunctionsAgent } = require('./agents');
 const { addImages, buildErrorInput, buildPromptPrefix } = require('./output_parsers');
+const { initializeCustomAgent, initializeFunctionsAgent } = require('./agents');
 const { processFileURL } = require('~/server/services/Files/process');
 const { EModelEndpoint } = require('librechat-data-provider');
 const { formatLangChainMessages } = require('./prompts');
 const checkBalance = require('~/models/checkBalance');
-const { SelfReflectionTool } = require('./tools');
 const { isEnabled } = require('~/server/utils');
 const { extractBaseURL } = require('~/utils');
 const { loadTools } = require('./tools/util');
@@ -122,9 +121,7 @@ class PluginsClient extends OpenAIClient {
       },
     });
 
-    if (this.tools.length > 0 && !this.functionsAgent) {
-      this.tools.push(new SelfReflectionTool({ message, isGpt3: false }));
-    } else if (this.tools.length === 0) {
+    if (this.tools.length === 0) {
       return;
     }
 
2 changes: 1 addition & 1 deletion api/app/clients/agents/CustomAgent/CustomAgent.js
@@ -1,5 +1,5 @@
 const { ZeroShotAgent } = require('langchain/agents');
-const { PromptTemplate, renderTemplate } = require('langchain/prompts');
+const { PromptTemplate, renderTemplate } = require('@langchain/core/prompts');
 const { gpt3, gpt4 } = require('./instructions');
 
 class CustomAgent extends ZeroShotAgent {
2 changes: 1 addition & 1 deletion api/app/clients/agents/CustomAgent/initializeCustomAgent.js
@@ -7,7 +7,7 @@ const {
   ChatPromptTemplate,
   SystemMessagePromptTemplate,
   HumanMessagePromptTemplate,
-} = require('langchain/prompts');
+} = require('@langchain/core/prompts');
 
 const initializeCustomAgent = async ({
   tools,
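As a reference for the new import path, here is a minimal, hypothetical prompt composed with these classes; the template text is illustrative and not LibreChat's actual agent prompt.

const {
  ChatPromptTemplate,
  SystemMessagePromptTemplate,
  HumanMessagePromptTemplate,
} = require('@langchain/core/prompts');

// Build a two-message chat prompt; the wording is illustrative only
const prompt = ChatPromptTemplate.fromMessages([
  SystemMessagePromptTemplate.fromTemplate('You are a helpful agent with access to tools.'),
  HumanMessagePromptTemplate.fromTemplate('{input}'),
]);

// Produces [SystemMessage, HumanMessage] with {input} filled in
prompt.formatMessages({ input: 'What is 2 + 2?' }).then(console.log);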
122 changes: 0 additions & 122 deletions api/app/clients/agents/Functions/FunctionsAgent.js

This file was deleted.

2 changes: 1 addition & 1 deletion api/app/clients/document/tokenSplit.js
@@ -1,4 +1,4 @@
-const { TokenTextSplitter } = require('langchain/text_splitter');
+const { TokenTextSplitter } = require('@langchain/textsplitters');
 
 /**
  * Splits a given text by token chunks, based on the provided parameters for the TokenTextSplitter.
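Usage of the splitter is unchanged; only the package moves. A small sketch against the new @langchain/textsplitters entry point, with illustrative parameter values:

const { TokenTextSplitter } = require('@langchain/textsplitters');

async function demo() {
  const splitter = new TokenTextSplitter({
    encodingName: 'cl100k_base', // tiktoken encoding used to count tokens
    chunkSize: 10, // maximum tokens per chunk
    chunkOverlap: 0, // tokens shared between adjacent chunks
  });
  // splitText resolves to an array of token-bounded string chunks
  const chunks = await splitter.splitText('Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nullam id.');
  console.log(chunks);
}

demo();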
2 changes: 1 addition & 1 deletion api/app/clients/document/tokenSplit.spec.js
@@ -12,7 +12,7 @@ describe('tokenSplit', () => {
       returnSize: 5,
     });
 
-    expect(result).toEqual(['. Null', ' Nullam', 'am id', ' id.', '.']);
+    expect(result).toEqual(['it.', '. Null', ' Nullam', 'am id', ' id.']);
   });
 
   it('returns correct text chunks with default parameters', async () => {
2 changes: 1 addition & 1 deletion api/app/clients/llm/createLLM.js
@@ -1,4 +1,4 @@
-const { ChatOpenAI } = require('langchain/chat_models/openai');
+const { ChatOpenAI } = require('@langchain/openai');
 const { sanitizeModelName, constructAzureURL } = require('~/utils');
 const { isEnabled } = require('~/server/utils');
 
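createLLM now constructs ChatOpenAI from @langchain/openai. A minimal, hypothetical instantiation with standard options follows; the values are illustrative, and this is not the createLLM implementation itself.

const { ChatOpenAI } = require('@langchain/openai');

const llm = new ChatOpenAI({
  modelName: 'gpt-4o-mini', // illustrative model name
  temperature: 0.2,
  openAIApiKey: process.env.OPENAI_API_KEY,
});

// invoke returns an AIMessage; print its text content
llm.invoke('Say hello in one word.').then((message) => console.log(message.content));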
2 changes: 1 addition & 1 deletion api/app/clients/memory/summaryBuffer.demo.js
@@ -1,5 +1,5 @@
 require('dotenv').config();
-const { ChatOpenAI } = require('langchain/chat_models/openai');
+const { ChatOpenAI } = require('@langchain/openai');
 const { getBufferString, ConversationSummaryBufferMemory } = require('langchain/memory');
 
 const chatPromptMemory = new ConversationSummaryBufferMemory({
4 changes: 2 additions & 2 deletions api/app/clients/prompts/formatAgentMessages.spec.js
@@ -1,6 +1,6 @@
 const { ToolMessage } = require('@langchain/core/messages');
 const { ContentTypes } = require('librechat-data-provider');
-const { HumanMessage, AIMessage, SystemMessage } = require('langchain/schema');
+const { HumanMessage, AIMessage, SystemMessage } = require('@langchain/core/messages');
 const { formatAgentMessages } = require('./formatMessages');
 
 describe('formatAgentMessages', () => {
@@ -120,7 +120,7 @@ describe('formatAgentMessages', () => {
     ];
     const result = formatAgentMessages(payload);
     expect(result).toHaveLength(2);
-    expect(result[0].tool_calls[0].args).toBe('non-json-string');
+    expect(result[0].tool_calls[0].args).toStrictEqual({ input: 'non-json-string' });
   });
 
   it('should handle complex tool calls with multiple steps', () => {
9 changes: 6 additions & 3 deletions api/app/clients/prompts/formatMessages.js
@@ -1,6 +1,6 @@
 const { ToolMessage } = require('@langchain/core/messages');
 const { EModelEndpoint, ContentTypes } = require('librechat-data-provider');
-const { HumanMessage, AIMessage, SystemMessage } = require('langchain/schema');
+const { HumanMessage, AIMessage, SystemMessage } = require('@langchain/core/messages');
 
 /**
  * Formats a message to OpenAI Vision API payload format.
@@ -189,10 +189,13 @@ const formatAgentMessages = (payload) => {
+        // TODO: investigate; args as dictionary may need to be provider-or-tool-specific
         let args = _args;
         try {
-          args = JSON.parse(args);
+          args = JSON.parse(_args);
         } catch (e) {
-          // failed to parse, leave as is
+          if (typeof _args === 'string') {
+            args = { input: _args };
+          }
         }
 
         tool_call.args = args;
         lastAIMessage.tool_calls.push(tool_call);
 
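The behavioral change above: tool-call arguments that fail JSON.parse are now wrapped as { input: string } instead of being left as a raw string, which is exactly what the updated expectation in formatAgentMessages.spec.js checks. A standalone restatement (the function name here is illustrative):

function parseToolArgs(_args) {
  let args = _args;
  try {
    // Valid JSON becomes a plain object, e.g. '{"query":"weather"}' -> { query: 'weather' }
    args = JSON.parse(_args);
  } catch (e) {
    // Non-JSON strings are wrapped instead of being passed through untouched
    if (typeof _args === 'string') {
      args = { input: _args };
    }
  }
  return args;
}

console.log(parseToolArgs('{"query":"weather"}')); // { query: 'weather' }
console.log(parseToolArgs('non-json-string')); // { input: 'non-json-string' }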
2 changes: 1 addition & 1 deletion api/app/clients/prompts/formatMessages.spec.js
@@ -1,5 +1,5 @@
 const { Constants } = require('librechat-data-provider');
-const { HumanMessage, AIMessage, SystemMessage } = require('langchain/schema');
+const { HumanMessage, AIMessage, SystemMessage } = require('@langchain/core/messages');
 const { formatMessage, formatLangChainMessages, formatFromLangChain } = require('./formatMessages');
 
 describe('formatMessage', () => {
2 changes: 1 addition & 1 deletion api/app/clients/prompts/summaryPrompts.js
@@ -1,4 +1,4 @@
-const { PromptTemplate } = require('langchain/prompts');
+const { PromptTemplate } = require('@langchain/core/prompts');
 /*
  * Without `{summary}` and `{new_lines}`, token count is 98
  * We are counting this towards the max context tokens for summaries, +3 for the assistant label (101)
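For context, the summary templates in this file expose {summary} and {new_lines} input variables. A minimal, hypothetical template built against the new import path (the wording is illustrative, not the file's actual prompt text):

const { PromptTemplate } = require('@langchain/core/prompts');

// A progressive-summary template with the same two input variables
const summaryTemplate = PromptTemplate.fromTemplate(
  'Current summary:\n{summary}\n\nNew lines of conversation:\n{new_lines}\n\nNew summary:',
);

summaryTemplate
  .format({ summary: 'The user asked about token limits.', new_lines: 'AI: They depend on the model.' })
  .then(console.log);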
20 changes: 17 additions & 3 deletions api/app/clients/prompts/titlePrompts.js
@@ -2,7 +2,7 @@ const {
   ChatPromptTemplate,
   SystemMessagePromptTemplate,
   HumanMessagePromptTemplate,
-} = require('langchain/prompts');
+} = require('@langchain/core/prompts');
 
 const langPrompt = new ChatPromptTemplate({
   promptMessages: [
@@ -99,10 +99,24 @@ ONLY include the generated translation without quotations, nor its related key<
  * @returns {string} The parsed parameter's value or a default value if not found.
  */
 function parseParamFromPrompt(prompt, paramName) {
-  const paramRegex = new RegExp(`<${paramName}>([\\s\\S]+?)</${paramName}>`);
+  // Handle null/undefined prompt
+  if (!prompt) {
+    return `No ${paramName} provided`;
+  }
+
+  // Try original format first: <title>value</title>
+  const simpleRegex = new RegExp(`<${paramName}>(.*?)</${paramName}>`, 's');
+  const simpleMatch = prompt.match(simpleRegex);
+
+  if (simpleMatch) {
+    return simpleMatch[1].trim();
+  }
+
+  // Try parameter format: <parameter name="title">value</parameter>
+  const paramRegex = new RegExp(`<parameter name="${paramName}">(.*?)</parameter>`, 's');
   const paramMatch = prompt.match(paramRegex);
 
-  if (paramMatch && paramMatch[1]) {
+  if (paramMatch) {
     return paramMatch[1].trim();
   }
 
73 changes: 73 additions & 0 deletions api/app/clients/prompts/titlePrompts.spec.js
@@ -0,0 +1,73 @@
const { parseParamFromPrompt } = require('./titlePrompts');
describe('parseParamFromPrompt', () => {
  // Original simple format tests
  test('extracts parameter from simple format', () => {
    const prompt = '<title>Simple Title</title>';
    expect(parseParamFromPrompt(prompt, 'title')).toBe('Simple Title');
  });

  // Parameter format tests
  test('extracts parameter from parameter format', () => {
    const prompt =
      '<function_calls> <invoke name="submit_title"> <parameter name="title">Complex Title</parameter> </invoke>';
    expect(parseParamFromPrompt(prompt, 'title')).toBe('Complex Title');
  });

  // Edge cases and error handling
  test('returns NO TOOL INVOCATION message for non-matching content', () => {
    const prompt = 'Some random text without parameters';
    expect(parseParamFromPrompt(prompt, 'title')).toBe(
      'NO TOOL INVOCATION: Some random text without parameters',
    );
  });

  test('returns default message for empty prompt', () => {
    expect(parseParamFromPrompt('', 'title')).toBe('No title provided');
  });

  test('returns default message for null prompt', () => {
    expect(parseParamFromPrompt(null, 'title')).toBe('No title provided');
  });

  // Multiple parameter tests
  test('works with different parameter names', () => {
    const prompt = '<name>John Doe</name>';
    expect(parseParamFromPrompt(prompt, 'name')).toBe('John Doe');
  });

  test('handles multiline content', () => {
    const prompt = `<parameter name="description">This is a
 multiline
 description</parameter>`;
    expect(parseParamFromPrompt(prompt, 'description')).toBe(
      'This is a\n multiline\n description',
    );
  });

  // Whitespace handling
  test('trims whitespace from extracted content', () => {
    const prompt = '<title> Padded Title </title>';
    expect(parseParamFromPrompt(prompt, 'title')).toBe('Padded Title');
  });

  test('handles whitespace in parameter format', () => {
    const prompt = '<parameter name="title"> Padded Parameter Title </parameter>';
    expect(parseParamFromPrompt(prompt, 'title')).toBe('Padded Parameter Title');
  });

  // Invalid format tests
  test('handles malformed tags', () => {
    const prompt = '<title>Incomplete Tag';
    expect(parseParamFromPrompt(prompt, 'title')).toBe('NO TOOL INVOCATION: <title>Incomplete Tag');
  });

  test('handles empty tags', () => {
    const prompt = '<title></title>';
    expect(parseParamFromPrompt(prompt, 'title')).toBe('');
  });

  test('handles empty parameter tags', () => {
    const prompt = '<parameter name="title"></parameter>';
    expect(parseParamFromPrompt(prompt, 'title')).toBe('');
  });
});
2 changes: 1 addition & 1 deletion api/app/clients/specs/BaseClient.test.js
@@ -30,7 +30,7 @@ jest.mock('~/models', () => ({
   updateFileUsage: jest.fn(),
 }));
 
-jest.mock('langchain/chat_models/openai', () => {
+jest.mock('@langchain/openai', () => {
   return {
     ChatOpenAI: jest.fn().mockImplementation(() => {
       return {};
2 changes: 1 addition & 1 deletion api/app/clients/specs/OpenAIClient.test.js
@@ -34,7 +34,7 @@ jest.mock('~/models', () => ({
   updateFileUsage: jest.fn(),
 }));
 
-jest.mock('langchain/chat_models/openai', () => {
+jest.mock('@langchain/openai', () => {
   return {
     ChatOpenAI: jest.fn().mockImplementation(() => {
       return {};