
Commit

Merge branch 'danny-avila:main' into main
Donavan authored Nov 7, 2024
2 parents fbb7616 + 81f2936 commit 0569315
Showing 10 changed files with 153 additions and 97 deletions.
36 changes: 28 additions & 8 deletions api/server/routes/files/multer.js
@@ -30,21 +30,41 @@ const importFileFilter = (req, file, cb) => {
}
};

const fileFilter = (req, file, cb) => {
if (!file) {
return cb(new Error('No file provided'), false);
}
/**
*
* @param {import('librechat-data-provider').FileConfig | undefined} customFileConfig
*/
const createFileFilter = (customFileConfig) => {
/**
* @param {ServerRequest} req
* @param {Express.Multer.File} file
* @param {import('multer').FileFilterCallback} cb
*/
const fileFilter = (req, file, cb) => {
if (!file) {
return cb(new Error('No file provided'), false);
}

if (!defaultFileConfig.checkType(file.mimetype)) {
return cb(new Error('Unsupported file type: ' + file.mimetype), false);
}
const endpoint = req.body.endpoint;
const supportedTypes =
customFileConfig?.endpoints?.[endpoint]?.supportedMimeTypes ??
customFileConfig?.endpoints?.default.supportedMimeTypes ??
defaultFileConfig?.endpoints?.[endpoint]?.supportedMimeTypes;

if (!defaultFileConfig.checkType(file.mimetype, supportedTypes)) {
return cb(new Error('Unsupported file type: ' + file.mimetype), false);
}

cb(null, true);
};

cb(null, true);
return fileFilter;
};

const createMulterInstance = async () => {
const customConfig = await getCustomConfig();
const fileConfig = mergeFileConfig(customConfig?.fileConfig);
const fileFilter = createFileFilter(fileConfig);
return multer({
storage,
fileFilter,
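For context, a minimal sketch of how the new factory might be consumed by an Express upload route; the route path, field name, and error handling below are illustrative and not part of this commit.

// Illustrative usage only (route path and field name are hypothetical).
const express = require('express');

const router = express.Router();

router.post('/upload', async (req, res, next) => {
  // Built per request so the latest merged file config drives the filter.
  const upload = await createMulterInstance();
  upload.single('file')(req, res, (err) => {
    if (err) {
      // Rejections from fileFilter (e.g. an unsupported MIME type) land here.
      return res.status(400).json({ message: err.message });
    }
    next();
  });
});

module.exports = router;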
5 changes: 3 additions & 2 deletions api/server/services/Config/loadConfigEndpoints.js
@@ -1,6 +1,6 @@
const { EModelEndpoint, extractEnvVariable } = require('librechat-data-provider');
const { isUserProvided, normalizeEndpointName } = require('~/server/utils');
const { getCustomConfig } = require('./getCustomConfig');
const { isUserProvided } = require('~/server/utils');

/**
* Load config endpoints from the cached configuration object
@@ -29,7 +29,8 @@ async function loadConfigEndpoints(req) {

for (let i = 0; i < customEndpoints.length; i++) {
const endpoint = customEndpoints[i];
const { baseURL, apiKey, name, iconURL, modelDisplayLabel } = endpoint;
const { baseURL, apiKey, name: configName, iconURL, modelDisplayLabel } = endpoint;
const name = normalizeEndpointName(configName);

const resolvedApiKey = extractEnvVariable(apiKey);
const resolvedBaseURL = extractEnvVariable(baseURL);
11 changes: 1 addition & 10 deletions api/server/services/Config/loadConfigModels.js
@@ -1,16 +1,7 @@
const { Providers } = require('@librechat/agents');
const { EModelEndpoint, extractEnvVariable } = require('librechat-data-provider');
const { isUserProvided, normalizeEndpointName } = require('~/server/utils');
const { fetchModels } = require('~/server/services/ModelService');
const { getCustomConfig } = require('./getCustomConfig');
const { isUserProvided } = require('~/server/utils');

/**
* @param {string} name
* @returns {string}
*/
function normalizeEndpointName(name = '') {
return name.toLowerCase() === Providers.OLLAMA ? Providers.OLLAMA : name;
}

/**
* Load config endpoints from the cached configuration object
2 changes: 2 additions & 0 deletions api/server/services/Endpoints/bedrock/options.js
@@ -12,6 +12,7 @@ const getOptions = async ({ req, endpointOption }) => {
const {
BEDROCK_AWS_SECRET_ACCESS_KEY,
BEDROCK_AWS_ACCESS_KEY_ID,
BEDROCK_AWS_SESSION_TOKEN,
BEDROCK_REVERSE_PROXY,
BEDROCK_AWS_DEFAULT_REGION,
PROXY,
@@ -24,6 +25,7 @@ const getOptions = async ({ req, endpointOption }) => {
: {
accessKeyId: BEDROCK_AWS_ACCESS_KEY_ID,
secretAccessKey: BEDROCK_AWS_SECRET_ACCESS_KEY,
...(BEDROCK_AWS_SESSION_TOKEN && { sessionToken: BEDROCK_AWS_SESSION_TOKEN }),
};

if (!credentials) {
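A sketch of the credentials object that results when a session token is configured; reading from process.env inline here is illustrative, since the actual code destructures these names from process.env further up in the file.

// Sketch only — mirrors the spread pattern above; values come from the environment.
const credentials = {
  accessKeyId: process.env.BEDROCK_AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.BEDROCK_AWS_SECRET_ACCESS_KEY,
  // sessionToken is only included when BEDROCK_AWS_SESSION_TOKEN is set,
  // so temporary (STS) credentials work without changing the static-key path.
  ...(process.env.BEDROCK_AWS_SESSION_TOKEN && {
    sessionToken: process.env.BEDROCK_AWS_SESSION_TOKEN,
  }),
};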
11 changes: 9 additions & 2 deletions api/server/services/Files/VectorDB/crud.js
Expand Up @@ -2,6 +2,7 @@ const fs = require('fs');
const axios = require('axios');
const FormData = require('form-data');
const { FileSources } = require('librechat-data-provider');
const { logAxiosError } = require('~/utils');
const { logger } = require('~/config');

/**
@@ -32,7 +33,10 @@ const deleteVectors = async (req, file) => {
data: [file.file_id],
});
} catch (error) {
logger.error('Error deleting vectors', error);
logAxiosError({
error,
message: 'Error deleting vectors',
});
throw new Error(error.message || 'An error occurred during file deletion.');
}
};
@@ -91,7 +95,10 @@ async function uploadVectors({ req, file, file_id }) {
embedded: Boolean(responseData.known_type),
};
} catch (error) {
logger.error('Error embedding file', error);
logAxiosError({
error,
message: 'Error uploading vectors',
});
throw new Error(error.message || 'An error occurred during file upload.');
}
}
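The call sites above pass { error, message } to logAxiosError; the helper itself is not part of this diff. A hedged sketch of what such a helper typically does, assuming it lives in ~/utils and logs through the same logger — this is an inferred shape, not the actual implementation.

// Assumed shape, inferred from the call sites above — not the actual implementation.
const { logger } = require('~/config');

function logAxiosError({ error, message }) {
  if (error.response) {
    // Axios attaches the HTTP response; surface status and body for easier debugging.
    logger.error(`${message} (status ${error.response.status})`, error.response.data);
  } else {
    logger.error(message, error);
  }
}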
15 changes: 13 additions & 2 deletions api/server/utils/handleText.js
@@ -7,6 +7,7 @@ const {
defaultRetrievalModels,
defaultAssistantsVersion,
} = require('librechat-data-provider');
const { Providers } = require('@librechat/agents');
const { getCitations, citeText } = require('./citations');
const partialRight = require('lodash/partialRight');
const { sendMessage } = require('./streamResponse');
@@ -212,13 +213,23 @@ function generateConfig(key, baseURL, endpoint) {
return config;
}

/**
* Normalize the endpoint name to system-expected value.
* @param {string} name
* @returns {string}
*/
function normalizeEndpointName(name = '') {
return name.toLowerCase() === Providers.OLLAMA ? Providers.OLLAMA : name;
}

module.exports = {
createOnProgress,
isEnabled,
handleText,
formatSteps,
formatAction,
addSpaceIfNeeded,
isUserProvided,
generateConfig,
addSpaceIfNeeded,
createOnProgress,
normalizeEndpointName,
};
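Usage sketch for the relocated helper, assuming Providers.OLLAMA is the lowercase string 'ollama'; the example endpoint names are illustrative.

// Illustrative values only.
const { normalizeEndpointName } = require('~/server/utils');

normalizeEndpointName('Ollama');     // 'ollama' — any casing of "ollama" maps to the provider constant
normalizeEndpointName('OpenRouter'); // 'OpenRouter' — other endpoint names pass through unchanged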
12 changes: 11 additions & 1 deletion client/src/components/Plugins/Store/PluginAuthForm.tsx
@@ -78,8 +78,18 @@ function PluginAuthForm({ plugin, onSubmit, isEntityTool }: TPluginAuthFormProps
})}
<button
disabled={!isDirty || !isValid || isSubmitting}
type="submit"
type="button"
className="btn btn-primary relative"
onClick={() => {
handleSubmit((auth) =>
onSubmit({
pluginKey: plugin?.pluginKey ?? '',
action: 'install',
auth,
isEntityTool,
}),
)();
}}
>
<div className="flex items-center justify-center gap-2">
{localize('com_ui_save')}
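The button now calls react-hook-form's handleSubmit explicitly from onClick instead of relying on a native submit event. A minimal sketch of that pattern in isolation — everything except the handleSubmit/onSubmit API is illustrative.

// Pattern sketch (react-hook-form): handleSubmit(callback) returns a function that
// validates the form and then invokes the callback with the current field values.
const submitAuth = handleSubmit((auth) =>
  onSubmit({ pluginKey: plugin?.pluginKey ?? '', action: 'install', auth, isEntityTool }),
);

// Wiring it to a type="button" click means submission no longer depends on a
// surrounding <form> submit event, e.g. <button type="button" onClick={() => submitAuth()}>Save</button>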
154 changes: 84 additions & 70 deletions client/src/hooks/Files/useFileHandling.ts
@@ -1,7 +1,7 @@
import { v4 } from 'uuid';
import debounce from 'lodash/debounce';
import { useQueryClient } from '@tanstack/react-query';
import { useState, useEffect, useCallback, useRef } from 'react';
import { useState, useEffect, useCallback, useRef, useMemo } from 'react';
import {
megabyte,
QueryKeys,
@@ -47,14 +47,24 @@ const useFileHandling = (params?: UseFileHandling) => {
const agent_id = params?.additionalMetadata?.agent_id ?? '';
const assistant_id = params?.additionalMetadata?.assistant_id ?? '';

const { data: fileConfig = defaultFileConfig } = useGetFileConfig({
const { data: fileConfig = null } = useGetFileConfig({
select: (data) => mergeFileConfig(data),
});
const endpoint =
params?.overrideEndpoint ?? conversation?.endpointType ?? conversation?.endpoint ?? 'default';

const { fileLimit, fileSizeLimit, totalSizeLimit, supportedMimeTypes } =
fileConfig.endpoints[endpoint] ?? fileConfig.endpoints.default;
const endpoint = useMemo(
() =>
params?.overrideEndpoint ?? conversation?.endpointType ?? conversation?.endpoint ?? 'default',
[params?.overrideEndpoint, conversation?.endpointType, conversation?.endpoint],
);

const { fileLimit, fileSizeLimit, totalSizeLimit, supportedMimeTypes } = useMemo(
() =>
fileConfig?.endpoints[endpoint] ??
fileConfig?.endpoints.default ??
defaultFileConfig.endpoints[endpoint] ??
defaultFileConfig.endpoints.default,
[fileConfig, endpoint],
);

const displayToast = useCallback(() => {
if (errors.length > 1) {
@@ -146,6 +156,7 @@ const useFileHandling = (params?: UseFileHandling) => {
startUploadTimer(extendedFile.file_id, filename, extendedFile.size);

const formData = new FormData();
formData.append('endpoint', endpoint);
formData.append('file', extendedFile.file as File, encodeURIComponent(filename));
formData.append('file_id', extendedFile.file_id);

@@ -167,8 +178,6 @@
}
}

formData.append('endpoint', endpoint);

if (!isAssistantsEndpoint(endpoint)) {
uploadFile.mutate(formData);
return;
@@ -203,81 +212,86 @@
uploadFile.mutate(formData);
};

const validateFiles = (fileList: File[]) => {
const existingFiles = Array.from(files.values());
const incomingTotalSize = fileList.reduce((total, file) => total + file.size, 0);
if (incomingTotalSize === 0) {
setError('com_error_files_empty');
return false;
}
const currentTotalSize = existingFiles.reduce((total, file) => total + file.size, 0);

if (fileList.length + files.size > fileLimit) {
setError(`You can only upload up to ${fileLimit} files at a time.`);
return false;
}

for (let i = 0; i < fileList.length; i++) {
let originalFile = fileList[i];
let fileType = originalFile.type;
const extension = originalFile.name.split('.').pop() ?? '';
const knownCodeType = codeTypeMapping[extension];

// Infer MIME type for Known Code files when the type is empty or a mismatch
if (knownCodeType && (!fileType || fileType !== knownCodeType)) {
fileType = knownCodeType;
const validateFiles = useCallback(
(fileList: File[]) => {
const existingFiles = Array.from(files.values());
const incomingTotalSize = fileList.reduce((total, file) => total + file.size, 0);
if (incomingTotalSize === 0) {
setError('com_error_files_empty');
return false;
}
const currentTotalSize = existingFiles.reduce((total, file) => total + file.size, 0);

// Check if the file type is still empty after the extension check
if (!fileType) {
setError('Unable to determine file type for: ' + originalFile.name);
if (fileList.length + files.size > fileLimit) {
setError(`You can only upload up to ${fileLimit} files at a time.`);
return false;
}

// Replace empty type with inferred type
if (originalFile.type !== fileType) {
const newFile = new File([originalFile], originalFile.name, { type: fileType });
originalFile = newFile;
fileList[i] = newFile;
for (let i = 0; i < fileList.length; i++) {
let originalFile = fileList[i];
let fileType = originalFile.type;
const extension = originalFile.name.split('.').pop() ?? '';
const knownCodeType = codeTypeMapping[extension];

// Infer MIME type for Known Code files when the type is empty or a mismatch
if (knownCodeType && (!fileType || fileType !== knownCodeType)) {
fileType = knownCodeType;
}

// Check if the file type is still empty after the extension check
if (!fileType) {
setError('Unable to determine file type for: ' + originalFile.name);
return false;
}

// Replace empty type with inferred type
if (originalFile.type !== fileType) {
const newFile = new File([originalFile], originalFile.name, { type: fileType });
originalFile = newFile;
fileList[i] = newFile;
}

if (!checkType(originalFile.type, supportedMimeTypes)) {
console.log(originalFile);
setError('Currently, unsupported file type: ' + originalFile.type);
return false;
}

if (originalFile.size >= fileSizeLimit) {
setError(`File size exceeds ${fileSizeLimit / megabyte} MB.`);
return false;
}
}

if (!checkType(originalFile.type, supportedMimeTypes)) {
console.log(originalFile);
setError('Currently, unsupported file type: ' + originalFile.type);
if (currentTotalSize + incomingTotalSize > totalSizeLimit) {
setError(`The total size of the files cannot exceed ${totalSizeLimit / megabyte} MB.`);
return false;
}

if (originalFile.size >= fileSizeLimit) {
setError(`File size exceeds ${fileSizeLimit / megabyte} MB.`);
const combinedFilesInfo = [
...existingFiles.map(
(file) =>
`${file.file?.name ?? file.filename}-${file.size}-${
file.type?.split('/')[0] ?? 'file'
}`,
),
...fileList.map(
(file: File | undefined) =>
`${file?.name}-${file?.size}-${file?.type.split('/')[0] ?? 'file'}`,
),
];

const uniqueFilesSet = new Set(combinedFilesInfo);

if (uniqueFilesSet.size !== combinedFilesInfo.length) {
setError('com_error_files_dupe');
return false;
}
}

if (currentTotalSize + incomingTotalSize > totalSizeLimit) {
setError(`The total size of the files cannot exceed ${totalSizeLimit / megabyte} MB.`);
return false;
}

const combinedFilesInfo = [
...existingFiles.map(
(file) =>
`${file.file?.name ?? file.filename}-${file.size}-${file.type?.split('/')[0] ?? 'file'}`,
),
...fileList.map(
(file: File | undefined) =>
`${file?.name}-${file?.size}-${file?.type.split('/')[0] ?? 'file'}`,
),
];

const uniqueFilesSet = new Set(combinedFilesInfo);

if (uniqueFilesSet.size !== combinedFilesInfo.length) {
setError('com_error_files_dupe');
return false;
}

return true;
};
return true;
},
[files, fileLimit, fileSizeLimit, totalSizeLimit, supportedMimeTypes],
);

const loadImage = (extendedFile: ExtendedFile, preview: string) => {
const img = new Image();
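The hook now resolves per-endpoint file limits through a fallback chain instead of assuming the server-provided config is present. A sketch of the resolution order, mirroring the useMemo above; the endpoint value is illustrative.

// Resolution order for per-endpoint file limits (endpoint value is illustrative).
const endpoint = 'openAI';
const limits =
  fileConfig?.endpoints[endpoint] ??        // 1. custom server config for this endpoint
  fileConfig?.endpoints.default ??          // 2. custom server config default
  defaultFileConfig.endpoints[endpoint] ??  // 3. built-in config for this endpoint
  defaultFileConfig.endpoints.default;      // 4. built-in default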
