@@ -8,6 +8,7 @@
     S3Client,
     S3ClientConfig
 } from '@aws-sdk/client-s3'
+import { Storage } from '@google-cloud/storage'
 import { Readable } from 'node:stream'
 import { getUserHome } from './utils'
 import sanitize from 'sanitize-filename'
@@ -34,6 +35,25 @@ export const addBase64FilesToStorage = async (fileBase64: string, chatflowid: st
         })
         await s3Client.send(putObjCmd)

+        fileNames.push(sanitizedFilename)
+        return 'FILE-STORAGE::' + JSON.stringify(fileNames)
+    } else if (storageType === 'gcs') {
+        const { bucket } = getGcsClient()
+        const splitDataURI = fileBase64.split(',')
+        const filename = splitDataURI.pop()?.split(':')[1] ?? ''
+        const bf = Buffer.from(splitDataURI.pop() || '', 'base64')
+        const mime = splitDataURI[0].split(':')[1].split(';')[0]
+        const sanitizedFilename = _sanitizeFilename(filename)
+        const normalizedChatflowid = chatflowid.replace(/\\/g, '/')
+        const normalizedFilename = sanitizedFilename.replace(/\\/g, '/')
+        const filePath = `${normalizedChatflowid}/${normalizedFilename}`
+        const file = bucket.file(filePath)
+        await new Promise<void>((resolve, reject) => {
+            file.createWriteStream({ contentType: mime, metadata: { contentEncoding: 'base64' } })
+                .on('error', (err) => reject(err))
+                .on('finish', () => resolve())
+                .end(bf)
+        })
         fileNames.push(sanitizedFilename)
         return 'FILE-STORAGE::' + JSON.stringify(fileNames)
     } else {
@@ -76,6 +96,20 @@ export const addArrayFilesToStorage = async (mime: string, bf: Buffer, fileName:
         await s3Client.send(putObjCmd)
         fileNames.push(sanitizedFilename)
         return 'FILE-STORAGE::' + JSON.stringify(fileNames)
+    } else if (storageType === 'gcs') {
+        const { bucket } = getGcsClient()
+        const normalizedPaths = paths.map((p) => p.replace(/\\/g, '/'))
+        const normalizedFilename = sanitizedFilename.replace(/\\/g, '/')
+        const filePath = [...normalizedPaths, normalizedFilename].join('/')
+        const file = bucket.file(filePath)
+        await new Promise<void>((resolve, reject) => {
+            file.createWriteStream()
+                .on('error', (err) => reject(err))
+                .on('finish', () => resolve())
+                .end(bf)
+        })
+        fileNames.push(sanitizedFilename)
+        return 'FILE-STORAGE::' + JSON.stringify(fileNames)
     } else {
         const dir = path.join(getStoragePath(), ...paths.map(_sanitizeFilename))
         if (!fs.existsSync(dir)) {
@@ -109,6 +143,19 @@ export const addSingleFileToStorage = async (mime: string, bf: Buffer, fileName:
         })
         await s3Client.send(putObjCmd)
         return 'FILE-STORAGE::' + sanitizedFilename
+    } else if (storageType === 'gcs') {
+        const { bucket } = getGcsClient()
+        const normalizedPaths = paths.map((p) => p.replace(/\\/g, '/'))
+        const normalizedFilename = sanitizedFilename.replace(/\\/g, '/')
+        const filePath = [...normalizedPaths, normalizedFilename].join('/')
+        const file = bucket.file(filePath)
+        await new Promise<void>((resolve, reject) => {
+            file.createWriteStream({ contentType: mime, metadata: { contentEncoding: 'base64' } })
+                .on('error', (err) => reject(err))
+                .on('finish', () => resolve())
+                .end(bf)
+        })
+        return 'FILE-STORAGE::' + sanitizedFilename
     } else {
         const dir = path.join(getStoragePath(), ...paths.map(_sanitizeFilename))
         if (!fs.existsSync(dir)) {
@@ -146,6 +193,11 @@ export const getFileFromUpload = async (filePath: string): Promise<Buffer> => {
         // @ts-ignore
         const buffer = Buffer.concat(response.Body.toArray())
         return buffer
+    } else if (storageType === 'gcs') {
+        const { bucket } = getGcsClient()
+        const file = bucket.file(filePath)
+        const [buffer] = await file.download()
+        return buffer
     } else {
         return fs.readFileSync(filePath)
     }
@@ -179,6 +231,14 @@ export const getFileFromStorage = async (file: string, ...paths: string[]): Prom
         // @ts-ignore
         const buffer = Buffer.concat(response.Body.toArray())
         return buffer
+    } else if (storageType === 'gcs') {
+        const { bucket } = getGcsClient()
+        const normalizedPaths = paths.map((p) => p.replace(/\\/g, '/'))
+        const normalizedFilename = sanitizedFilename.replace(/\\/g, '/')
+        const filePath = [...normalizedPaths, normalizedFilename].join('/')
+        const file = bucket.file(filePath)
+        const [buffer] = await file.download()
+        return buffer
     } else {
         const fileInStorage = path.join(getStoragePath(), ...paths.map(_sanitizeFilename), sanitizedFilename)
         return fs.readFileSync(fileInStorage)
@@ -208,6 +268,10 @@ export const removeFilesFromStorage = async (...paths: string[]) => {
             Key = Key.substring(1)
         }
         await _deleteS3Folder(Key)
+    } else if (storageType === 'gcs') {
+        const { bucket } = getGcsClient()
+        const normalizedPath = paths.map((p) => p.replace(/\\/g, '/')).join('/')
+        await bucket.deleteFiles({ prefix: `${normalizedPath}/` })
     } else {
         const directory = path.join(getStoragePath(), ...paths.map(_sanitizeFilename))
         _deleteLocalFolderRecursive(directory)
@@ -223,6 +287,9 @@ export const removeSpecificFileFromUpload = async (filePath: string) => {
             Key = Key.substring(1)
         }
         await _deleteS3Folder(Key)
+    } else if (storageType === 'gcs') {
+        const { bucket } = getGcsClient()
+        await bucket.file(filePath).delete()
     } else {
         fs.unlinkSync(filePath)
     }
@@ -237,6 +304,15 @@ export const removeSpecificFileFromStorage = async (...paths: string[]) => {
             Key = Key.substring(1)
         }
         await _deleteS3Folder(Key)
+    } else if (storageType === 'gcs') {
+        const { bucket } = getGcsClient()
+        const fileName = paths.pop()
+        if (fileName) {
+            const sanitizedFilename = _sanitizeFilename(fileName)
+            paths.push(sanitizedFilename)
+        }
+        const normalizedPath = paths.map((p) => p.replace(/\\/g, '/')).join('/')
+        await bucket.file(normalizedPath).delete()
     } else {
         const fileName = paths.pop()
         if (fileName) {
@@ -257,6 +333,10 @@ export const removeFolderFromStorage = async (...paths: string[]) => {
             Key = Key.substring(1)
         }
         await _deleteS3Folder(Key)
+    } else if (storageType === 'gcs') {
+        const { bucket } = getGcsClient()
+        const normalizedPath = paths.map((p) => p.replace(/\\/g, '/')).join('/')
+        await bucket.deleteFiles({ prefix: `${normalizedPath}/` })
     } else {
         const directory = path.join(getStoragePath(), ...paths.map(_sanitizeFilename))
         _deleteLocalFolderRecursive(directory, true)
@@ -355,6 +435,14 @@ export const streamStorageFile = async (
             const blob = await body.transformToByteArray()
             return Buffer.from(blob)
         }
+    } else if (storageType === 'gcs') {
+        const { bucket } = getGcsClient()
+        const normalizedChatflowId = chatflowId.replace(/\\/g, '/')
+        const normalizedChatId = chatId.replace(/\\/g, '/')
+        const normalizedFilename = sanitizedFilename.replace(/\\/g, '/')
+        const filePath = `${normalizedChatflowId}/${normalizedChatId}/${normalizedFilename}`
+        const [buffer] = await bucket.file(filePath).download()
+        return buffer
     } else {
         const filePath = path.join(getStoragePath(), chatflowId, chatId, sanitizedFilename)
         //raise error if file path is not absolute
@@ -372,6 +460,28 @@ export const streamStorageFile = async (
     }
 }

+export const getGcsClient = () => {
+    const pathToGcsCredential = process.env.GOOGLE_CLOUD_STORAGE_CREDENTIAL
+    const projectId = process.env.GOOGLE_CLOUD_STORAGE_PROJ_ID
+    const bucketName = process.env.GOOGLE_CLOUD_STORAGE_BUCKET_NAME
+
+    if (!pathToGcsCredential) {
+        throw new Error('GOOGLE_CLOUD_STORAGE_CREDENTIAL env variable is required')
+    }
+    if (!bucketName) {
+        throw new Error('GOOGLE_CLOUD_STORAGE_BUCKET_NAME env variable is required')
+    }
+
+    const storageConfig = {
+        keyFilename: pathToGcsCredential,
+        ...(projectId ? { projectId } : {})
+    }
+
+    const storage = new Storage(storageConfig)
+    const bucket = storage.bucket(bucketName)
+    return { storage, bucket }
+}
+
 export const getS3Config = () => {
     const accessKeyId = process.env.S3_STORAGE_ACCESS_KEY_ID
     const secretAccessKey = process.env.S3_STORAGE_SECRET_ACCESS_KEY
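For reference, a minimal sketch of how the new getGcsClient helper might be exercised once the three environment variables it reads are set. The credential path, project id, bucket name, and the exists() check are illustrative placeholders, not part of this change; only the variable names come from the diff above.

// Hypothetical usage (run inside an async context); all values are placeholders.
process.env.GOOGLE_CLOUD_STORAGE_CREDENTIAL = '/path/to/service-account.json'
process.env.GOOGLE_CLOUD_STORAGE_PROJ_ID = 'my-gcp-project' // optional: getGcsClient only spreads it in when set
process.env.GOOGLE_CLOUD_STORAGE_BUCKET_NAME = 'my-flowise-bucket'

const { bucket } = getGcsClient()
const [exists] = await bucket.exists() // @google-cloud/storage resolves to [boolean]
console.log(`GCS bucket "${bucket.name}" reachable:`, exists)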