Skip to content

Commit eaf59db

Browse files
schiwekM and KoblerS authored
Support srv.run(INSERT.into(Attachments).entries(...)) (#305)
The PR: - cleans up the put implementations - supports `srv.run(INSERT.into(Attachments).entries(...))` that developers can insert attachments themselves at runtime using the regular CAP APIs. To support it the put functions are leveraged. --------- Co-authored-by: Simon Kobler <32038731+KoblerS@users.noreply.github.com>
1 parent c46c38c commit eaf59db

File tree

13 files changed

+361
-412
lines changed

13 files changed

+361
-412
lines changed

jest.config.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
const config = {
2-
testTimeout: 60000,
2+
testTimeout: 120000,
33
testMatch: ['**/*.test.js'],
44
setupFilesAfterEnv: ['<rootDir>/tests/setup.js'],
55
forceExit: true,

lib/aws-s3.js

Lines changed: 6 additions & 59 deletions
Original file line numberDiff line numberDiff line change
@@ -164,14 +164,13 @@ module.exports = class AWSAttachmentsService extends require("./object-store.js"
164164
/**
165165
* @inheritdoc
166166
*/
167-
async put(attachments, data, _content, isDraftEnabled) {
167+
async put(attachments, data) {
168168
const startTime = Date.now()
169169

170170
const tenantID = cds.context.tenant
171171

172172
logConfig.processStep('Starting file upload to S3', {
173173
attachmentEntity: attachments.name,
174-
isDraftEnabled,
175174
tenant: tenantID
176175
})
177176

@@ -194,11 +193,11 @@ module.exports = class AWSAttachmentsService extends require("./object-store.js"
194193
filenames: data.map(d => d.filename)
195194
})
196195
return Promise.all(
197-
data.map((d) => this.put(attachments, d, _content, isDraftEnabled))
196+
data.map((d) => this.put(attachments, d))
198197
)
199198
}
200199

201-
const { content = _content, ...metadata } = data
200+
const { content, ...metadata } = data
202201
const Key = metadata.url
203202

204203
if (!Key) {
@@ -235,12 +234,9 @@ module.exports = class AWSAttachmentsService extends require("./object-store.js"
235234
params: input,
236235
})
237236

238-
const stored = super.put(attachments, metadata, null, isDraftEnabled)
239-
240-
await Promise.all([stored, multipartUpload.done()])
241-
242-
const hash = await utils.computeHash(await this.get(attachments, { ID: metadata.ID }))
243-
await super.update(attachments, { ID: metadata.ID }, { hash })
237+
// The file upload has to be done first, so super.put can compute the hash
238+
await multipartUpload.done()
239+
await super.put(attachments, metadata)
244240

245241
const duration = Date.now() - startTime
246242
logConfig.debug('File upload to S3 completed successfully', {
@@ -347,55 +343,6 @@ module.exports = class AWSAttachmentsService extends require("./object-store.js"
347343
}
348344
}
349345

350-
/**
351-
* Registers attachment handlers for the given service and entity
352-
* @param {import('@sap/cds').Request} req - The request object
353-
* @param {import('express').NextFunction} next - The next middleware function
354-
*/
355-
async updateContentHandler(req, next) {
356-
logConfig.debug(`[AWS S3] Uploading file using updateContentHandler for ${req.target.name}`)
357-
358-
// Check separate object store instances
359-
if (separateObjectStore) {
360-
const tenantID = cds.context.tenant
361-
await this.createClientS3(tenantID)
362-
}
363-
364-
const targetID = req.data.ID || req.params[1]?.ID || req.params[1]
365-
if (!targetID) {
366-
req.reject(400, "Missing ID in request")
367-
}
368-
369-
if (req?.data?.content) {
370-
const response = await SELECT.from(req.target, { ID: targetID }).columns("url")
371-
if (response?.url) {
372-
const multipartUpload = new Upload({
373-
client: this.client,
374-
params: {
375-
Bucket: this.bucket,
376-
Key: response.url,
377-
Body: req.data.content,
378-
},
379-
})
380-
await multipartUpload.done()
381-
382-
const hash = await utils.computeHash(await this.get(req.target, { ID: targetID }))
383-
await super.update(req.target, { ID: targetID }, { hash })
384-
385-
const MalwareScanner = await cds.connect.to('malwareScanner')
386-
await MalwareScanner.emit('ScanFile', { target: req.target.name, keys: { ID: targetID } })
387-
388-
logConfig.debug(`[AWS S3] Uploaded file using updateContentHandler for ${req.target.name}`)
389-
}
390-
} else if (req?.data?.note) {
391-
const key = { ID: targetID }
392-
await super.update(req.target, key, { note: req.data.note })
393-
logConfig.debug(`[AWS S3] Updated file upload with note for ${req.target.name}`)
394-
} else {
395-
next()
396-
}
397-
}
398-
399346
/**
400347
* Deletes a file from S3 based on the provided key
401348
* @param {string} Key - The key of the file to delete

lib/azure-blob-storage.js

Lines changed: 30 additions & 78 deletions
Original file line numberDiff line numberDiff line change
@@ -34,11 +34,6 @@ module.exports = class AzureAttachmentsService extends require("./object-store")
3434
const creds = cds.env.requires?.objectStore?.credentials
3535

3636
if (!creds) {
37-
if (Object.keys(creds).includes('access_key_id')) {
38-
throw new Error('AWS S3 credentials found where Azure Blob Storage credentials expected, please check your service bindings.')
39-
} else if (Object.keys(creds).includes('projectId')) {
40-
throw new Error('Google Cloud Platform credentials found where Azure Blob Storage credentials expected, please check your service bindings.')
41-
}
4237
throw new Error("SAP Object Store instance is not bound.")
4338
}
4439

@@ -151,13 +146,12 @@ module.exports = class AzureAttachmentsService extends require("./object-store")
151146
/**
152147
* @inheritdoc
153148
*/
154-
async put(attachments, data, isDraftEnabled, _content, req) {
149+
async put(attachments, data) {
155150
const startTime = Date.now()
156151

157152
logConfig.processStep('Starting file upload to Azure Blob Storage', {
158153
attachmentEntity: attachments.name,
159-
isDraftEnabled,
160-
tenant: req?.tenant
154+
tenant: cds.context.tenant
161155
})
162156

163157
try {
@@ -180,26 +174,26 @@ module.exports = class AzureAttachmentsService extends require("./object-store")
180174
filenames: data.map(d => d.filename)
181175
})
182176
return Promise.all(
183-
data.map((d) => this.put(attachments, d, isDraftEnabled, _content, req))
177+
data.map((d) => this.put(attachments, d))
184178
)
185179
}
186180

187-
const { content = _content, ...metadata } = data
181+
let { content: _content, ...metadata } = data
188182
const blobName = metadata.url
189183

190184
if (!blobName) {
191185
logConfig.withSuggestion('error',
192186
'File key/URL is required for Azure Blob Storage upload', null,
193187
'Ensure attachment data includes a valid URL/key',
194-
{ metadata: { ...metadata, content: !!content } })
188+
{ metadata: { ...metadata, content: !!_content } })
195189
throw new Error('File key is required for upload')
196190
}
197191

198-
if (!content) {
192+
if (!_content) {
199193
logConfig.withSuggestion('error',
200194
'File content is required for Azure Blob Storage upload', null,
201195
'Ensure attachment data includes file content',
202-
{ key: blobName, hasContent: !!content })
196+
{ key: blobName, hasContent: !!_content })
203197
throw new Error('File content is required for upload')
204198
}
205199

@@ -209,11 +203,31 @@ module.exports = class AzureAttachmentsService extends require("./object-store")
209203
containerName: this.containerName,
210204
blobName,
211205
filename: metadata.filename,
212-
contentSize: content.length || content.size || 'unknown'
206+
contentSize: _content.length || _content.size || 'unknown'
213207
})
214208

215-
const stored = super.put(attachments, metadata, null, isDraftEnabled)
216-
await Promise.all([stored, blobClient.uploadData(content)])
209+
// Handle different content types for update
210+
let contentLength
211+
const content = _content
212+
if (Buffer.isBuffer(content)) {
213+
contentLength = content.length
214+
} else if (content && typeof content.length === 'number') {
215+
contentLength = content.length
216+
} else if (content && typeof content.size === 'number') {
217+
contentLength = content.size
218+
} else {
219+
// Convert to buffer if needed
220+
const chunks = []
221+
for await (const chunk of content) {
222+
chunks.push(chunk)
223+
}
224+
_content = Buffer.concat(chunks)
225+
contentLength = _content.length
226+
}
227+
228+
// The file upload has to be done first, so super.put can compute the hash
229+
await blobClient.upload(_content, contentLength)
230+
await super.put(attachments, metadata)
217231

218232
const duration = Date.now() - startTime
219233
logConfig.debug('File upload to Azure Blob Storage completed successfully', {
@@ -317,68 +331,6 @@ module.exports = class AzureAttachmentsService extends require("./object-store")
317331
}
318332
}
319333

320-
/**
321-
* Registers attachment handlers for the given service and entity
322-
* @param {import('@sap/cds').Request} req - The request object
323-
* @param {import('express').NextFunction} next - The next middleware function
324-
*/
325-
async updateContentHandler(req, next) {
326-
logConfig.debug(`[Azure] Uploading file using updateContentHandler for ${req.target.name}`)
327-
// Check separate object store instances
328-
if (separateObjectStore) {
329-
const tenantID = cds.context.tenant
330-
await this.createAzureClient(tenantID)
331-
}
332-
333-
const targetID = req.data.ID || req.params[1]?.ID || req.params[1]
334-
if (!targetID) {
335-
req.reject(400, "Missing ID in request")
336-
}
337-
338-
if (req?.data?.content) {
339-
const response = await SELECT.from(req.target, { ID: targetID }).columns("url")
340-
if (response?.url) {
341-
const blobName = response.url
342-
const blobClient = this.containerClient.getBlockBlobClient(blobName)
343-
344-
// Handle different content types for update
345-
let contentLength
346-
const content = req.data.content
347-
if (Buffer.isBuffer(content)) {
348-
contentLength = content.length
349-
} else if (content && typeof content.length === 'number') {
350-
contentLength = content.length
351-
} else if (content && typeof content.size === 'number') {
352-
contentLength = content.size
353-
} else {
354-
// Convert to buffer if needed
355-
const chunks = []
356-
for await (const chunk of content) {
357-
chunks.push(chunk)
358-
}
359-
req.data.content = Buffer.concat(chunks)
360-
contentLength = req.data.content.length
361-
}
362-
363-
await blobClient.upload(req.data.content, contentLength)
364-
365-
const hash = await utils.computeHash(await this.get(req.target, { ID: targetID }))
366-
await super.update(req.target, { ID: targetID }, { hash })
367-
368-
const MalwareScanner = await cds.connect.to('malwareScanner')
369-
await MalwareScanner.emit('ScanFile', { target: req.target.name, keys: { ID: targetID } })
370-
371-
logConfig.debug(`[Azure] Uploaded file using updateContentHandler for ${req.target.name}`)
372-
}
373-
} else if (req?.data?.note) {
374-
const key = { ID: targetID }
375-
await super.update(req.target, key, { note: req.data.note })
376-
logConfig.debug(`[Azure] Updated file upload with note for ${req.target.name}`)
377-
} else {
378-
next()
379-
}
380-
}
381-
382334
/**
383335
* Deletes a file from Azure Blob Storage
384336
* @param {string} Key - The key of the file to delete

0 commit comments

Comments
 (0)