diff --git a/src/source/delete.js b/src/source/delete.js index 3f199bf..a8f567e 100644 --- a/src/source/delete.js +++ b/src/source/delete.js @@ -12,31 +12,76 @@ import { Response } from '@adobe/fetch'; import { HelixStorage } from '@adobe/helix-shared-storage'; import { createErrorResponse } from '../contentbus/utils.js'; -import { deleteFolder } from './folder.js'; -import { getS3KeyFromInfo } from './utils.js'; +import { RequestInfo } from '../support/RequestInfo.js'; +import { CopyOptions, copyDocument, copyFolder } from './source-client.js'; +import { getDocPathFromS3Key, getS3Key, getS3KeyFromInfo } from './s3-path-utils.js'; /** - * Delete from the source bus. + * Trash a folder by moving all of its contents to the trash in the same folder structure. + * If the trash already contains a folder with this name, a base-36 encoded timestamp is appended. * * @param {import('../support/AdminContext').AdminContext} context context * @param {import('../support/RequestInfo').RequestInfo} info request info - * @return {Promise} response + * @returns {Promise} response, status 204 if successful. + */ +async function trashFolder(context, info) { + const bucket = HelixStorage.fromContext(context).sourceBus(); + + const destDir = `/.trash/${info.rawPath.split('/').at(-2)}`; + + // Ensure that there is no folder in the trash with this name yet + const listResp = await bucket.list(`${getS3Key(info.org, info.site, destDir)}/`, { shallow: true }); + const destPath = listResp.length > 0 ? 
`${destDir}-${Date.now().toString(36)}/` : `${destDir}/`; + + const srcKey = getS3Key(info.org, info.site, info.rawPath); + const newInfo = RequestInfo.clone(info, { path: destPath }); + const copyOpts = (sKey) => ({ addMetadata: { 'doc-path': getDocPathFromS3Key(sKey) } }); + + try { + await copyFolder(context, new CopyOptions({ + src: srcKey, info: newInfo, move: true, fnOpts: copyOpts, collOpts: { collision: 'unique' }, + })); + return new Response('', { status: 204 }); + } catch (e) { + const opts = { e, log: context.log }; + opts.status = e.$metadata?.httpStatusCode; + return createErrorResponse(opts); + } +} + +/** + * Delete from the source bus, which means moving it to the trash. Both + * documents and folders are supported. The trashed document gets an extra + * metadata field 'doc-path' which is the path where it was deleted from. + * + * @param {import('../support/AdminContext').AdminContext} context context + * @param {import('../support/RequestInfo').RequestInfo} info request info + * @return {Promise} response, status 204 if successful. + */ export async function deleteSource(context, info) { if (info.rawPath.endsWith('/')) { - return deleteFolder(context, info); + return trashFolder(context, info); } - const { log } = context; - const bucket = HelixStorage.fromContext(context).sourceBus(); - const key = getS3KeyFromInfo(info); + // Trash a document. 
+ const docName = info.rawPath.split('/').pop(); + const srcKey = getS3KeyFromInfo(info); + const newInfo = RequestInfo.clone(info, { path: `/.trash/${docName}` }); + const copyOpts = { + addMetadata: { + 'doc-path': info.resourcePath, + }, + }; + const copyOptions = new CopyOptions({ + src: srcKey, info: newInfo, move: true, opts: copyOpts, collOpts: { collision: 'unique' }, + }); try { - const resp = await bucket.remove(key); - return new Response('', { status: resp.$metadata?.httpStatusCode }); + await copyDocument(context, copyOptions); + return new Response('', { status: 204 }); } catch (e) { - const opts = { e, log }; - opts.status = e.status; + const opts = { e, log: context.log }; + opts.status = e.$metadata?.httpStatusCode; return createErrorResponse(opts); } } diff --git a/src/source/folder.js b/src/source/folder.js index a175a29..e20d631 100644 --- a/src/source/folder.js +++ b/src/source/folder.js @@ -16,7 +16,9 @@ import { sanitizePath } from '@adobe/helix-shared-string'; import { createErrorResponse } from '../contentbus/utils.js'; import { splitExtension } from '../support/RequestInfo.js'; import { StatusCodeError } from '../support/StatusCodeError.js'; -import { getS3Key, storeSourceFile, CONTENT_TYPES } from './utils.js'; +import { getS3Key } from './s3-path-utils.js'; +import { storeSourceFile } from './source-client.js'; +import { CONTENT_TYPES } from './utils.js'; /** * A folder is marked by a marker file. 
This allows folder to show up in bucket diff --git a/src/source/get.js b/src/source/get.js index 3dfaf9a..52841b9 100644 --- a/src/source/get.js +++ b/src/source/get.js @@ -11,8 +11,9 @@ */ import { createErrorResponse } from '../contentbus/utils.js'; import { listFolder } from './folder.js'; -import { accessSourceFile, getS3KeyFromInfo } from './utils.js'; -import { getOrListVersions, VERSION_FOLDER } from './versions.js'; +import { accessSourceFile, VERSION_FOLDER } from './source-client.js'; +import { getS3KeyFromInfo } from './s3-path-utils.js'; +import { getOrListVersions } from './versions.js'; async function accessSource(context, info, headRequest) { if (info.rawPath.endsWith('/')) { diff --git a/src/source/post.js b/src/source/post.js index cb32f4c..4c9417a 100644 --- a/src/source/post.js +++ b/src/source/post.js @@ -12,13 +12,9 @@ import { createErrorResponse } from '../contentbus/utils.js'; import { createFolder } from './folder.js'; import { checkConditionals } from './header-utils.js'; -import { - contentTypeFromExtension, - getS3KeyFromInfo, - getValidPayload, - storeSourceFile, -} from './utils.js'; -import { postVersion, VERSION_FOLDER } from './versions.js'; +import { getS3KeyFromInfo } from './s3-path-utils.js'; +import { storeSourceFile, createVersion, VERSION_FOLDER } from './source-client.js'; +import { contentTypeFromExtension, getValidPayload } from './utils.js'; /** * Handle POST requests to the source bus. @@ -38,7 +34,7 @@ export async function postSource(context, info) { const operation = String(context.data.operation || ''); const comment = String(context.data.comment || ''); - return postVersion(context, baseKey, operation, comment); + return createVersion(context, baseKey, operation, comment); } try { diff --git a/src/source/put.js b/src/source/put.js index 81317d3..d975f1d 100644 --- a/src/source/put.js +++ b/src/source/put.js @@ -10,172 +10,13 @@ * governing permissions and limitations under the License. 
*/ import { Response } from '@adobe/fetch'; -import processQueue from '@adobe/helix-shared-process-queue'; -import { HelixStorage } from '@adobe/helix-shared-storage'; -import { ulid } from 'ulid'; import { createErrorResponse } from '../contentbus/utils.js'; -import { StatusCodeError } from '../support/StatusCodeError.js'; import { checkConditionals } from './header-utils.js'; +import { getDocPathFromS3Key, getS3Key, getS3KeyFromInfo } from './s3-path-utils.js'; import { - contentTypeFromExtension, - getS3KeyFromInfo, - getS3Key, - getDocID, - getValidPayload, - storeSourceFile, - MAX_SOURCE_BUCKET_RETRY, -} from './utils.js'; -import { postVersion } from './versions.js'; - -/** - * Copy an S3 object and handle conflichts. - * - * @param {import('../support/AdminContext').AdminContext} context context - * @param {string} srcKey source S3 key - * @param {string} destKey destination S3 key - * @param {boolean} move true if this is a move operation - * @param {object} initialOpts metadata options for the copy operation - * @param {object} collOpts collision options (e.g { copy: 'overwrite' } ) - */ -async function copyWithRetry( - context, - srcKey, - destKey, - move, - initialOpts, - collOpts, -) { - const bucket = HelixStorage.fromContext(context).sourceBus(); - let opts = initialOpts; - - // We start with assuming that there is nothing at the destination, the happy path - let copyOpts = { IfNoneMatch: '*' }; - - const maxRetry = context.attributes.maxSourceBucketRetry ?? MAX_SOURCE_BUCKET_RETRY; - let attempt = 0; - while (true) { - try { - const allOpts = { copyOpts, ...opts }; - // eslint-disable-next-line no-await-in-loop - await bucket.copy(srcKey, destKey, allOpts); - - break; // copy was successful, break out of the loop - we're done! 
- } catch (e) { - attempt += 1; - if (attempt > maxRetry) { - throw e; - } - - const status = e.$metadata?.httpStatusCode; - - // As per S3 docs, retry on a 409 - if (status !== 409) { - if (status !== 412) { - throw e; - } - // 412: precondition failed - something is at the destination already. - - if (move) { - // TODO add move collision handling - throw new StatusCodeError('Collision: something is at the destination already', 409); - } else { - if (collOpts.copy !== 'overwrite') { - throw new StatusCodeError('Collision: something is at the destination already, no overwrite option provided', 409); - } - - // eslint-disable-next-line no-await-in-loop - const dest = await bucket.head(destKey); - - // version what's there before overwriting it, provide the destination ETag so that we - // know we're versioning what we just did a head() of. - // eslint-disable-next-line no-await-in-loop - const versionResp = await postVersion(context, destKey, 'copy', 'Version created before overwrite', dest.ETag); - if (versionResp.status !== 201) { - if (versionResp.status !== 412 && versionResp.status !== 409) { - throw new StatusCodeError('Failed to version the destination', versionResp.status); - } - } else { - // Creating the version was successful, so we can now copy over the destination. - - const getDestDocId = getDocID(dest); - - // If something is at the destination already, we copy over that file, but keep - // the doc ID from the destination as-is so that the destination keeps its history. 
- opts = { ...initialOpts, addMetadata: { 'doc-id': getDestDocId } }; - - // Now only copy over the destination if it's still the same as what we did a head() of - copyOpts = { IfMatch: dest.ETag }; - } - } - } - } - } - - if (move) { - const resp = await bucket.remove(srcKey); - if (resp.$metadata?.httpStatusCode !== 204) { - throw new StatusCodeError(`Failed to remove source: ${srcKey}`, resp.$metadata?.httpStatusCode); - } - } -} - -async function copyFile(context, srcKey, destKey, move, collOpts) { - const opts = {}; - if (!move) { - opts.addMetadata = { 'doc-id': ulid() }; - } - await copyWithRetry(context, srcKey, destKey, move, opts, collOpts); -} - -/** - * Copies a document from the source to the destination. - * - * @param {import('../support/AdminContext').AdminContext} context context - * @param {string} src source S3 key - * @param {import('../support/RequestInfo').RequestInfo} info destination info - * @param {boolean} move whether to move the source - * @param {object} collOpts collision options - * @returns {Promise>} the copied file details - */ -async function copyDocument(context, src, info, move, collOpts) { - const dst = getS3KeyFromInfo(info); - await copyFile(context, src, dst, move, collOpts); - return [{ src, dst }]; -} - -/** - * Copies a folder from the source to the destination. 
- * - * @param {import('../support/AdminContext').AdminContext} context context - * @param {string} srcKey source S3 key - * @param {import('../support/RequestInfo').RequestInfo} info destination info - * @param {boolean} move whether to move the source - * @param {object} collOpts collision options - * @returns {Promise>} the copied files - */ -async function copyFolder(context, srcKey, info, move, collOpts) { - const tasks = []; - const destKey = getS3Key(info.org, info.site, info.rawPath); - - if (destKey.startsWith(srcKey)) { - throw new StatusCodeError('Destination cannot be a subfolder of source', 400); - } - - const bucket = HelixStorage.fromContext(context).sourceBus(); - (await bucket.list(srcKey)).forEach((obj) => { - tasks.push({ - src: obj.key, - dst: `${destKey}${obj.path}`, - }); - }); - - const copied = []; - await processQueue(tasks, async (task) => { - await copyFile(context, task.src, task.dst, move, collOpts); - copied.push({ src: task.src, dst: task.dst }); - }); - return copied; -} + CopyOptions, copyFolder, copyDocument, storeSourceFile, +} from './source-client.js'; +import { contentTypeFromExtension, getValidPayload } from './utils.js'; /** * Copies a resource of a folder to the destination folder. If a folder is @@ -198,13 +39,22 @@ async function copySource(context, info, move, collOpts) { return createErrorResponse({ status: 400, msg: 'Source and destination type mismatch', log }); } + const copyOpts = new CopyOptions({ + src: srcKey, info, move, collOpts, + }); const copied = isFolder - ? await copyFolder(context, srcKey, info, move, collOpts) - : await copyDocument(context, srcKey, info, move, collOpts); + ? await copyFolder(context, copyOpts) + : await copyDocument(context, copyOpts); + + // The copied paths returned are without the org and site segments + const copiedPaths = copied.map((c) => ({ + src: getDocPathFromS3Key(c.src), + dst: getDocPathFromS3Key(c.dst), + })); const operation = move ? 
'moved' : 'copied'; return new Response({ - [operation]: copied, + [operation]: copiedPaths, }); } catch (e) { const opts = { e, log }; @@ -223,12 +73,9 @@ async function copySource(context, info, move, collOpts) { export async function putSource(context, info) { if (context.data.source) { const move = String(context.data.move) === 'true'; - const collOpts = {}; - if (move) { - collOpts.move = context.data.collision; - } else { - collOpts.copy = context.data.collision; - } + const collOpts = { + collision: context.data.collision, + }; return copySource(context, info, move, collOpts); } diff --git a/src/source/s3-path-utils.js b/src/source/s3-path-utils.js new file mode 100644 index 0000000..b1b7ba2 --- /dev/null +++ b/src/source/s3-path-utils.js @@ -0,0 +1,45 @@ +/* + * Copyright 2025 Adobe. All rights reserved. + * This file is licensed to you under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. You may obtain a copy + * of the License at https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under + * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS + * OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. + */ + +/** + * Get the S3 key from the organization, site, and path. + * + * @param {string} org organization + * @param {string} site site + * @param {string} path document path + * @returns {string} the S3 key + */ +export function getS3Key(org, site, path) { + return `${org}/${site}${path}`; +} + +/** + * Get the source bus key from the request info. 
+ * + * @param {import('../support/RequestInfo').RequestInfo} info request info + * @return {string} the source bus path + */ +export function getS3KeyFromInfo(info) { + const { org, site, resourcePath } = info; + return getS3Key(org, site, resourcePath); +} + +/** + * Get the document path from the source bus S3 key. + * + * @param {string} sKey source bus S3 key + * @returns {string} the document path + */ +export function getDocPathFromS3Key(sKey) { + const path = sKey.split('/').slice(2).join('/'); + return `/${path}`; +} diff --git a/src/source/source-client.js b/src/source/source-client.js new file mode 100644 index 0000000..42b86ac --- /dev/null +++ b/src/source/source-client.js @@ -0,0 +1,447 @@ +/* + * Copyright 2026 Adobe. All rights reserved. + * This file is licensed to you under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. You may obtain a copy + * of the License at https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under + * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS + * OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. 
+ */ +import { Response } from '@adobe/fetch'; +import processQueue from '@adobe/helix-shared-process-queue'; +import { HelixStorage } from '@adobe/helix-shared-storage'; +import { ulid } from 'ulid'; +import { createErrorResponse } from '../contentbus/utils.js'; +import { StatusCodeError } from '../support/StatusCodeError.js'; +import { getS3KeyFromInfo, getS3Key } from './s3-path-utils.js'; +import { getDocID, MAX_SOURCE_BUCKET_RETRY } from './utils.js'; + +export const VERSION_FOLDER = '.versions'; +export class CopyOptions { + /** + * @param {string} options.src source S3 key + * @param {import('../support/RequestInfo').RequestInfo} options.info destination info + * @param {boolean} options.move whether to move the source + * @param {object} [options.opts] additional options for the copy operation + * @param {function(string, string): object} [options.fnOpts] function returning per-file options, + * called with source and destination S3 keys. Used by folder copies. + * @param {object} options.collOpts collision options + */ + constructor({ + src, info, move, opts, fnOpts, collOpts, + }) { + this.src = src; + this.info = info; + this.move = move; + this.opts = opts; + this.fnOpts = fnOpts; + this.collOpts = collOpts; + } +} + +/** + * Get the headers for the source file response. + * + * @param {Object} meta The metadata that contains many of the headers + * @param {number} length The content length + * @return {Object} headers + */ +function getFileHeaders(meta, length) { + const headers = { + 'Content-Type': meta.ContentType, + 'Last-Modified': meta.LastModified.toUTCString(), + }; + if (length) { + headers['Content-Length'] = length; + } + if (meta.ETag) { + headers.ETag = meta.ETag; + } + return headers; +} + +/** + * Get the user from the context and return their email. + * If no user is found, return 'anonymous'. 
+ * + * @param {import('../support/AdminContext').AdminContext} context context + * @return {string} user or 'anonymous' + */ +function getUser(context) { + const { authInfo } = context; + return authInfo.resolveEmail() || 'anonymous'; +} + +/** + * Access a file from the source bus. + * + * @param {import('../support/AdminContext').AdminContext} context context + * @param {string} key key to access the file at storage + * @param {boolean} headRequest whether to return the headers only for a HEAD request + * @returns {Promise} response with the file body and metadata + */ +export async function accessSourceFile(context, key, headRequest) { + const bucket = HelixStorage.fromContext(context).sourceBus(); + if (headRequest) { + const head = await bucket.head(key); + if (!head) { + return new Response('', { status: 404 }); + } + + const length = head.Metadata?.['uncompressed-length'] || head.ContentLength; + const headers = getFileHeaders(head, length); + return new Response('', { status: head.$metadata.httpStatusCode, headers }); + } else { + const meta = {}; + const body = await bucket.get(key, meta); + if (!body) { + return new Response('', { status: 404 }); + } + + const headers = getFileHeaders(meta, body.length); + return new Response(body, { status: 200, headers }); + } +} + +/** + * Create a version of the source file. 
+ * + * @param {import('../support/AdminContext').AdminContext} context context + * @param {string} baseKey base key of the source file, must not start with a slash + * @param {string} operation operation that triggered the version creation + * @param {string} comment comment for the version + * @param {string} etag ETag of the source file to version (optional) + * @returns {Promise} response with the file body and metadata + */ +export async function createVersion(context, baseKey, operation, comment, etag) { + if (baseKey.startsWith('/')) { + return new Response('', { status: 400 }); + } + + const { org, site } = context.config; + + try { + const bucket = HelixStorage.fromContext(context).sourceBus(); + + const maxRetry = context.attributes.maxSourceBucketRetry ?? MAX_SOURCE_BUCKET_RETRY; + let attempt = 0; + while (true) { + try { + // eslint-disable-next-line no-await-in-loop + const head = await bucket.head(baseKey); + if (!head) { + return new Response('', { status: 404 }); + } + + const id = getDocID(head); + const versionFolderKey = `${org}/${site}/${VERSION_FOLDER}/${id}/`; + const pathName = `/${baseKey.split('/').slice(2).join('/')}`; + + const versionId = ulid(); + const versionKey = `${versionFolderKey}${versionId}`; + + const addMetadata = { + 'doc-path-hint': pathName, + 'doc-last-modified': head.LastModified.toISOString(), + 'version-by': getUser(context), + ...(comment && { 'version-comment': comment }), + ...(operation && { 'version-operation': operation }), + }; + const renameMetadata = { + 'last-modified-by': 'doc-last-modified-by', + }; + const copyOpts = { CopySourceIfMatch: etag || head.ETag }; + + // eslint-disable-next-line no-await-in-loop + await bucket.copy(baseKey, versionKey, { addMetadata, renameMetadata, copyOpts }); + + const headers = { + Location: `/${org}/sites/${site}/source${pathName}/${VERSION_FOLDER}/${versionId}`, + }; + + // copy was successful, we're done + return new Response('', { status: 201, headers }); + } catch (e) { 
+ attempt += 1; + if (attempt > maxRetry) { + throw e; + } + + // Retry if we received a 412 precondition failed, but not if the etag was provided to + // this function (because in that case looping won't refresh the etag). + if (e.$metadata?.httpStatusCode !== 412 || etag) { + throw e; + } + + // We end up here when the response is a 412 Precondition Failed, which means that + // the document that we're about to version has been changed since we obtained + // its metadata. We need to redo the operation with fresh metadata. + } + } + } catch (e) { + const opts = { e, log: context.log }; + opts.status = e.$metadata?.httpStatusCode; + return createErrorResponse(opts); + } +} + +/** + * Make the destination key unique by appending a base-36 encoded timestamp to the name + * before the extension. This way the names will be alphabetically sortable and unique. + * @param {string} destKey destination S3 key + * @returns {string} unique destination S3 key + */ +function getUniqueDestinationKey(destKey) { + const ext = `.${destKey.split('.').pop()}`; + const destWithoutExt = destKey.slice(0, -ext.length); + + const ts = Date.now().toString(36); + return `${destWithoutExt}-${ts}${ext}`; +} + +/** + * Create a version of the destination document. Then we adopt the document ID of + * the existing document, so that the previous versions of it appear in its version + * history, also after copying. 
+ * + * @param {import('../support/AdminContext').AdminContext} context context + * @param {string} destKey destination S3 key + * @param {object} initialOpts initial options for the copy operation + * @returns {Promise<{mdOpts: object, copyOpts: object}>} retry options +*/ +async function versionBeforeOverwrite(context, destKey, initialOpts) { + const bucket = HelixStorage.fromContext(context).sourceBus(); + const dest = await bucket.head(destKey); + + // version what's there before overwriting it, provide the destination ETag so that we + // know we're versioning what we just did a head() of. + const versionResp = await createVersion(context, destKey, 'copy', 'Version created before overwrite', dest.ETag); + if (versionResp.status !== 201) { + if (versionResp.status !== 412 && versionResp.status !== 409) { + throw new StatusCodeError('Failed to version the destination', versionResp.status); + } + + // If we receive a 412 or 409 from the version creation, we just retry + return { mdOpts: initialOpts, copyOpts: { IfNoneMatch: '*' } }; + } + + // Creating the version was successful, so we can now copy over the destination. + + const getDestDocId = getDocID(dest); + + // If something is at the destination already, we copy over that file, but keep + // the doc ID from the destination as-is so that the destination keeps its history. 
+ const mdOpts = { ...initialOpts, addMetadata: { 'doc-id': getDestDocId } }; + + // Now only copy over the destination if it's still the same as what we did a head() of + return { mdOpts, copyOpts: { IfMatch: dest.ETag } }; +} + +/** + * + * @param {Error} err error from the copy operation + * @param {import('../support/AdminContext').AdminContext} context context + * @param {string} destinationKey destination S3 key + * @param {object} mdOpts metadata options for the copy operation + * @param {object} collOpts collision options + * @param {number} curAttempt current attempt number + * @returns {Promise<{destinationKey: string, mdOpts: object, copyOpts: object, attempt: number}>} + * retry options + */ +async function getCopyRetryOpts(err, context, destinationKey, mdOpts, collOpts, curAttempt) { + const attempt = curAttempt + 1; + const maxRetry = context.attributes.maxSourceBucketRetry ?? MAX_SOURCE_BUCKET_RETRY; + if (attempt > maxRetry) { + throw err; + } + + const status = err.$metadata?.httpStatusCode; + + // As per S3 docs, just retry on a 409 with the same options + if (status === 409) { + return { + destinationKey, + mdOpts, + copyOpts: { IfNoneMatch: '*' }, + attempt, + }; + } + + if (status !== 412) { + throw err; + } + + // 412: precondition failed - something is at the destination already. + if (collOpts.collision === 'unique') { + return { + destinationKey: getUniqueDestinationKey(destinationKey), + mdOpts, + copyOpts: { IfNoneMatch: '*' }, + attempt, + }; + } + + if (collOpts.collision === 'overwrite') { + const { mdOpts: mdo, copyOpts } = await versionBeforeOverwrite(context, destinationKey, mdOpts); + return { + destinationKey, + mdOpts: mdo, + copyOpts, + attempt, + }; + } + + throw new StatusCodeError('Collision: something is at the destination already', 409); +} + +/** + * Copy a file and handle conflicts, if the destination already exists. It will then retry + * after taking the specified action to handle the conflict. 
+ * + * @param {import('../support/AdminContext').AdminContext} context context + * @param {string} srcKey source S3 key + * @param {string} destKey destination S3 key + * @param {boolean} move true if this is a move operation + * @param {object} mdOptions metadata options for the copy operation + * @param {object} collOpts collision options (e.g { collision: 'overwrite' } ), + * these collision options are used to handle conflicts when copying the source to the + * destination. + * - 'overwrite' - overwrite the destination if it exists, but create a version of + * the destination first. + * - 'unique' - append a (base-36) encoded timestamp to the destination key to make it unique. + * These timestamps are alphabetically sortable. + */ +async function copyWithRetry( + context, + srcKey, + destKey, + move, + mdOptions, + collOpts, +) { + const bucket = HelixStorage.fromContext(context).sourceBus(); + let destinationKey = destKey; + let mdOpts = mdOptions; + + // We start with assuming that there is nothing at the destination, the happy path + let copyOpts = { IfNoneMatch: '*' }; + + let attempt = 0; + while (true) { + try { + const allOpts = { copyOpts, ...mdOpts }; + // eslint-disable-next-line no-await-in-loop + await bucket.copy(srcKey, destinationKey, allOpts); + + break; // copy was successful, break out of the loop - we're done! 
+ } catch (e) { + // eslint-disable-next-line no-await-in-loop + const retryOpts = await getCopyRetryOpts(e, context, destKey, mdOptions, collOpts, attempt); + ({ + destinationKey, + mdOpts, + copyOpts, + attempt, + } = retryOpts); + } + } + + if (move) { + const resp = await bucket.remove(srcKey); + if (resp.$metadata?.httpStatusCode !== 204) { + throw new StatusCodeError(`Failed to remove source: ${srcKey}`, resp.$metadata?.httpStatusCode); + } + } +} + +async function copyFile(context, srcKey, destKey, move, opts, collOpts) { + const copyOpts = { ...opts }; + if (!move) { + copyOpts.addMetadata = { 'doc-id': ulid() }; + } + await copyWithRetry(context, srcKey, destKey, move, copyOpts, collOpts); +} + +/** + * Copies a document from the source to the destination. + * + * @param {import('../support/AdminContext').AdminContext} context context + * @param {CopyOptions} copyOptions copy options + * @returns {Promise>} the copied file details + */ +export async function copyDocument(context, copyOptions) { + const { + src, info, move, opts, collOpts, + } = copyOptions; + const dst = getS3KeyFromInfo(info); + await copyFile(context, src, dst, move, opts, collOpts); + return [{ src, dst }]; +} + +/** + * Copies a folder from the source to the destination. 
+ * + * @param {import('../support/AdminContext').AdminContext} context context + * @param {CopyOptions} copyOptions copy options + * @returns {Promise>} the copied files + */ +export async function copyFolder(context, copyOptions) { + const { + src: srcKey, info, move, fnOpts = () => ({}), collOpts, + } = copyOptions; + const tasks = []; + const destKey = getS3Key(info.org, info.site, info.rawPath); + + if (destKey.startsWith(srcKey)) { + throw new StatusCodeError('Destination cannot be a subfolder of source', 400); + } + + const bucket = HelixStorage.fromContext(context).sourceBus(); + (await bucket.list(srcKey)).forEach((obj) => { + tasks.push({ + src: obj.key, + dst: `${destKey}${obj.path}`, + }); + }); + + if (tasks.length === 0) { + // Nothing found at source + throw new StatusCodeError('Not found', 404); + } + + const copied = []; + await processQueue(tasks, async (task) => { + const opts = fnOpts(task.src, task.dst); + await copyFile(context, task.src, task.dst, move, opts, collOpts); + copied.push({ src: task.src, dst: task.dst }); + }); + return copied; +} + +/** + * Store file based on key and body in the source bus. + * The file is assumed to have already been validated. + * + * @param {import('../support/AdminContext').AdminContext} context context + * @param {string} key key to store the file at (including extension) + * @param {string} mime the mime type of the file + * @param {Buffer} body content body + * @returns {Promise} response + */ +export async function storeSourceFile(context, key, mime, body) { + const bucket = HelixStorage.fromContext(context).sourceBus(); + + const head = await bucket.head(key); + const id = head?.Metadata?.['doc-id'] || ulid(); + + const resp = await bucket.put(key, body, mime, { + 'Last-Modified-By': getUser(context), + 'Uncompressed-Length': String(body.length), + 'doc-id': id, + }, true); + + const status = resp.$metadata.httpStatusCode === 200 ? 
201 : resp.$metadata.httpStatusCode; + return new Response('', { status }); +} diff --git a/src/source/utils.js b/src/source/utils.js index 1619011..60f2907 100644 --- a/src/source/utils.js +++ b/src/source/utils.js @@ -9,16 +9,12 @@ * OF ANY KIND, either express or implied. See the License for the specific language * governing permissions and limitations under the License. */ - -import { Response } from '@adobe/fetch'; import { maxSizeMediaFilter, MediaHandler } from '@adobe/helix-mediahandler'; import processQueue from '@adobe/helix-shared-process-queue'; -import { HelixStorage } from '@adobe/helix-shared-storage'; import { format } from 'hast-util-format'; import { fromHtml } from 'hast-util-from-html'; import { select } from 'hast-util-select'; import { toHtml } from 'hast-util-to-html'; -import { ulid } from 'ulid'; import { visit, CONTINUE } from 'unist-util-visit'; import { MEDIA_TYPES } from '../media/validate.js'; import { StatusCodeError } from '../support/StatusCodeError.js'; @@ -107,29 +103,6 @@ function getHast(body) { }); } -/** - * Get the S3 key from the organization, site, and path. - * - * @param {string} org organization - * @param {string} site site - * @param {string} path document path - * @returns {string} the S3 key - */ -export function getS3Key(org, site, path) { - return `${org}/${site}${path}`; -} - -/** - * Get the source bus key from the request info. - * - * @param {import('../support/RequestInfo').RequestInfo} info request info - * @return {string} the source bus path - */ -export function getS3KeyFromInfo(info) { - const { org, site, resourcePath } = info; - return getS3Key(org, site, resourcePath); -} - /** * Get the document ID from the head, by reading it from the Metadata. * @@ -349,92 +322,3 @@ export async function getValidPayload(context, info, mime, internImages) { } return body; } - -/** - * Get the headers for the source file response. 
- * - * @param {Object} meta The metadata that contains many of the headers - * @param {number} length The content length - * @return {Object} headers - */ -export function getFileHeaders(meta, length) { - const headers = { - 'Content-Type': meta.ContentType, - 'Last-Modified': meta.LastModified.toUTCString(), - }; - if (length) { - headers['Content-Length'] = length; - } - if (meta.ETag) { - headers.ETag = meta.ETag; - } - return headers; -} - -/** - * Access a file from the source bus. - * - * @param {import('../support/AdminContext').AdminContext} context context - * @param {string} key key to access the file at storage - * @param {boolean} headRequest whether to return the headers only for a HEAD request - * @returns {Promise} response with the file body and metadata - */ -export async function accessSourceFile(context, key, headRequest) { - const bucket = HelixStorage.fromContext(context).sourceBus(); - if (headRequest) { - const head = await bucket.head(key); - if (!head) { - return new Response('', { status: 404 }); - } - - const length = head.Metadata?.['uncompressed-length'] || head.ContentLength; - const headers = getFileHeaders(head, length); - return new Response('', { status: head.$metadata.httpStatusCode, headers }); - } else { - const meta = {}; - const body = await bucket.get(key, meta); - if (!body) { - return new Response('', { status: 404 }); - } - - const headers = getFileHeaders(meta, body.length); - return new Response(body, { status: 200, headers }); - } -} - -/** - * Get the user from the context and return their email. - * If no user is found, return 'anonymous'. - * - * @param {import('../support/AdminContext').AdminContext} context context - * @return {string} user or 'anonymous' - */ -export function getUser(context) { - return context.authInfo.profile?.email || 'anonymous'; -} - -/** - * Store file based on key and body in the source bus. - * The file is assumes already have been validated. 
- * - * @param {import('../support/AdminContext').AdminContext} context context - * @param {string} key key to store the file at (including extension) - * @param {string} mime the mime type of the file - * @param {Buffer} body content body - * @returns {Promise} response - */ -export async function storeSourceFile(context, key, mime, body) { - const bucket = HelixStorage.fromContext(context).sourceBus(); - - const head = await bucket.head(key); - const id = head?.Metadata?.['doc-id'] || ulid(); - - const resp = await bucket.put(key, body, mime, { - 'Last-Modified-By': getUser(context), - 'Uncompressed-Length': String(body.length), - 'doc-id': id, - }, true); - - const status = resp.$metadata.httpStatusCode === 200 ? 201 : resp.$metadata.httpStatusCode; - return new Response('', { status }); -} diff --git a/src/source/versions.js b/src/source/versions.js index fb56f31..136f523 100644 --- a/src/source/versions.js +++ b/src/source/versions.js @@ -13,19 +13,12 @@ import { Response } from '@adobe/fetch'; import processQueue from '@adobe/helix-shared-process-queue'; import { HelixStorage } from '@adobe/helix-shared-storage'; -import { isValid, ulid } from 'ulid'; +import { isValid } from 'ulid'; import { createErrorResponse } from '../contentbus/utils.js'; import { StatusCodeError } from '../support/StatusCodeError.js'; - -import { - accessSourceFile, - getS3Key, - getDocID, - getUser, - MAX_SOURCE_BUCKET_RETRY, -} from './utils.js'; - -export const VERSION_FOLDER = '.versions'; +import { getS3Key } from './s3-path-utils.js'; +import { accessSourceFile, VERSION_FOLDER } from './source-client.js'; +import { getDocID } from './utils.js'; function handleNoVersions() { const headers = { @@ -135,85 +128,3 @@ export async function getOrListVersions(context, info, headRequest) { return createErrorResponse(opts); } } - -/** - * Create a version of the source file. 
- * - * @param {import('../support/AdminContext').AdminContext} context context - * @param {string} baseKey base key of the source file, must not start with a slash - * @param {string} operation operation that triggered the version creation - * @param {string} comment comment for the version - * @param {string} etag ETag of the source file to version (optional) - * @returns {Promise} response with the file body and metadata - */ -export async function postVersion(context, baseKey, operation, comment, etag) { - if (baseKey.startsWith('/')) { - return new Response('', { status: 400 }); - } - - const { org, site } = context.config; - - try { - const bucket = HelixStorage.fromContext(context).sourceBus(); - - const maxRetry = context.attributes.maxSourceBucketRetry ?? MAX_SOURCE_BUCKET_RETRY; - let attempt = 0; - while (true) { - try { - // eslint-disable-next-line no-await-in-loop - const head = await bucket.head(baseKey); - if (!head) { - return new Response('', { status: 404 }); - } - - const id = getDocID(head); - const versionFolderKey = `${org}/${site}/${VERSION_FOLDER}/${id}/`; - const pathName = `/${baseKey.split('/').slice(2).join('/')}`; - - const versionId = ulid(); - const versionKey = `${versionFolderKey}${versionId}`; - - const addMetadata = { - 'doc-path-hint': pathName, - 'doc-last-modified': head.LastModified.toISOString(), - 'version-by': getUser(context), - ...(comment && { 'version-comment': comment }), - ...(operation && { 'version-operation': operation }), - }; - const renameMetadata = { - 'last-modified-by': 'doc-last-modified-by', - }; - const copyOpts = { CopySourceIfMatch: etag || head.ETag }; - - // eslint-disable-next-line no-await-in-loop - await bucket.copy(baseKey, versionKey, { addMetadata, renameMetadata, copyOpts }); - - const headers = { - Location: `/${org}/sites/${site}/source${pathName}/${VERSION_FOLDER}/${versionId}`, - }; - - // copy was successful, we're done - return new Response('', { status: 201, headers }); - } catch (e) { - 
attempt += 1; - if (attempt > maxRetry) { - throw e; - } - - // Retry if we received a 412 precondition failed, but not if the etag was provided to - // this function (because in that case looping were won't refesh the etag). - if (e.$metadata?.httpStatusCode !== 412 || etag) { - throw e; - } - - // We end up when the response is a 412 Precondition Failed, which means that - // the document that we're about to version has been changed since we obtained - // its metadata. We need to redo the operation with fresh metadata. - } - } - } catch (e) { - const opts = { e, log: context.log }; - opts.status = e.$metadata?.httpStatusCode; - return createErrorResponse(opts); - } -} diff --git a/test/source/delete.test.js b/test/source/delete.test.js index 3f76a13..edc758b 100644 --- a/test/source/delete.test.js +++ b/test/source/delete.test.js @@ -12,6 +12,7 @@ /* eslint-env mocha */ import assert from 'assert'; +import xml2js from 'xml2js'; import { deleteSource } from '../../src/source/delete.js'; import { createInfo, Nock } from '../utils.js'; import { setupContext } from './testutils.js'; @@ -30,7 +31,21 @@ describe('Source Delete Tests', () => { nock.done(); }); - it('test deleteSource', async () => { + it('test deleteSource moves to trash', async () => { + nock.source() + .headObject('/test/rest/toast/jam.html') + .reply(200, null, { + 'last-modified': 'Tue, 25 Oct 2022 02:57:46 GMT', + }); + nock.source() + .copyObject('/test/rest/.trash/jam.html') + .matchHeader('x-amz-copy-source', 'helix-source-bus/test/rest/toast/jam.html') + .matchHeader('if-none-match', '*') + .reply(200, new xml2js.Builder().buildObject({ + CopyObjectResult: { + ETag: '314159', + }, + })); nock.source() .deleteObject('/test/rest/toast/jam.html') .reply(204); @@ -42,11 +57,234 @@ describe('Source Delete Tests', () => { it('test deleteSource propagates S3 errors', async () => { nock.source() - .deleteObject('/test/rest/toast/error.html') + .headObject('/test/rest/toast/error.html') .reply(503); 
const info = createInfo('/test/sites/rest/source/toast/error.html'); const resp = await deleteSource(context, info); assert.equal(resp.status, 503); }); + + it('test delete folder', async () => { + nock.listObjects('helix-source-bus', 'org1/site2/.trash/b/', []); + nock.listObjects('helix-source-bus', 'org1/site2/a/b/', [ + { Key: 'c/some.json' }, + { Key: 'c/my.pdf' }, + { Key: 'page.html' }, + ], ''); + + nock.source() + .headObject('/org1/site2/a/b/c/some.json') + .reply(200, null, { + 'last-modified': 'Tue, 25 Oct 2022 02:57:46 GMT', + }); + nock.source() + .headObject('/org1/site2/a/b/c/my.pdf') + .reply(200, null, { + 'last-modified': 'Tue, 25 Oct 2022 02:57:46 GMT', + }); + nock.source() + .headObject('/org1/site2/a/b/page.html') + .reply(200, null, { + 'last-modified': 'Tue, 25 Oct 2022 02:57:46 GMT', + }); + nock.source() + .copyObject('/org1/site2/.trash/b/c/some.json') + .matchHeader('x-amz-copy-source', 'helix-source-bus/org1/site2/a/b/c/some.json') + .matchHeader('if-none-match', '*') + .reply(200, new xml2js.Builder().buildObject({ + CopyObjectResult: { + ETag: '314159', + }, + })); + nock.source() + .copyObject('/org1/site2/.trash/b/c/my.pdf') + .matchHeader('x-amz-copy-source', 'helix-source-bus/org1/site2/a/b/c/my.pdf') + .matchHeader('if-none-match', '*') + .reply(200, new xml2js.Builder().buildObject({ + CopyObjectResult: { + ETag: '314159', + }, + })); + nock.source() + .copyObject('/org1/site2/.trash/b/page.html') + .matchHeader('x-amz-copy-source', 'helix-source-bus/org1/site2/a/b/page.html') + .matchHeader('if-none-match', '*') + .reply(200, new xml2js.Builder().buildObject({ + CopyObjectResult: { + ETag: '314159', + }, + })); + + nock.source() + .deleteObject('/org1/site2/a/b/c/some.json') + .reply(204); + nock.source() + .deleteObject('/org1/site2/a/b/c/my.pdf') + .reply(204); + nock.source() + .deleteObject('/org1/site2/a/b/page.html') + .reply(204); + const info = createInfo('/org1/sites/site2/source/a/b/'); + const resp = await 
deleteSource(context, info); + assert.equal(resp.status, 204); + }); + + it('test delete folder which is already in the trash', async () => { + nock.listObjects('helix-source-bus', 'org1/site2/.trash/b/', [ + { Key: 'hello.html' }, + ]); + nock.listObjects('helix-source-bus', 'org1/site2/a/b/', [ + { Key: 'c/some.json' }, + { Key: 'c/my.pdf' }, + { Key: 'page.html' }, + ], ''); + + nock.source() + .headObject('/org1/site2/a/b/c/some.json') + .reply(200, null, { + 'last-modified': 'Tue, 25 Oct 2022 02:57:46 GMT', + }); + nock.source() + .headObject('/org1/site2/a/b/c/my.pdf') + .reply(200, null, { + 'last-modified': 'Tue, 25 Oct 2022 02:57:46 GMT', + }); + nock.source() + .headObject('/org1/site2/a/b/page.html') + .reply(200, null, { + 'last-modified': 'Tue, 25 Oct 2022 02:57:46 GMT', + }); + + // Note that the difference is here: the target folder (b) has a suffix of + // 8 characters to make it unique. + nock.source() + .copyObject(/^\/org1\/site2\/.trash\/b-.{8}\/c\/some.json$/) + .matchHeader('x-amz-copy-source', 'helix-source-bus/org1/site2/a/b/c/some.json') + .matchHeader('if-none-match', '*') + .reply(200, new xml2js.Builder().buildObject({ + CopyObjectResult: { + ETag: '314159', + }, + })); + nock.source() + .copyObject(/^\/org1\/site2\/.trash\/b-.{8}\/c\/my.pdf$/) + .matchHeader('x-amz-copy-source', 'helix-source-bus/org1/site2/a/b/c/my.pdf') + .matchHeader('if-none-match', '*') + .reply(200, new xml2js.Builder().buildObject({ + CopyObjectResult: { + ETag: '314159', + }, + })); + nock.source() + .copyObject(/^\/org1\/site2\/.trash\/b-.{8}\/page.html$/) + .matchHeader('x-amz-copy-source', 'helix-source-bus/org1/site2/a/b/page.html') + .matchHeader('if-none-match', '*') + .reply(200, new xml2js.Builder().buildObject({ + CopyObjectResult: { + ETag: '314159', + }, + })); + + nock.source() + .deleteObject('/org1/site2/a/b/c/some.json') + .reply(204); + nock.source() + .deleteObject('/org1/site2/a/b/c/my.pdf') + .reply(204); + nock.source() + 
.deleteObject('/org1/site2/a/b/page.html') + .reply(204); + + const info = createInfo('/org1/sites/site2/source/a/b/'); + const resp = await deleteSource(context, info); + assert.equal(resp.status, 204); + }); + + it('test delete folder not found', async () => { + nock.listObjects('helix-source-bus', 'org1/site2/.trash/nope/', []); + nock.listObjects('helix-source-bus', 'org1/site2/nope/', [], ''); + const info = createInfo('/org1/sites/site2/source/nope/'); + const resp = await deleteSource(context, info); + assert.equal(resp.status, 404); + }); + + it('test delete folder with file error', async () => { + nock.listObjects('helix-source-bus', 'org1/site2/.trash/b/', []); + nock.listObjects('helix-source-bus', 'org1/site2/a/b/', [ + { Key: 'c/some.json' }, + { Key: 'c/my.pdf' }, + { Key: 'page.html' }, + ], ''); + + nock.source() + .headObject('/org1/site2/a/b/c/some.json') + .reply(200, null, { + 'last-modified': 'Tue, 25 Oct 2022 02:57:46 GMT', + }); + nock.source() + .headObject('/org1/site2/a/b/c/my.pdf') + .reply(200, null, { + 'last-modified': 'Tue, 25 Oct 2022 02:57:46 GMT', + }); + nock.source() + .headObject('/org1/site2/a/b/page.html') + .reply(200, null, { + 'last-modified': 'Tue, 25 Oct 2022 02:57:46 GMT', + }); + nock.source() + .copyObject('/org1/site2/.trash/b/c/some.json') + .matchHeader('x-amz-copy-source', 'helix-source-bus/org1/site2/a/b/c/some.json') + .matchHeader('if-none-match', '*') + .reply(200, new xml2js.Builder().buildObject({ + CopyObjectResult: { + ETag: '314159', + }, + })); + nock.source() + .copyObject('/org1/site2/.trash/b/c/my.pdf') + .matchHeader('x-amz-copy-source', 'helix-source-bus/org1/site2/a/b/c/my.pdf') + .matchHeader('if-none-match', '*') + .reply(200, new xml2js.Builder().buildObject({ + CopyObjectResult: { + ETag: '314159', + }, + })); + nock.source() + .copyObject('/org1/site2/.trash/b/page.html') + .matchHeader('x-amz-copy-source', 'helix-source-bus/org1/site2/a/b/page.html') + .matchHeader('if-none-match', '*') + 
.reply(200, new xml2js.Builder().buildObject({ + CopyObjectResult: { + ETag: '314159', + }, + })); + + nock.source() + .deleteObject('/org1/site2/a/b/c/some.json') + .reply(500); + nock.source() + .deleteObject('/org1/site2/a/b/c/my.pdf') + .reply(204); + nock.source() + .deleteObject('/org1/site2/a/b/page.html') + .reply(500); + const info = createInfo('/org1/sites/site2/source/a/b/'); + const resp = await deleteSource(context, info); + assert.equal(resp.status, 500); + }); + + it('test delete folder error', async () => { + nock.listObjects('helix-source-bus', 'org1/site2/.trash/nope/', []); + nock('https://helix-source-bus.s3.us-east-1.amazonaws.com') + .get('/') + .query({ + 'list-type': '2', + prefix: 'org1/site2/nope/', + }) + .reply(503); + const info = createInfo('/org1/sites/site2/source/nope/'); + const resp = await deleteSource(context, info); + assert.equal(resp.status, 503); + }); }); diff --git a/test/source/folder.test.js b/test/source/folder.test.js index ebe9ef0..bf1e824 100644 --- a/test/source/folder.test.js +++ b/test/source/folder.test.js @@ -13,7 +13,7 @@ /* eslint-env mocha */ /* eslint-disable no-param-reassign */ import assert from 'assert'; -import { deleteSource } from '../../src/source/delete.js'; +import { deleteFolder } from '../../src/source/folder.js'; import { getSource, headSource } from '../../src/source/get.js'; import { postSource } from '../../src/source/post.js'; import { createInfo, Nock } from '../utils.js'; @@ -274,7 +274,7 @@ describe('Source List Tests', () => { .deleteObject('/org1/site2/a/b/page.html') .reply(204); const info = createInfo('/org1/sites/site2/source/a/b/'); - const resp = await deleteSource(context, info); + const resp = await deleteFolder(context, info); assert.equal(resp.status, 204); }); @@ -287,7 +287,7 @@ describe('Source List Tests', () => { }) .reply(200, Buffer.from('abc')); const info = createInfo('/org1/sites/site2/source/nope/'); - const resp = await deleteSource(context, info); + const resp = 
await deleteFolder(context, info); assert.equal(resp.status, 404); }); @@ -309,7 +309,7 @@ describe('Source List Tests', () => { .deleteObject('/org1/site2/a/b/page.html') .reply(500); const info = createInfo('/org1/sites/site2/source/a/b/'); - const resp = await deleteSource(context, info); + const resp = await deleteFolder(context, info); assert.equal(resp.status, 500); }); @@ -322,7 +322,7 @@ describe('Source List Tests', () => { }) .reply(503); const info = createInfo('/org1/sites/site2/source/nope/'); - const resp = await deleteSource(context, info); + const resp = await deleteFolder(context, info); assert.equal(resp.status, 503); }); diff --git a/test/source/handler.test.js b/test/source/handler.test.js index f8e6e3d..8624f9b 100644 --- a/test/source/handler.test.js +++ b/test/source/handler.test.js @@ -14,6 +14,7 @@ /* eslint-disable no-param-reassign */ import assert from 'assert'; import { promisify } from 'util'; +import xml2js from 'xml2js'; import zlib from 'zlib'; import { Headers, Request } from '@adobe/fetch'; import { AuthInfo } from '../../src/auth/AuthInfo.js'; @@ -141,6 +142,20 @@ describe('Source Handler Tests', () => { }); it('handles DELETE requests', async () => { + nock.source() + .headObject('/org/site/to/be/deleted.html') + .reply(200, null, { + 'last-modified': 'Thu, 2 Apr 2026 08:34:46 GMT', + }); + nock.source() + .copyObject('/org/site/.trash/deleted.html') + .matchHeader('x-amz-copy-source', 'helix-source-bus/org/site/to/be/deleted.html') + .matchHeader('if-none-match', '*') + .reply(200, new xml2js.Builder().buildObject({ + CopyObjectResult: { + ETag: 'abaca', + }, + })); nock.source() .deleteObject('/org/site/to/be/deleted.html') .reply(204); @@ -150,7 +165,24 @@ describe('Source Handler Tests', () => { assert.equal(resp.status, 204); }); + const BUCKET_LIST_EMPTY_TRASH = ` + + my-bucket + org/site/.trash/myfolder/ + + 1000 + false + `; + it('handles DELETE requests to delete a folder', async () => { + nock.source() + .get('/') + 
.query({ + 'list-type': '2', + delimiter: '/', + prefix: 'org/site/.trash/myfolder/', + }) + .reply(200, Buffer.from(BUCKET_LIST_EMPTY_TRASH)); nock.source() .get('/') .query({ @@ -158,6 +190,36 @@ describe('Source Handler Tests', () => { prefix: 'org/site/myfolder/', }) .reply(200, Buffer.from(BUCKET_LIST_RESULT)); + + nock.source() + .headObject('/org/site/myfolder/sub/abc.pdf') + .reply(200, null, { + 'last-modified': 'Tue, 25 Oct 2022 02:57:46 GMT', + }); + nock.source() + .headObject('/org/site/myfolder/xyz.html') + .reply(200, null, { + 'last-modified': 'Tue, 25 Oct 2022 02:57:46 GMT', + }); + nock.source() + .copyObject('/org/site/.trash/myfolder/sub/abc.pdf') + .matchHeader('x-amz-copy-source', 'helix-source-bus/org/site/myfolder/sub/abc.pdf') + .matchHeader('if-none-match', '*') + .reply(200, new xml2js.Builder().buildObject({ + CopyObjectResult: { + ETag: '314159', + }, + })); + nock.source() + .copyObject('/org/site/.trash/myfolder/xyz.html') + .matchHeader('x-amz-copy-source', 'helix-source-bus/org/site/myfolder/xyz.html') + .matchHeader('if-none-match', '*') + .reply(200, new xml2js.Builder().buildObject({ + CopyObjectResult: { + ETag: '314159', + }, + })); + nock.source() .deleteObject('/org/site/myfolder/sub/abc.pdf') .reply(204); diff --git a/test/source/put.test.js b/test/source/put.test.js index 9c1b041..c9698ee 100644 --- a/test/source/put.test.js +++ b/test/source/put.test.js @@ -198,7 +198,44 @@ describe('Source PUT Tests', () => { // filter json so that only the src and dst keys are present (so that we only compare those) const json = body.copied.map((item) => ({ src: item.src, dst: item.dst })); assert.deepStrictEqual(json, [ - { src: 'testorg/testsite/src.html', dst: 'testorg/testsite/dst.html' }, + { src: '/src.html', dst: '/dst.html' }, + ]); + assert.equal(resp.status, 200); + assert.equal('application/json', resp.headers.get('content-type')); + }); + + it('test putSource copies a file needs to retry with 409', async () => { + 
nock.source() + .headObject('/testorg/testsite/src.html') + .twice() + .reply(200, null, { + 'last-modified': 'Tue, 25 Oct 2022 02:57:46 GMT', + }); + nock.source() + .copyObject('/testorg/testsite/dst.html') + .matchHeader('x-amz-copy-source', 'helix-source-bus/testorg/testsite/src.html') + .matchHeader('if-none-match', '*') + .reply(409); + nock.source() + .copyObject('/testorg/testsite/dst.html') + .matchHeader('x-amz-copy-source', 'helix-source-bus/testorg/testsite/src.html') + .matchHeader('if-none-match', '*') + .reply(200, new xml2js.Builder().buildObject({ + CopyObjectResult: { + ETag: '123', + }, + })); + + const path = '/testorg/sites/testsite/source/dst.html'; + const ctx = setupContext(path); + ctx.data.source = '/src.html'; + const resp = await putSource(ctx, createInfo(path, {}, 'PUT')); + + const body = await resp.json(); + // filter json so that only the src and dst keys are present (so that we only compare those) + const json = body.copied.map((item) => ({ src: item.src, dst: item.dst })); + assert.deepStrictEqual(json, [ + { src: '/src.html', dst: '/dst.html' }, ]); assert.equal(resp.status, 200); assert.equal('application/json', resp.headers.get('content-type')); @@ -286,7 +323,7 @@ describe('Source PUT Tests', () => { const body = await resp.json(); assert.deepStrictEqual(body, { copied: [ - { src: 'o1/s1/s/src.html', dst: 'o1/s1/t/to.html' }, + { src: '/s/src.html', dst: '/t/to.html' }, ], }); }); @@ -315,7 +352,7 @@ describe('Source PUT Tests', () => { const resp = await putSource(ctx, createInfo(path)); assert.equal(resp.status, 409); - assert.equal(resp.headers.get('x-error'), 'Collision: something is at the destination already, no overwrite option provided'); + assert.equal(resp.headers.get('x-error'), 'Collision: something is at the destination already'); }); it('test putSource copies a file with 412 collision and move specified', async () => { @@ -502,6 +539,44 @@ describe('Source PUT Tests', () => { assert.equal(500, resp.status); }); + 
it('test putSource moves a file with 412 collision and unique rename', async () => { + // First copy attempt returns 412 (destination already exists, IfNoneMatch: * fails) + nock.source() + .copyObject('/o1/s1/t/to.html') + .matchHeader('x-amz-copy-source', 'helix-source-bus/o1/s1/s/src.html') + .matchHeader('if-none-match', '*') + .reply(412); + + // In the retry an 8-char suffix to the name is added + nock.source() + .copyObject(/^\/o1\/s1\/t\/to-.{8}\.html$/) + .matchHeader('x-amz-copy-source', 'helix-source-bus/o1/s1/s/src.html') + .matchHeader('if-none-match', '*') + .reply(200, new xml2js.Builder().buildObject({ + CopyObjectResult: { + ETag: '123', + }, + })); + + nock.source() + .deleteObject('/o1/s1/s/src.html') + .reply(204); + + const path = '/o1/sites/s1/source/t/to.html'; + const ctx = setupContext(path, { + data: { + source: '/s/src.html', + collision: 'unique', + move: 'true', + }, + }); + ctx.config.org = 'o1'; + ctx.config.site = 's1'; + + const resp = await putSource(ctx, createInfo(path)); + assert.equal(resp.status, 200); + }); + const BUCKET_LIST_RESULT = ` my-bucket @@ -587,9 +662,9 @@ describe('Source PUT Tests', () => { // filter json so that only the src and dst keys are present (so that we only compare those) const json = body.copied.map((item) => ({ src: item.src, dst: item.dst })); assert.deepStrictEqual(json, [ - { src: 'org1/site2/a/b/c/somejson.json', dst: 'org1/site2/dest/somejson.json' }, - { src: 'org1/site2/a/b/c/d1.html', dst: 'org1/site2/dest/d1.html' }, - { src: 'org1/site2/a/b/c/d/d2.html', dst: 'org1/site2/dest/d/d2.html' }, + { src: '/a/b/c/somejson.json', dst: '/dest/somejson.json' }, + { src: '/a/b/c/d1.html', dst: '/dest/d1.html' }, + { src: '/a/b/c/d/d2.html', dst: '/dest/d/d2.html' }, ]); assert.equal(resp.status, 200); assert.equal('application/json', resp.headers.get('content-type')); @@ -637,7 +712,7 @@ describe('Source PUT Tests', () => { assert.equal(resp.status, 200); const body = await resp.json(); 
assert.deepStrictEqual(body.moved, [ - { src: 'org123/456site/foo/bar/src.html', dst: 'org123/456site/lala/dst.html' }, + { src: '/foo/bar/src.html', dst: '/lala/dst.html' }, ]); assert.equal('application/json', resp.headers.get('content-type')); }); @@ -729,8 +804,8 @@ describe('Source PUT Tests', () => { // remove all keys from body except src and dst, for comparison const cmp = body.moved.map((item) => ({ src: item.src, dst: item.dst })); assert.deepStrictEqual(cmp, [ - { src: 'o/s/x/x.html', dst: 'o/s/hello/dest/x.html' }, - { src: 'o/s/x/sub/x.pdf', dst: 'o/s/hello/dest/sub/x.pdf' }, + { src: '/x/x.html', dst: '/hello/dest/x.html' }, + { src: '/x/sub/x.pdf', dst: '/hello/dest/sub/x.pdf' }, ]); assert.equal('application/json', resp.headers.get('content-type')); }); diff --git a/test/source/s3-path-utils.test.js b/test/source/s3-path-utils.test.js new file mode 100644 index 0000000..dfffc7a --- /dev/null +++ b/test/source/s3-path-utils.test.js @@ -0,0 +1,43 @@ +/* + * Copyright 2026 Adobe. All rights reserved. + * This file is licensed to you under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. You may obtain a copy + * of the License at http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under + * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS + * OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. 
+ */ + +/* eslint-env mocha */ +/* eslint-disable no-param-reassign */ +import assert from 'assert'; +import { getS3Key, getS3KeyFromInfo, getDocPathFromS3Key } from '../../src/source/s3-path-utils.js'; + +describe('S3 Path Utils Tests', () => { + it('test getS3Key', () => { + assert.equal(getS3Key('org1', 'site2', '/a/b/c/'), 'org1/site2/a/b/c/'); + }); + + it('test getS3Key with file path', () => { + assert.equal(getS3Key('myorg', 'mysite', '/doc.html'), 'myorg/mysite/doc.html'); + }); + + it('test getS3KeyFromInfo', () => { + const info = { org: 'org1', site: 'site2', resourcePath: '/a/b/c.html' }; + assert.equal(getS3KeyFromInfo(info), 'org1/site2/a/b/c.html'); + }); + + it('test getDocPathFromS3Key', () => { + assert.equal(getDocPathFromS3Key('org1/site2/a/b/c.html'), '/a/b/c.html'); + }); + + it('test getDocPathFromS3Key with folder', () => { + assert.equal(getDocPathFromS3Key('org1/site2/a/b/c/'), '/a/b/c/'); + }); + + it('test getDocPathFromS3Key with root file', () => { + assert.equal(getDocPathFromS3Key('org1/site2/doc.html'), '/doc.html'); + }); +}); diff --git a/test/source/source-client.test.js b/test/source/source-client.test.js new file mode 100644 index 0000000..dcd4b20 --- /dev/null +++ b/test/source/source-client.test.js @@ -0,0 +1,224 @@ +/* + * Copyright 2026 Adobe. All rights reserved. + * This file is licensed to you under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. You may obtain a copy + * of the License at http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under + * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS + * OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. 
+ */ + +/* eslint-env mocha */ +/* eslint-disable no-param-reassign */ +import assert from 'assert'; +import xml2js from 'xml2js'; +import { createVersion } from '../../src/source/source-client.js'; +import { MAX_SOURCE_BUCKET_RETRY } from '../../src/source/utils.js'; +import { Nock } from '../utils.js'; +import { setupContext } from './testutils.js'; + +describe('Source Client Tests', () => { + let context; + /** @type {import('../utils.js').NockEnv} */ + let nock; + + beforeEach(() => { + context = setupContext(); + context.config.org = 'myorg'; + context.config.site = 'mysite'; + + nock = new Nock().env(); + }); + + afterEach(() => { + nock.done(); + }); + + it('test createVersion', async () => { + let versionId; + + nock.source() + .headObject('/myorg/mysite/a/b/c.html') + .twice() + .reply(200, null, { + etag: 'foobar', + 'x-amz-meta-doc-id': '01KK1E35DP7EQDG9G99QQAVQ1Z', + 'last-modified': 'Fri, 18 Mar 2005 01:58:31 GMT', + }); + + async function copyFn(u) { + const path = u.split('?')[0]; + const prefix = '/myorg/mysite/.versions/01KK1E35DP7EQDG9G99QQAVQ1Z/'; + assert(path.startsWith(prefix)); + versionId = path.slice(prefix.length); + + return new xml2js.Builder().buildObject({ + CopyObjectResult: { + ETag: '123', + }, + }); + } + + nock.source() + .copyObject(/myorg\/mysite\/.versions\/01KK1E35DP7EQDG9G99QQAVQ1Z\/.+/) + .matchHeader('x-amz-copy-source', 'helix-source-bus/myorg/mysite/a/b/c.html') + .matchHeader('x-amz-metadata-directive', 'REPLACE') + .matchHeader('x-amz-meta-doc-id', '01KK1E35DP7EQDG9G99QQAVQ1Z') + .matchHeader('x-amz-meta-doc-path-hint', '/a/b/c.html') + .matchHeader('x-amz-meta-doc-last-modified', '2005-03-18T01:58:31.000Z') + .matchHeader('x-amz-meta-version-by', 'anonymous') + .matchHeader('x-amz-meta-version-operation', 'testing') + .matchHeader('x-amz-meta-version-comment', 'test comment') + .reply(200, copyFn); + + const resp = await createVersion(context, 'myorg/mysite/a/b/c.html', 'testing', 'test comment'); + 
assert.equal(resp.status, 201); + assert.equal(resp.headers.get('location'), `/myorg/sites/mysite/source/a/b/c.html/.versions/${versionId}`); + }); + + it('test createVersion precondition failed, retry', async () => { + nock.source() + .headObject('/myorg/mysite/a/b/c.html') + .times(4) + .reply(200, null, { + etag: 'foobar', + 'x-amz-meta-doc-id': '01KK1E35DP7EQDG9G99QQAVQ1Z', + 'last-modified': 'Tue, 04 Jun 2024 14:20:00 GMT', + }); + + nock.source() + .copyObject(/myorg\/mysite\/.versions\/01KK1E35DP7EQDG9G99QQAVQ1Z\/.+/) + .reply(412) + .copyObject(/myorg\/mysite\/.versions\/01KK1E35DP7EQDG9G99QQAVQ1Z\/.+/) + .reply(200, new xml2js.Builder().buildObject({ + CopyObjectResult: { + ETag: '123', + }, + })); + + const resp = await createVersion(context, 'myorg/mysite/a/b/c.html'); + assert.equal(resp.status, 201); + }); + + it('test createVersion precondition failed, too many retries', async () => { + nock.source() + .headObject('/myorg/mysite/a/b/c.html') + .times((MAX_SOURCE_BUCKET_RETRY + 1) * 2) + .reply(200, null, { + etag: 'foobar', + 'x-amz-meta-doc-id': '01KK1E35DP7EQDG9G99QQAVQ1Z', + 'last-modified': 'Tue, 04 Jun 2024 14:20:00 GMT', + }); + + nock.source() + .copyObject(/myorg\/mysite\/.versions\/01KK1E35DP7EQDG9G99QQAVQ1Z\/.+/) + .times(MAX_SOURCE_BUCKET_RETRY + 1) + .reply(412); + + const resp = await createVersion(context, 'myorg/mysite/a/b/c.html', 'abc', 'def'); + assert.equal(resp.status, 412); + }); + + it('test createVersion precondition failed, configured max retries', async () => { + context.attributes.maxSourceBucketRetry = 2; + + nock.source() + .headObject('/myorg/mysite/a/b/c.html') + .times(6) + .reply(200, null, { + etag: 'foobar', + 'x-amz-meta-doc-id': '01KK1E35DP7EQDG9G99QQAVQ1Z', + 'last-modified': 'Tue, 04 Jun 2024 14:20:00 GMT', + }); + + nock.source() + .copyObject(/myorg\/mysite\/.versions\/01KK1E35DP7EQDG9G99QQAVQ1Z\/.+/) + .times(3) + .reply(412); + + const resp = await createVersion(context, 'myorg/mysite/a/b/c.html', 'abc', 
'def'); + assert.equal(resp.status, 412); + }); + + it('test createVersion error', async () => { + nock.source() + .headObject('/myorg/mysite/a/b/c.html') + .twice() + .reply(200, null, { + etag: 'foobar', + 'x-amz-meta-doc-id': '01KK1E35DP7EQDG9G99QQAVQ1Z', + 'last-modified': 'Tue, 04 Jun 2024 14:20:00 GMT', + }); + + nock.source() + .copyObject(/myorg\/mysite\/.versions\/01KK1E35DP7EQDG9G99QQAVQ1Z\/.+/) + .reply(403); + + const resp = await createVersion(context, 'myorg/mysite/a/b/c.html'); + assert.equal(resp.status, 403); + }); + + it('test createVersion invalid base key', async () => { + const resp = await createVersion(context, '/myorg/mysite/a/b/c.html'); + assert.equal(resp.status, 400); + }); + + it('test createVersion with etag', async () => { + nock.source() + .headObject('/myorg/mysite/hello.html') + .twice() + .reply(200, null, { + etag: 'mwhaha', + 'x-amz-meta-doc-id': '01KMD45QKPY7S9Y7BDKP0E019Q', + 'last-modified': 'Fri, 18 Mar 2005 01:58:31 GMT', + }); + + const etag = 'foobar'; + nock.source() + .copyObject(/myorg\/mysite\/.versions\/01KMD45QKPY7S9Y7BDKP0E019Q\/.+/) + .matchHeader('x-amz-copy-source', 'helix-source-bus/myorg/mysite/hello.html') + .matchHeader('x-amz-copy-source-if-match', etag) + .matchHeader('x-amz-meta-doc-id', '01KMD45QKPY7S9Y7BDKP0E019Q') + .matchHeader('x-amz-meta-doc-path-hint', '/hello.html') + .matchHeader('x-amz-meta-doc-last-modified', '2005-03-18T01:58:31.000Z') + .matchHeader('x-amz-meta-version-operation', 'test-op') + .matchHeader('x-amz-meta-version-comment', 'test 123') + .reply(200, new xml2js.Builder().buildObject({ + CopyObjectResult: { + ETag: '987789', + }, + })); + + const resp = await createVersion(context, 'myorg/mysite/hello.html', 'test-op', 'test 123', etag); + assert.equal(resp.status, 201); + }); + + it('test createVersion on non-existing document gives 404', async () => { + nock.source() + .headObject('/myorg/mysite/hello.html') + .reply(404); + + const resp = await createVersion(context, 
'myorg/mysite/hello.html'); + assert.equal(resp.status, 404); + }); + + it('test createVersion with etag, does not retry on failure', async () => { + nock.source() + .headObject('/myorg/mysite/hello.html') + .twice() + .reply(200, null, { + etag: 'foobar', + 'x-amz-meta-doc-id': '01KK1E35DP7EQDG9G99QQAVQ1Z', + 'last-modified': 'Tue, 04 Jun 2024 14:20:00 GMT', + }); + + nock.source() + .copyObject(/myorg\/mysite\/.versions\/01KK1E35DP7EQDG9G99QQAVQ1Z\/.+/) + .reply(412); + + const resp = await createVersion(context, 'myorg/mysite/hello.html', 'abc', 'def', 'someetag'); + assert.equal(resp.status, 412); + }); +}); diff --git a/test/source/testutils.js b/test/source/testutils.js index cda2117..740364c 100644 --- a/test/source/testutils.js +++ b/test/source/testutils.js @@ -12,13 +12,17 @@ import { createContext } from '../utils.js'; export function setupContext(suffix, { attributes = {}, data = {} } = {}) { - return createContext(suffix, { + const context = createContext(suffix, { attributes, data, env: { HELIX_STORAGE_DISABLE_R2: 'true', }, }); + + const { authInfo } = context.attributes; + authInfo.resolveEmail = () => authInfo.profile?.email; + return context; } export function stripSpaces(str) { diff --git a/test/source/utils.test.js b/test/source/utils.test.js index 00e49d3..ecf31f5 100644 --- a/test/source/utils.test.js +++ b/test/source/utils.test.js @@ -16,7 +16,6 @@ import assert from 'assert'; import { getDocID, getValidHtml, - getS3Key, validateJson, validateMedia, } from '../../src/source/utils.js'; @@ -211,10 +210,6 @@ describe('Source Utils Tests', () => { ); }); - it('test getS3Key', () => { - assert.equal(getS3Key('org1', 'site2', '/a/b/c/'), 'org1/site2/a/b/c/'); - }); - it('test getDocID', () => { const head = { Metadata: { diff --git a/test/source/versions.test.js b/test/source/versions.test.js index 4a37aea..ba54404 100644 --- a/test/source/versions.test.js +++ b/test/source/versions.test.js @@ -13,9 +13,7 @@ /* eslint-env mocha */ /* 
eslint-disable no-param-reassign */ import assert from 'assert'; -import xml2js from 'xml2js'; -import { getOrListVersions, postVersion } from '../../src/source/versions.js'; -import { MAX_SOURCE_BUCKET_RETRY } from '../../src/source/utils.js'; +import { getOrListVersions } from '../../src/source/versions.js'; import { createInfo, Nock } from '../utils.js'; import { setupContext } from './testutils.js'; @@ -36,192 +34,6 @@ describe('Versions Tests', () => { nock.done(); }); - it('test postVersion', async () => { - let versionId; - - nock.source() - .headObject('/myorg/mysite/a/b/c.html') - .twice() - .reply(200, null, { - etag: 'foobar', - 'x-amz-meta-doc-id': '01KK1E35DP7EQDG9G99QQAVQ1Z', - 'last-modified': 'Fri, 18 Mar 2005 01:58:31 GMT', - }); - - async function copyFn(u) { - const path = u.split('?')[0]; - const prefix = '/myorg/mysite/.versions/01KK1E35DP7EQDG9G99QQAVQ1Z/'; - assert(path.startsWith(prefix)); - versionId = path.slice(prefix.length); - - return new xml2js.Builder().buildObject({ - CopyObjectResult: { - ETag: '123', - }, - }); - } - - nock.source() - .copyObject(/myorg\/mysite\/.versions\/01KK1E35DP7EQDG9G99QQAVQ1Z\/.+/) - .matchHeader('x-amz-copy-source', 'helix-source-bus/myorg/mysite/a/b/c.html') - .matchHeader('x-amz-metadata-directive', 'REPLACE') - .matchHeader('x-amz-meta-doc-id', '01KK1E35DP7EQDG9G99QQAVQ1Z') - .matchHeader('x-amz-meta-doc-path-hint', '/a/b/c.html') - .matchHeader('x-amz-meta-doc-last-modified', '2005-03-18T01:58:31.000Z') - .matchHeader('x-amz-meta-version-by', 'anonymous') - .matchHeader('x-amz-meta-version-operation', 'testing') - .matchHeader('x-amz-meta-version-comment', 'test comment') - .reply(200, copyFn); - - const resp = await postVersion(context, 'myorg/mysite/a/b/c.html', 'testing', 'test comment'); - assert.equal(resp.status, 201); - assert.equal(resp.headers.get('location'), `/myorg/sites/mysite/source/a/b/c.html/.versions/${versionId}`); - }); - - it('test postVersion precondition failed, retry', async () 
=> { - nock.source() - .headObject('/myorg/mysite/a/b/c.html') - .times(4) - .reply(200, null, { - etag: 'foobar', - 'x-amz-meta-doc-id': '01KK1E35DP7EQDG9G99QQAVQ1Z', - 'last-modified': 'Tue, 04 Jun 2024 14:20:00 GMT', - }); - - nock.source() - .copyObject(/myorg\/mysite\/.versions\/01KK1E35DP7EQDG9G99QQAVQ1Z\/.+/) - .reply(412) - .copyObject(/myorg\/mysite\/.versions\/01KK1E35DP7EQDG9G99QQAVQ1Z\/.+/) - .reply(200, new xml2js.Builder().buildObject({ - CopyObjectResult: { - ETag: '123', - }, - })); - - const resp = await postVersion(context, 'myorg/mysite/a/b/c.html'); - assert.equal(resp.status, 201); - }); - - it('test postVersion precondition failed, too many retries', async () => { - nock.source() - .headObject('/myorg/mysite/a/b/c.html') - .times((MAX_SOURCE_BUCKET_RETRY + 1) * 2) - .reply(200, null, { - etag: 'foobar', - 'x-amz-meta-doc-id': '01KK1E35DP7EQDG9G99QQAVQ1Z', - 'last-modified': 'Tue, 04 Jun 2024 14:20:00 GMT', - }); - - nock.source() - .copyObject(/myorg\/mysite\/.versions\/01KK1E35DP7EQDG9G99QQAVQ1Z\/.+/) - .times(MAX_SOURCE_BUCKET_RETRY + 1) - .reply(412); - - const resp = await postVersion(context, 'myorg/mysite/a/b/c.html', 'abc', 'def'); - assert.equal(resp.status, 412); - }); - - it('test postVersion precondition failed, configured max retries', async () => { - context.attributes.maxSourceBucketRetry = 2; - - nock.source() - .headObject('/myorg/mysite/a/b/c.html') - .times(6) - .reply(200, null, { - etag: 'foobar', - 'x-amz-meta-doc-id': '01KK1E35DP7EQDG9G99QQAVQ1Z', - 'last-modified': 'Tue, 04 Jun 2024 14:20:00 GMT', - }); - - nock.source() - .copyObject(/myorg\/mysite\/.versions\/01KK1E35DP7EQDG9G99QQAVQ1Z\/.+/) - .times(3) - .reply(412); - - const resp = await postVersion(context, 'myorg/mysite/a/b/c.html', 'abc', 'def'); - assert.equal(resp.status, 412); - }); - - it('test postVersion error', async () => { - nock.source() - .headObject('/myorg/mysite/a/b/c.html') - .twice() - .reply(200, null, { - etag: 'foobar', - 'x-amz-meta-doc-id': 
'01KK1E35DP7EQDG9G99QQAVQ1Z', - 'last-modified': 'Tue, 04 Jun 2024 14:20:00 GMT', - }); - - nock.source() - .copyObject(/myorg\/mysite\/.versions\/01KK1E35DP7EQDG9G99QQAVQ1Z\/.+/) - .reply(403); - - const resp = await postVersion(context, 'myorg/mysite/a/b/c.html'); - assert.equal(resp.status, 403); - }); - - it('test postVersion invalid base key', async () => { - const resp = await postVersion(context, '/myorg/mysite/a/b/c.html'); - assert.equal(resp.status, 400); - }); - - it('test postVersion with etag', async () => { - nock.source() - .headObject('/myorg/mysite/hello.html') - .twice() - .reply(200, null, { - etag: 'mwhaha', - 'x-amz-meta-doc-id': '01KMD45QKPY7S9Y7BDKP0E019Q', - 'last-modified': 'Fri, 18 Mar 2005 01:58:31 GMT', - }); - - const etag = 'foobar'; - nock.source() - .copyObject(/myorg\/mysite\/.versions\/01KMD45QKPY7S9Y7BDKP0E019Q\/.+/) - .matchHeader('x-amz-copy-source', 'helix-source-bus/myorg/mysite/hello.html') - .matchHeader('x-amz-copy-source-if-match', etag) - .matchHeader('x-amz-meta-doc-id', '01KMD45QKPY7S9Y7BDKP0E019Q') - .matchHeader('x-amz-meta-doc-path-hint', '/hello.html') - .matchHeader('x-amz-meta-doc-last-modified', '2005-03-18T01:58:31.000Z') - .matchHeader('x-amz-meta-version-operation', 'test-op') - .matchHeader('x-amz-meta-version-comment', 'test 123') - .reply(200, new xml2js.Builder().buildObject({ - CopyObjectResult: { - ETag: '987789', - }, - })); - - const resp = await postVersion(context, 'myorg/mysite/hello.html', 'test-op', 'test 123', etag); - assert.equal(resp.status, 201); - }); - - it('test postVersion on non-existing document gives 404', async () => { - nock.source() - .headObject('/myorg/mysite/hello.html') - .reply(404); - - const resp = await postVersion(context, 'myorg/mysite/hello.html'); - assert.equal(resp.status, 404); - }); - - it('test postVersion with etag, does not retry on failure', async () => { - nock.source() - .headObject('/myorg/mysite/hello.html') - .twice() - .reply(200, null, { - etag: 'foobar', 
- 'x-amz-meta-doc-id': '01KK1E35DP7EQDG9G99QQAVQ1Z', - 'last-modified': 'Tue, 04 Jun 2024 14:20:00 GMT', - }); - - nock.source() - .copyObject(/myorg\/mysite\/.versions\/01KK1E35DP7EQDG9G99QQAVQ1Z\/.+/) - .reply(412); - - const resp = await postVersion(context, 'myorg/mysite/hello.html', 'abc', 'def', 'someetag'); - assert.equal(resp.status, 412); - }); - const BUCKET_LIST_RESULT = ` my-bucket