Skip to content
Open
Show file tree
Hide file tree
Changes from 13 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
74 changes: 63 additions & 11 deletions src/source/delete.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,30 +12,82 @@
import { Response } from '@adobe/fetch';
import { HelixStorage } from '@adobe/helix-shared-storage';
import { createErrorResponse } from '../contentbus/utils.js';
import { deleteFolder } from './folder.js';
import { getS3KeyFromInfo } from './utils.js';
import { RequestInfo } from '../support/RequestInfo.js';
import { StatusCodeError } from '../support/StatusCodeError.js';
import { CopyOptions, copyDocument, copyFolder } from './source-client.js';
import { getDocPathFromS3Key, getS3Key, getS3KeyFromInfo } from './s3-path-utils.js';

/**
 * Trash a folder by moving all of its contents to the trash in the same folder structure.
 * If the trash already contains a folder with this name, a base-36 encoded timestamp is appended.
 *
 * @param {import('../support/AdminContext').AdminContext} context context
 * @param {import('../support/RequestInfo').RequestInfo} info request info
 * @returns {Promise<Response>} response, status 204 if successful.
*/
async function trashFolder(context, info) {
  const bucket = HelixStorage.fromContext(context).sourceBus();

  // The folder name is the last path segment before the trailing '/'.
  const folderName = info.rawPath.split('/').at(-2);
  const destDir = `/.trash/${folderName}`;

  // If the trash already holds a folder with this name, disambiguate the
  // destination with a base-36 encoded timestamp suffix (alpha-sortable).
  const existing = await bucket.list(`${getS3Key(info.org, info.site, destDir)}/`, { shallow: true });
  const destPath = existing.length === 0 ? `${destDir}/` : `${destDir}-${Date.now().toString(36)}/`;

  const srcKey = getS3Key(info.org, info.site, info.rawPath);
  const trashInfo = RequestInfo.clone(info, { path: destPath });
  // Tag each trashed object with the document path it was deleted from.
  const perFileOpts = (sKey) => ({ addMetadata: { 'doc-path': getDocPathFromS3Key(sKey) } });

  try {
    const moved = await copyFolder(context, new CopyOptions({
      src: srcKey, info: trashInfo, move: true, fnOpts: perFileOpts, collOpts: { collision: 'unique' },
    }));
    if (moved.length === 0) {
      // NOTE(review): an empty result is treated as failure — presumably a
      // folder always contains at least its marker file; confirm.
      throw new StatusCodeError('Trashing of folder failed', 500);
    }
    return new Response('', { status: 204 });
  } catch (e) {
    const opts = { e, log: context.log };
    opts.status = e.$metadata?.httpStatusCode;
    return createErrorResponse(opts);
  }
}

/**
 * Delete from the source bus, which means moving the resource to the trash.
 * Both documents and folders are supported. Each trashed document gets an
 * extra metadata field 'doc-path', which is the path it was deleted from.
*
* @param {import('../support/AdminContext').AdminContext} context context
* @param {import('../support/RequestInfo').RequestInfo} info request info
* @return {Promise<Response>} response, status 204 if successful.
*/
export async function deleteSource(context, info) {
if (info.rawPath.endsWith('/')) {
return deleteFolder(context, info);
return trashFolder(context, info);
}
const { log } = context;

const bucket = HelixStorage.fromContext(context).sourceBus();
const key = getS3KeyFromInfo(info);
// Trash a document.
const docName = info.rawPath.split('/').pop();
const srcKey = getS3KeyFromInfo(info);
const newInfo = RequestInfo.clone(info, { path: `/.trash/${docName}` });
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

is this desired? why don't you include the path somehow? otherwise, deleting common files, like index.html will end up overwriting.

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is similar to how a Trash works in an OS environment, e.g. on Mac OS individually deleted files end up in the root of the trash, if there is already a file with that name, they get a unique suffix:

Image

We do a similar thing. So files are not overwritten, but if one with the name already exists, it will get a unique suffix (alpha sortable to get the order in which they are deleted):

Image

const copyOpts = {
addMetadata: {
'doc-path': info.resourcePath,
},
};
const copyOptions = new CopyOptions({
src: srcKey, info: newInfo, move: true, opts: copyOpts, collOpts: { collision: 'unique' },
});

try {
const resp = await bucket.remove(key);
return new Response('', { status: resp.$metadata?.httpStatusCode });
const resp = await copyDocument(context, copyOptions);
if (resp.length !== 1) {
throw new StatusCodeError('Trashing of document failed', 500);
}
return new Response('', { status: 204 });
} catch (e) {
const opts = { e, log };
const opts = { e, log: context.log };
opts.status = e.$metadata?.httpStatusCode;
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@dominique-pfister since delete now doesn't do bucket.remove() any more but rather a copy, we need to go back to checking e.$metadata?.httpStatusCode am I correct?

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't think so: AFAICS, Bucket operations catch all errors thrown and rethrow an exception where the information in $metadata is available in the exception, e.g.:
https://github.com/adobe/helix-shared/blob/726a38139542da81fc0a1b26a414339b8ba6562b/packages/helix-shared-storage/src/storage.js#L430-L438

return createErrorResponse(opts);
}
Expand Down
4 changes: 3 additions & 1 deletion src/source/folder.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,9 @@ import { sanitizePath } from '@adobe/helix-shared-string';
import { createErrorResponse } from '../contentbus/utils.js';
import { splitExtension } from '../support/RequestInfo.js';
import { StatusCodeError } from '../support/StatusCodeError.js';
import { getS3Key, storeSourceFile, CONTENT_TYPES } from './utils.js';
import { getS3Key } from './s3-path-utils.js';
import { storeSourceFile } from './source-client.js';
import { CONTENT_TYPES } from './utils.js';

/**
* A folder is marked by a marker file. This allows folder to show up in bucket
Expand Down
5 changes: 3 additions & 2 deletions src/source/get.js
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,9 @@
*/
import { createErrorResponse } from '../contentbus/utils.js';
import { listFolder } from './folder.js';
import { accessSourceFile, getS3KeyFromInfo } from './utils.js';
import { getOrListVersions, VERSION_FOLDER } from './versions.js';
import { accessSourceFile, VERSION_FOLDER } from './source-client.js';
import { getS3KeyFromInfo } from './s3-path-utils.js';
import { getOrListVersions } from './versions.js';

async function accessSource(context, info, headRequest) {
if (info.rawPath.endsWith('/')) {
Expand Down
12 changes: 4 additions & 8 deletions src/source/post.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,9 @@
import { createErrorResponse } from '../contentbus/utils.js';
import { createFolder } from './folder.js';
import { checkConditionals } from './header-utils.js';
import {
contentTypeFromExtension,
getS3KeyFromInfo,
getValidPayload,
storeSourceFile,
} from './utils.js';
import { postVersion, VERSION_FOLDER } from './versions.js';
import { getS3KeyFromInfo } from './s3-path-utils.js';
import { storeSourceFile, createVersion, VERSION_FOLDER } from './source-client.js';
import { contentTypeFromExtension, getValidPayload } from './utils.js';

/**
* Handle POST requests to the source bus.
Expand All @@ -38,7 +34,7 @@ export async function postSource(context, info) {
const operation = String(context.data.operation || '');
const comment = String(context.data.comment || '');

return postVersion(context, baseKey, operation, comment);
return createVersion(context, baseKey, operation, comment);
}

try {
Expand Down
191 changes: 19 additions & 172 deletions src/source/put.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,172 +10,13 @@
* governing permissions and limitations under the License.
*/
import { Response } from '@adobe/fetch';
import processQueue from '@adobe/helix-shared-process-queue';
import { HelixStorage } from '@adobe/helix-shared-storage';
import { ulid } from 'ulid';
import { createErrorResponse } from '../contentbus/utils.js';
import { StatusCodeError } from '../support/StatusCodeError.js';
import { checkConditionals } from './header-utils.js';
import { getDocPathFromS3Key, getS3Key, getS3KeyFromInfo } from './s3-path-utils.js';
import {
contentTypeFromExtension,
getS3KeyFromInfo,
getS3Key,
getDocID,
getValidPayload,
storeSourceFile,
MAX_SOURCE_BUCKET_RETRY,
} from './utils.js';
import { postVersion } from './versions.js';

/**
 * Copy an S3 object and handle conflicts.
 *
 * Happy path: one conditional copy with `IfNoneMatch: '*'`, which succeeds only
 * when nothing exists at the destination. A 412 means the destination is
 * occupied and collision handling kicks in; a 409 is retried up to
 * `maxSourceBucketRetry` times.
 *
 * @param {import('../support/AdminContext').AdminContext} context context
 * @param {string} srcKey source S3 key
 * @param {string} destKey destination S3 key
 * @param {boolean} move true if this is a move operation
 * @param {object} initialOpts metadata options for the copy operation
 * @param {object} collOpts collision options (e.g { copy: 'overwrite' } )
 */
async function copyWithRetry(
  context,
  srcKey,
  destKey,
  move,
  initialOpts,
  collOpts,
) {
  const bucket = HelixStorage.fromContext(context).sourceBus();
  let opts = initialOpts;

  // We start with assuming that there is nothing at the destination, the happy path
  // NOTE(review): `copyOpts` is passed as a nested property below ({ copyOpts, ...opts });
  // confirm that bucket.copy() expects this shape rather than spread-in conditions.
  let copyOpts = { IfNoneMatch: '*' };

  const maxRetry = context.attributes.maxSourceBucketRetry ?? MAX_SOURCE_BUCKET_RETRY;
  let attempt = 0;
  while (true) {
    try {
      const allOpts = { copyOpts, ...opts };
      // eslint-disable-next-line no-await-in-loop
      await bucket.copy(srcKey, destKey, allOpts);

      break; // copy was successful, break out of the loop - we're done!
    } catch (e) {
      attempt += 1;
      if (attempt > maxRetry) {
        // Out of retries: surface the last error to the caller.
        throw e;
      }

      const status = e.$metadata?.httpStatusCode;

      // As per S3 docs, retry on a 409
      if (status !== 409) {
        if (status !== 412) {
          // Neither a retryable 409 nor a collision 412: rethrow unchanged.
          throw e;
        }
        // 412: precondition failed - something is at the destination already.

        if (move) {
          // TODO add move collision handling
          throw new StatusCodeError('Collision: something is at the destination already', 409);
        } else {
          if (collOpts.copy !== 'overwrite') {
            throw new StatusCodeError('Collision: something is at the destination already, no overwrite option provided', 409);
          }

          // eslint-disable-next-line no-await-in-loop
          const dest = await bucket.head(destKey);

          // version what's there before overwriting it, provide the destination ETag so that we
          // know we're versioning what we just did a head() of.
          // eslint-disable-next-line no-await-in-loop
          const versionResp = await postVersion(context, destKey, 'copy', 'Version created before overwrite', dest.ETag);
          if (versionResp.status !== 201) {
            // 412/409 from versioning are tolerated (presumably the destination
            // changed concurrently or was already versioned); the loop retries.
            if (versionResp.status !== 412 && versionResp.status !== 409) {
              throw new StatusCodeError('Failed to version the destination', versionResp.status);
            }
          } else {
            // Creating the version was successful, so we can now copy over the destination.

            const getDestDocId = getDocID(dest);

            // If something is at the destination already, we copy over that file, but keep
            // the doc ID from the destination as-is so that the destination keeps its history.
            opts = { ...initialOpts, addMetadata: { 'doc-id': getDestDocId } };

            // Now only copy over the destination if it's still the same as what we did a head() of
            copyOpts = { IfMatch: dest.ETag };
          }
        }
      }
    }
  }

  if (move) {
    // A move removes the source after a successful copy.
    const resp = await bucket.remove(srcKey);
    if (resp.$metadata?.httpStatusCode !== 204) {
      throw new StatusCodeError(`Failed to remove source: ${srcKey}`, resp.$metadata?.httpStatusCode);
    }
  }
}

/**
 * Copy a single S3 object. A plain copy stamps a freshly generated doc ID on
 * the destination; a move carries the existing metadata along unchanged.
 *
 * @param {import('../support/AdminContext').AdminContext} context context
 * @param {string} srcKey source S3 key
 * @param {string} destKey destination S3 key
 * @param {boolean} move whether this is a move operation
 * @param {object} collOpts collision options
 */
async function copyFile(context, srcKey, destKey, move, collOpts) {
  const metaOpts = move ? {} : { addMetadata: { 'doc-id': ulid() } };
  await copyWithRetry(context, srcKey, destKey, move, metaOpts, collOpts);
}

/**
 * Copies a document from the source to the destination.
 *
 * @param {import('../support/AdminContext').AdminContext} context context
 * @param {string} src source S3 key
 * @param {import('../support/RequestInfo').RequestInfo} info destination info
 * @param {boolean} move whether to move the source
 * @param {object} collOpts collision options
 * @returns {Promise<Array<{src: string, dst: string}>>} the copied file details
 */
async function copyDocument(context, src, info, move, collOpts) {
  // Destination key is derived from the request info of the target.
  const destination = getS3KeyFromInfo(info);
  await copyFile(context, src, destination, move, collOpts);
  return [{ src, dst: destination }];
}

/**
 * Copies a folder from the source to the destination.
 *
 * @param {import('../support/AdminContext').AdminContext} context context
 * @param {string} srcKey source S3 key
 * @param {import('../support/RequestInfo').RequestInfo} info destination info
 * @param {boolean} move whether to move the source
 * @param {object} collOpts collision options
 * @returns {Promise<Array<{src: string, dst: string}>>} the copied files
 */
async function copyFolder(context, srcKey, info, move, collOpts) {
  const destKey = getS3Key(info.org, info.site, info.rawPath);

  // NOTE(review): bare prefix check — presumably folder keys carry a trailing
  // '/' so e.g. '/foo' vs '/foobar' cannot false-positive; confirm.
  if (destKey.startsWith(srcKey)) {
    throw new StatusCodeError('Destination cannot be a subfolder of source', 400);
  }

  // Build one copy task per object under the source prefix.
  const bucket = HelixStorage.fromContext(context).sourceBus();
  const listing = await bucket.list(srcKey);
  const tasks = listing.map((obj) => ({
    src: obj.key,
    dst: `${destKey}${obj.path}`,
  }));

  // Copy concurrently via the process queue, collecting completed pairs.
  const copied = [];
  await processQueue(tasks, async ({ src, dst }) => {
    await copyFile(context, src, dst, move, collOpts);
    copied.push({ src, dst });
  });
  return copied;
}
CopyOptions, copyFolder, copyDocument, storeSourceFile,
} from './source-client.js';
import { contentTypeFromExtension, getValidPayload } from './utils.js';

/**
* Copies a resource of a folder to the destination folder. If a folder is
Expand All @@ -198,13 +39,22 @@ async function copySource(context, info, move, collOpts) {
return createErrorResponse({ status: 400, msg: 'Source and destination type mismatch', log });
}

const copyOpts = new CopyOptions({
src: srcKey, info, move, collOpts,
});
const copied = isFolder
? await copyFolder(context, srcKey, info, move, collOpts)
: await copyDocument(context, srcKey, info, move, collOpts);
? await copyFolder(context, copyOpts)
: await copyDocument(context, copyOpts);

// The copied paths returned are without the org and site segments
const copiedPaths = copied.map((c) => ({
src: getDocPathFromS3Key(c.src),
dst: getDocPathFromS3Key(c.dst),
}));

const operation = move ? 'moved' : 'copied';
return new Response({
[operation]: copied,
[operation]: copiedPaths,
});
} catch (e) {
const opts = { e, log };
Expand All @@ -223,12 +73,9 @@ async function copySource(context, info, move, collOpts) {
export async function putSource(context, info) {
if (context.data.source) {
const move = String(context.data.move) === 'true';
const collOpts = {};
if (move) {
collOpts.move = context.data.collision;
} else {
collOpts.copy = context.data.collision;
}
const collOpts = {
collision: context.data.collision,
};
return copySource(context, info, move, collOpts);
}

Expand Down
Loading
Loading