Skip to content
Draft
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
67 changes: 67 additions & 0 deletions .github/workflows/e2e-blob.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
# E2E tests for blob-based package storage.
# Runs weekly on a schedule (read-only tests) and on manual dispatch
# (which additionally runs the publish + install job that needs secrets).
name: E2E Blob Storage

on:
  workflow_dispatch:
  schedule:
    - cron: "0 8 * * 0" # Sundays at 08:00 UTC

permissions:
  contents: read

jobs:
  e2e-blob:
    runs-on: ubuntu-latest
    name: E2E blob upload/download

    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
        with:
          node-version: 22
          cache: "npm"

      - name: Install npm packages
        run: |
          npm ci --ignore-scripts
          npm run ci:postinstall

      - name: Run E2E blob storage tests
        env:
          MOPS_TEST_E2E: "1"
          MOPS_NETWORK: staging
        run: |
          cd cli && NODE_OPTIONS="--experimental-vm-modules" \
            npx jest tests/e2e-blob-storage.test.ts --testTimeout 180000

  e2e-blob-publish:
    runs-on: ubuntu-latest
    name: E2E blob publish + install
    # Publishing needs the MOPS_IDENTITY_PEM secret; only run on manual dispatch,
    # never from the unattended schedule.
    if: github.event_name == 'workflow_dispatch'

    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
        with:
          node-version: 22
          cache: "npm"

      - name: Install npm packages
        run: |
          npm ci --ignore-scripts
          npm run ci:postinstall

      - name: Write identity PEM
        env:
          MOPS_IDENTITY_PEM: ${{ secrets.MOPS_IDENTITY_PEM }}
        run: |
          mkdir -p ~/.config/mops
          echo "$MOPS_IDENTITY_PEM" > ~/.config/mops/identity.pem

      - name: Run E2E publish + install tests
        env:
          MOPS_TEST_E2E: "1"
          MOPS_IDENTITY_PEM: ${{ secrets.MOPS_IDENTITY_PEM }}
          MOPS_NETWORK: staging
        run: |
          cd cli && NODE_OPTIONS="--experimental-vm-modules" \
            npx jest tests/e2e-blob-storage.test.ts --testTimeout 180000
98 changes: 90 additions & 8 deletions backend/main/PackagePublisher.mo
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,23 @@ module {
path : Text;
};

type PackageId = Types.PackageId;

// Validates that `hash` has the exact form "sha256:<64 lowercase hex chars>".
// Accepts only digits 0-9 and lowercase a-f in the hex part.
func _isValidBlobHash(hash : Text) : Bool {
	switch (Text.stripStart(hash, #text("sha256:"))) {
		case null { false }; // missing "sha256:" prefix
		case (?hex) {
			// 64 hex chars == 71 total chars including the prefix
			if (Text.size(hex) != 64) {
				return false;
			};
			for (ch in hex.chars()) {
				let isDigit = ch >= '0' and ch <= '9';
				let isLowerHex = ch >= 'a' and ch <= 'f';
				if (not (isDigit or isLowerHex)) {
					return false;
				};
			};
			true;
		};
	};
};

public class PackagePublisher(registry : Registry.Registry, storageManager : StorageManager.StorageManager) {
let MAX_PACKAGE_FILES = 1000;
let MAX_PACKAGE_SIZE = 1024 * 1024 * 28; // 28MB
Expand All @@ -55,12 +72,11 @@ module {
let publishingBenchmarks = TrieMap.TrieMap<PublishingId, Benchmarks>(Text.equal, Text.hash);
let publishingDocsCoverage = TrieMap.TrieMap<PublishingId, Float>(Text.equal, Text.hash);

public func startPublish(caller : Principal, config : PackageConfigV3) : async Result.Result<PublishingId, PublishingErr> {
func _validatePublishConfig(caller : Principal, config : PackageConfigV3) : Result.Result<(), PublishingErr> {
if (Principal.isAnonymous(caller)) {
return #err("Unauthorized");
};

// validate config
switch (validateConfig(config)) {
case (#ok) {};
case (#err(err)) {
Expand All @@ -70,12 +86,10 @@ module {

let isNewPackage = registry.getHighestVersion(config.name) == null;

// check permissions
if (not isNewPackage and not registry.isOwner(config.name, caller) and not registry.isMaintainer(config.name, caller)) {
return #err("Only owners and maintainers can publish packages");
};

// deny '.' and '_' in name for new packages
if (isNewPackage) {
for (char in config.name.chars()) {
let err = #err("invalid config: unexpected char '" # Char.toText(char) # "' in name '" # config.name # "'");
Expand All @@ -85,7 +99,6 @@ module {
};
};

// check if the same version is published
switch (registry.getPackageVersions(config.name)) {
case (?versions) {
let sameVersionOpt = Array.find<PackageVersion>(
Expand All @@ -101,7 +114,6 @@ module {
case (null) {};
};

// check dependencies
for (dep in config.dependencies.vals()) {
let packageId = PackageUtils.getPackageId(dep.name, dep.version);
if (dep.repo.size() == 0 and registry.getPackageConfig(PackageUtils.getDepName(dep.name), dep.version) == null) {
Expand All @@ -112,14 +124,22 @@ module {
};
};

// check devDependencies
for (dep in config.devDependencies.vals()) {
let packageId = PackageUtils.getPackageId(dep.name, dep.version);
if (dep.repo.size() == 0 and registry.getPackageConfig(PackageUtils.getDepName(dep.name), dep.version) == null) {
return #err("Dev Dependency " # packageId # " not found in registry");
};
};

#ok;
};

public func startPublish(caller : Principal, config : PackageConfigV3) : async Result.Result<PublishingId, PublishingErr> {
switch (_validatePublishConfig(caller, config)) {
case (#err(err)) return #err(err);
case (#ok) {};
};

let publishingId = await generateId();

if (publishingPackages.get(publishingId) != null) {
Expand All @@ -128,7 +148,6 @@ module {

await storageManager.ensureUploadableStorages();

// start
publishingPackages.put(
publishingId,
{
Expand All @@ -139,7 +158,32 @@ module {
},
);
publishingFiles.put(publishingId, Buffer.Buffer(10));
publishingPackageFileStats.put(publishingId, PackageUtils.defaultPackageFileStats());

#ok(publishingId);
};

public func startBlobPublish(caller : Principal, config : PackageConfigV3) : async Result.Result<PublishingId, PublishingErr> {
switch (_validatePublishConfig(caller, config)) {
case (#err(err)) return #err(err);
case (#ok) {};
};

let publishingId = await generateId();

if (publishingPackages.get(publishingId) != null) {
return #err("Already publishing");
};

publishingPackages.put(
publishingId,
{
time = Time.now();
user = caller;
config = config;
storage = Principal.fromText("aaaaa-aa");
},
);
publishingPackageFileStats.put(publishingId, PackageUtils.defaultPackageFileStats());

#ok(publishingId);
Expand Down Expand Up @@ -498,6 +542,44 @@ module {
});
};

// Finalizes a blob-based publish started via startBlobPublish: validates the
// blob hash, records the release in the registry, and drops all transient
// per-publishing state.
//
// Returns the stored config, the new publication record, and whether this
// release created a brand-new package.
public func finishBlobPublish(caller : Principal, publishingId : PublishingId, blobHash : Text) : async Result.Result<{ config : PackageConfigV3; publication : PackagePublication; isNewPackage : Bool }, PublishingErr> {
	// Return #err instead of trapping via `assert` so callers get a
	// diagnosable error, consistent with startPublish/startBlobPublish.
	if (Principal.isAnonymous(caller)) {
		return #err("Unauthorized");
	};

	if (not _isValidBlobHash(blobHash)) {
		return #err("Invalid blob hash format. Expected 'sha256:<64-lowercase-hex-chars>'");
	};

	let ?publishing = publishingPackages.get(publishingId) else return #err("Publishing package not found");

	// Only the principal that started this publish may finish it.
	if (publishing.user != caller) {
		return #err("Unauthorized");
	};

	// NOTE(review): blob publishes bypass the per-file MAX_PACKAGE_SIZE
	// enforcement and publishingPackageFileStats stays at its defaults here —
	// consider validating blob size/file count once the CLI reports them.
	let isNewPackage = registry.getHighestVersion(publishing.config.name) == null;

	let publication = registry.newBlobPackageRelease({
		userId = caller;
		config = publishing.config;
		notes = Option.get(publishingNotes.get(publishingId), "");
		blobHash = blobHash;
		benchmarks = Option.get(publishingBenchmarks.get(publishingId), []);
		fileStats = publishingPackageFileStats.get(publishingId);
		testStats = publishingTestStats.get(publishingId);
		docsCoverage = Option.get(publishingDocsCoverage.get(publishingId), 0.0);
	});

	// Drop all transient state associated with this publishing session.
	publishingFiles.delete(publishingId);
	publishingPackages.delete(publishingId);
	publishingPackageFileStats.delete(publishingId);
	publishingTestStats.delete(publishingId);
	publishingNotes.delete(publishingId);
	publishingBenchmarks.delete(publishingId);
	publishingDocsCoverage.delete(publishingId);

	#ok({
		config = publishing.config;
		publication;
		isNewPackage;
	});
};

func _checkPublishingPackageSize(publishingId : PublishingId) : Result.Result<(), PublishingErr> {
switch (publishingPackageFileStats.get(publishingId)) {
case (?fileStats) {
Expand Down
Loading
Loading