Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
19 commits
Select commit Hold shift + click to select a range
4f85101
fix: fully validate txs retrieved from tx file store (backport #21988…
AztecBot Mar 25, 2026
9ab6032
refactor: extract checkpoint proposal handling to ProposalHandler (ba…
AztecBot Mar 25, 2026
1294089
chore: accumulated backports (#22015)
alexghr Mar 25, 2026
0619f00
fix: deploy-staging-public waits for any semver tag instead of releas…
PhilWindle Mar 25, 2026
db55b9c
fix: Fix blob encoding when uploaded from proposals (#22045)
PhilWindle Mar 26, 2026
b594593
chore: Accumulated backports to v4 (#22030)
AztecBot Mar 26, 2026
5152dd5
chore: merging compilation fix (#22127)
benesjan Mar 30, 2026
d8fa4c4
fix: run compatibility tests on all v4-related branches (#22149)
alexghr Mar 30, 2026
4ffd56d
chore: Accumulated backports to v4-next (#22158)
AztecBot Mar 31, 2026
3b97770
chore: Accumulated backports to v4-next (#22174)
AztecBot Mar 31, 2026
2821640
chore: Accumulated backports to v4-next (#22205)
AztecBot Apr 1, 2026
b5a9e97
cherry-pick: fix(archiver): handle duplicate checkpoint from L1 reorg…
AztecBot Apr 2, 2026
7fee115
fix: resolve cherry-pick conflicts
AztecBot Apr 2, 2026
af7324b
fix(archiver): handle duplicate checkpoint from L1 reorg (backport #2…
PhilWindle Apr 2, 2026
e696cf6
chore: Accumulated backports to v4 (#22155)
AztecBot Apr 2, 2026
31afc36
chore: merge v4 into v4-next (conflicts need resolution)
Apr 2, 2026
84a0680
fix: resolve merge conflicts from v4 into v4-next
AztecBot Apr 2, 2026
c92c474
fix: resolve merge conflicts from v4 into v4-next (#22276)
PhilWindle Apr 2, 2026
f8c11df
fix: only create releases in barretenberg repo by default
AztecBot Apr 2, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 14 additions & 15 deletions .github/workflows/deploy-staging-public.yml
Original file line number Diff line number Diff line change
Expand Up @@ -26,40 +26,39 @@ jobs:
token: ${{ secrets.AZTEC_BOT_GITHUB_TOKEN }}
fetch-depth: 0

- name: Read version from manifest
id: manifest
run: |
VERSION=$(jq -r '."."' .release-please-manifest.json)
echo "version=$VERSION"
echo "version=$VERSION" >> $GITHUB_OUTPUT

- name: Poll for tag at HEAD
- name: Poll for semver tag at HEAD
id: poll-tag
run: |
# wait for tag to be pushed (either RC or stable release)
VERSION="${{ steps.manifest.outputs.version }}"
HEAD_SHA=$(git rev-parse HEAD)
MAX_ATTEMPTS=60
echo "Looking for tag matching v${VERSION} or v${VERSION}-rc.* at HEAD ($HEAD_SHA)"
echo "Looking for any semver tag at HEAD ($HEAD_SHA)"

for i in $(seq 1 $MAX_ATTEMPTS); do
git fetch --tags --force

TAG=$(git tag --points-at HEAD | grep -E "^v${VERSION}(-rc\.[0-9]+)?$" | sort -V | tail -n 1 || true)
# Collect all valid semver tags pointing at HEAD
SEMVER_TAGS=()
for t in $(git tag --points-at HEAD); do
if ci3/semver check "$t"; then
SEMVER_TAGS+=("$t")
fi
done

if [ -n "$TAG" ]; then
# If we found valid semver tags, pick the highest
if [ ${#SEMVER_TAGS[@]} -gt 0 ]; then
TAG=$(ci3/semver sort "${SEMVER_TAGS[@]}" | tail -n 1)
echo "Found tag: $TAG"
SEMVER="${TAG#v}"
echo "tag=$TAG" >> $GITHUB_OUTPUT
echo "semver=$SEMVER" >> $GITHUB_OUTPUT
exit 0
fi

echo "Attempt $i/$MAX_ATTEMPTS: No matching tag yet, waiting 10s..."
echo "Attempt $i/$MAX_ATTEMPTS: No semver tag yet, waiting 10s..."
sleep 10
done

echo "Error: No tag found for v${VERSION} at HEAD after 10 minutes"
echo "Error: No semver tag found at HEAD after 10 minutes"
exit 1

wait-for-ci3:
Expand Down
46 changes: 16 additions & 30 deletions bootstrap.sh
Original file line number Diff line number Diff line change
Expand Up @@ -446,36 +446,22 @@ function bench {
cache_upload bench-$(git rev-parse HEAD^{tree}).tar.gz bench-out/bench.json
}

function release_github {
# Add an easy link for comparing to previous release.
local compare_link=""
if gh release view "v$CURRENT_VERSION" &>/dev/null; then
compare_link=$(echo -e "See changes: https://github.com/AztecProtocol/aztec-packages/compare/v${CURRENT_VERSION}...${COMMIT_HASH}")
fi
# Legacy releases. TODO: Eventually remove.
if gh release view "aztec-packages-v$CURRENT_VERSION" &>/dev/null; then
compare_link=$(echo -e "See changes: https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v${CURRENT_VERSION}...${COMMIT_HASH}")
function release_bb_github {
# Create a GitHub release in AztecProtocol/barretenberg for bb artifacts.
# Users can manually create releases in aztec-packages via the GitHub UI if needed.
local bb_repo="AztecProtocol/barretenberg"
if gh release view "$REF_NAME" --repo "$bb_repo" &>/dev/null; then
return
fi
# Determine if this is a prerelease (has a prerelease tag like -rc.1, -alpha, etc.)
local is_prerelease=false
local prerelease_flag=""
if [ -n "$(semver prerelease $REF_NAME)" ]; then
is_prerelease=true
fi
# Ensure we have a commit release.
if ! gh release view "$REF_NAME" &>/dev/null; then
local prerelease_flag=""
if $is_prerelease; then
prerelease_flag="--prerelease"
fi
do_or_dryrun gh release create "$REF_NAME" \
$prerelease_flag \
--target $COMMIT_HASH \
--title "$REF_NAME" \
--notes "$compare_link"
elif ! $is_prerelease; then
# Release exists but this is not a prerelease version - ensure it's marked as a full release
do_or_dryrun gh release edit "$REF_NAME" --prerelease=false
prerelease_flag="--prerelease"
fi
do_or_dryrun gh release create "$REF_NAME" \
--repo "$bb_repo" \
$prerelease_flag \
--title "$REF_NAME" \
--notes "Release $REF_NAME — see https://github.com/AztecProtocol/aztec-packages/commits/$COMMIT_HASH"
}

function release {
Expand All @@ -495,9 +481,9 @@ function release {
echo_header "release all"
set -x

# Ensure we have a github release for our REF_NAME.
# This is in case were are not going through release-please.
release_github
# Ensure we have a github release in AztecProtocol/barretenberg for bb artifacts.
# Users can create aztec-packages releases manually via the GitHub "Create a release" button.
release_bb_github

projects=(
barretenberg/cpp
Expand Down
50 changes: 50 additions & 0 deletions yarn-project/archiver/src/archiver-sync.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1223,6 +1223,56 @@ describe('Archiver Sync', () => {

expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(2));
}, 15_000);

it('handles L1 reorg that moves a checkpoint to a later L1 block', async () => {
  // Precondition: a fresh archiver that has not synced any checkpoint yet.
  expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(0));

  // Sync checkpoints 1 and 2
  await fake.addCheckpoint(CheckpointNumber(1), {
    l1BlockNumber: 70n,
    messagesL1BlockNumber: 50n,
    numL1ToL2Messages: 3,
  });
  const { checkpoint: cp2 } = await fake.addCheckpoint(CheckpointNumber(2), {
    l1BlockNumber: 80n,
    messagesL1BlockNumber: 60n,
    numL1ToL2Messages: 3,
  });

  // Advance the fake chain past both checkpoints and run a full sync round.
  fake.setL1BlockNumber(90n);
  await archiver.syncImmediate();
  expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(2));

  // Verify checkpoint 2's blocks are stored
  const lastBlockNumber = cp2.blocks.at(-1)!.number;
  const tips = await archiver.getL2Tips();
  expect(tips.checkpointed.checkpoint.number).toEqual(CheckpointNumber(2));
  expect(tips.checkpointed.block.number).toEqual(lastBlockNumber);

  // Simulate L1 reorg: checkpoint 2 moves from L1 block 80 to L1 block 85.
  // The checkpoint content (blocks, archive) stays the same — only the L1 block changes.
  // This causes the archiver to re-discover checkpoint 2 when scanning from block 81 onward.
  fake.moveCheckpointToL1Block(CheckpointNumber(2), 85n);

  // Advance L1 and sync. The archiver's sync point is at L1 block 80 (from checkpoint 2's
  // original insertion). The scan starts from 81, finds checkpoint 2 at block 85, and must
  // accept it as a duplicate with updated L1 info rather than throwing.
  fake.setL1BlockNumber(95n);
  await archiver.syncImmediate();

  // The archiver should still be at checkpoint 2 and healthy
  expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(2));

  // Add checkpoint 3 to verify the archiver can continue syncing after the duplicate
  await fake.addCheckpoint(CheckpointNumber(3), {
    l1BlockNumber: 100n,
    messagesL1BlockNumber: 90n,
    numL1ToL2Messages: 3,
  });
  fake.setL1BlockNumber(110n);
  await archiver.syncImmediate();
  expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(3));
}, 15_000); // generous timeout: this test performs three full sync rounds
});

describe('finalized checkpoint', () => {
Expand Down
67 changes: 62 additions & 5 deletions yarn-project/archiver/src/store/block_store.ts
Original file line number Diff line number Diff line change
Expand Up @@ -227,21 +227,34 @@ export class BlockStore {
}

return await this.db.transactionAsync(async () => {
// Check that the checkpoint immediately before the first block to be added is present in the store.
const firstCheckpointNumber = checkpoints[0].checkpoint.number;
const previousCheckpointNumber = await this.getLatestCheckpointNumber();

if (previousCheckpointNumber !== firstCheckpointNumber - 1 && !opts.force) {
// Handle already-stored checkpoints at the start of the batch.
// This can happen after an L1 reorg re-includes a checkpoint in a different L1 block.
// We accept them if archives match (same content) and update their L1 metadata.
if (!opts.force && firstCheckpointNumber <= previousCheckpointNumber) {
checkpoints = await this.skipOrUpdateAlreadyStoredCheckpoints(checkpoints, previousCheckpointNumber);
if (checkpoints.length === 0) {
return true;
}
// Re-check sequentiality after skipping
const newFirstNumber = checkpoints[0].checkpoint.number;
if (previousCheckpointNumber !== newFirstNumber - 1) {
throw new InitialCheckpointNumberNotSequentialError(newFirstNumber, previousCheckpointNumber);
}
} else if (previousCheckpointNumber !== firstCheckpointNumber - 1 && !opts.force) {
throw new InitialCheckpointNumberNotSequentialError(firstCheckpointNumber, previousCheckpointNumber);
}

// Extract the previous checkpoint if there is one
const currentFirstCheckpointNumber = checkpoints[0].checkpoint.number;
let previousCheckpointData: CheckpointData | undefined = undefined;
if (previousCheckpointNumber !== INITIAL_CHECKPOINT_NUMBER - 1) {
if (currentFirstCheckpointNumber - 1 !== INITIAL_CHECKPOINT_NUMBER - 1) {
// There should be a previous checkpoint
previousCheckpointData = await this.getCheckpointData(previousCheckpointNumber);
previousCheckpointData = await this.getCheckpointData(CheckpointNumber(currentFirstCheckpointNumber - 1));
if (previousCheckpointData === undefined) {
throw new CheckpointNotFoundError(previousCheckpointNumber);
throw new CheckpointNotFoundError(CheckpointNumber(currentFirstCheckpointNumber - 1));
}
}

Expand Down Expand Up @@ -331,6 +344,50 @@ export class BlockStore {
});
}

/**
 * Handles checkpoints at the start of a batch that are already stored (e.g. due to L1 reorg).
 * Verifies the archive root matches, updates L1 metadata, and returns only the new checkpoints.
 *
 * @param checkpoints - Incoming batch; entries with `checkpoint.number <= latestStored` at the
 *   front are treated as re-discovered duplicates.
 * @param latestStored - The highest checkpoint number currently in the store.
 * @returns The suffix of `checkpoints` that is not yet stored (may be empty).
 * @throws Error if an already-stored checkpoint is re-submitted with a different archive root,
 *   i.e. the "duplicate" actually has different content.
 */
private async skipOrUpdateAlreadyStoredCheckpoints(
  checkpoints: PublishedCheckpoint[],
  latestStored: CheckpointNumber,
): Promise<PublishedCheckpoint[]> {
  let i = 0;
  // Walk the leading run of checkpoint numbers that are at or below the stored tip.
  for (; i < checkpoints.length && checkpoints[i].checkpoint.number <= latestStored; i++) {
    const incoming = checkpoints[i];
    const stored = await this.getCheckpointData(incoming.checkpoint.number);
    if (!stored) {
      // Should not happen if latestStored is correct, but be safe
      // (the caller re-validates sequentiality of whatever we return).
      break;
    }
    // Verify the checkpoint content matches (archive root)
    if (!stored.archive.root.equals(incoming.checkpoint.archive.root)) {
      throw new Error(
        `Checkpoint ${incoming.checkpoint.number} already exists in store but with a different archive root. ` +
          `Stored: ${stored.archive.root}, incoming: ${incoming.checkpoint.archive.root}`,
      );
    }
    // Update L1 metadata and attestations for the already-stored checkpoint
    this.#log.warn(
      `Checkpoint ${incoming.checkpoint.number} already stored, updating L1 info ` +
        `(L1 block ${stored.l1.blockNumber} -> ${incoming.l1.blockNumber})`,
    );
    // Overwrite the full stored record so L1 data and attestations reflect the re-inclusion.
    await this.#checkpoints.set(incoming.checkpoint.number, {
      header: incoming.checkpoint.header.toBuffer(),
      archive: incoming.checkpoint.archive.toBuffer(),
      checkpointOutHash: incoming.checkpoint.getCheckpointOutHash().toBuffer(),
      l1: incoming.l1.toBuffer(),
      attestations: incoming.attestations.map(a => a.toBuffer()),
      checkpointNumber: incoming.checkpoint.number,
      startBlock: incoming.checkpoint.blocks[0].number,
      blockCount: incoming.checkpoint.blocks.length,
    });
    // Update the sync point to reflect the new L1 block
    // NOTE(review): assumes the re-included L1 block is at or after the current sync point;
    // if a reorg re-includes the checkpoint in an *earlier* block this moves the sync point
    // backwards — confirm that is the intended re-scan behavior.
    await this.#lastSynchedL1Block.set(incoming.l1.blockNumber);
  }
  return checkpoints.slice(i);
}

private async addBlockToDatabase(block: L2Block, checkpointNumber: number, indexWithinCheckpoint: number) {
const blockHash = await block.hash();

Expand Down
59 changes: 53 additions & 6 deletions yarn-project/archiver/src/store/kv_archiver_store.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ import {
makeInboxMessage,
makeInboxMessages,
makeInboxMessagesWithFullBlocks,
makeL1PublishedData,
makePrivateLog,
makePrivateLogTag,
makePublicLog,
Expand Down Expand Up @@ -134,10 +135,56 @@ describe('KVArchiverDataStore', () => {
await expect(store.addCheckpoints(publishedCheckpoints)).resolves.toBe(true);
});

it('throws on duplicate checkpoints', async () => {
await store.addCheckpoints(publishedCheckpoints);
await expect(store.addCheckpoints(publishedCheckpoints)).rejects.toThrow(
InitialCheckpointNumberNotSequentialError,
it('accepts duplicate checkpoints with matching archives and updates L1 info', async () => {
  // Add first 3 checkpoints
  const first3 = publishedCheckpoints.slice(0, 3);
  await store.addCheckpoints(first3);

  // Verify initial L1 block number for checkpoint 3
  const beforeData = await store.getCheckpointData(CheckpointNumber(3));
  expect(beforeData).toBeDefined();
  const originalL1Block = beforeData!.l1.blockNumber;

  // Re-add checkpoint 3 with the same content but different L1 published data
  // This simulates an L1 reorg that moved the checkpoint to a different L1 block
  const cp3WithNewL1 = new PublishedCheckpoint(
    first3[2].checkpoint,
    makeL1PublishedData(999),
    first3[2].attestations,
  );
  // Also add checkpoint 4 (the next one) in the same batch, so the batch
  // contains both an already-stored checkpoint and a genuinely new one.
  await store.addCheckpoints([cp3WithNewL1, publishedCheckpoints[3]]);

  // Checkpoint 3's L1 info should be updated
  const afterData = await store.getCheckpointData(CheckpointNumber(3));
  expect(afterData).toBeDefined();
  expect(afterData!.l1.blockNumber).toEqual(999n);
  expect(afterData!.l1.blockNumber).not.toEqual(originalL1Block);

  // Checkpoint 4 should be stored
  expect(await store.getSynchedCheckpointNumber()).toEqual(CheckpointNumber(4));
});

it('accepts a batch that is entirely already-stored checkpoints', async () => {
  const first3 = publishedCheckpoints.slice(0, 3);
  await store.addCheckpoints(first3);

  // Re-add the same 3 checkpoints — should succeed without error
  // (every entry is skipped as an already-stored duplicate, leaving nothing new to insert).
  await expect(store.addCheckpoints(first3)).resolves.toBe(true);
});

it('throws on duplicate checkpoints with mismatching archives', async () => {
  const first3 = publishedCheckpoints.slice(0, 3);
  await store.addCheckpoints(first3);

  // Create a fake checkpoint 3 with a different archive root (content mismatch):
  // same checkpoint number as a stored one, but randomly generated content.
  const differentCheckpoint3 = await Checkpoint.random(CheckpointNumber(3), {
    numBlocks: 1,
    startBlockNumber: 3,
  });
  const mismatchedCp3 = makePublishedCheckpoint(differentCheckpoint3, 999);
  // A duplicate number with different content is a real conflict, not a reorg re-inclusion.
  await expect(store.addCheckpoints([mismatchedCp3])).rejects.toThrow(
    'already exists in store but with a different archive',
  );
});

Expand Down Expand Up @@ -274,7 +321,7 @@ describe('KVArchiverDataStore', () => {
await expect(store.addCheckpoints([publishedCheckpoint])).resolves.toBe(true);
});

it('throws on duplicate initial checkpoint', async () => {
it('throws on duplicate checkpoint with different content', async () => {
const block1 = await L2Block.random(BlockNumber(1), {
checkpointNumber: CheckpointNumber(1),
indexWithinCheckpoint: IndexWithinCheckpoint(0),
Expand Down Expand Up @@ -303,7 +350,7 @@ describe('KVArchiverDataStore', () => {

await expect(store.addCheckpoints([publishedCheckpoint])).resolves.toBe(true);
await expect(store.addCheckpoints([publishedCheckpoint2])).rejects.toThrow(
InitialCheckpointNumberNotSequentialError,
'already exists in store but with a different archive',
);
});
});
Expand Down
15 changes: 15 additions & 0 deletions yarn-project/archiver/src/test/fake_l1_state.ts
Original file line number Diff line number Diff line change
Expand Up @@ -331,6 +331,21 @@ export class FakeL1State {
this.updatePendingCheckpointNumber();
}

/**
* Moves a checkpoint to a different L1 block number (simulates L1 reorg that
* re-includes the same checkpoint transaction in a different block).
* The checkpoint content stays the same — only the L1 metadata changes.
* Auto-updates pending status.
*/
moveCheckpointToL1Block(checkpointNumber: CheckpointNumber, newL1BlockNumber: bigint): void {
for (const cpData of this.checkpoints) {
if (cpData.checkpointNumber === checkpointNumber) {
cpData.l1BlockNumber = newL1BlockNumber;
}
}
this.updatePendingCheckpointNumber();
}

/**
* Removes messages after a given total index (simulates L1 reorg).
* Auto-updates rolling hash.
Expand Down
Loading
Loading