diff --git a/lexicons/com/atproto/sync/getCheckout.json b/lexicons/com/atproto/sync/getCheckout.json new file mode 100644 index 00000000000..5126eca6b3e --- /dev/null +++ b/lexicons/com/atproto/sync/getCheckout.json @@ -0,0 +1,21 @@ +{ + "lexicon": 1, + "id": "com.atproto.sync.getCheckout", + "defs": { + "main": { + "type": "query", + "description": "Gets the repo state.", + "parameters": { + "type": "params", + "required": ["did"], + "properties": { + "did": {"type": "string", "description": "The DID of the repo."}, + "commit": {"type": "string", "description": "The commit to get the checkout from. Defaults to current HEAD."} + } + }, + "output": { + "encoding": "application/cbor" + } + } + } +} \ No newline at end of file diff --git a/lexicons/com/atproto/sync/getCommitPath.json b/lexicons/com/atproto/sync/getCommitPath.json new file mode 100644 index 00000000000..adfb5b8eb1b --- /dev/null +++ b/lexicons/com/atproto/sync/getCommitPath.json @@ -0,0 +1,32 @@ +{ + "lexicon": 1, + "id": "com.atproto.sync.getCommitPath", + "defs": { + "main": { + "type": "query", + "description": "Gets the path of repo commits", + "parameters": { + "type": "params", + "required": ["did"], + "properties": { + "did": {"type": "string", "description": "The DID of the repo."}, + "latest": { "type": "string", "description": "The most recent commit"}, + "earliest": { "type": "string", "description": "The earliest commit to start from"} + } + }, + "output": { + "encoding": "application/json", + "schema": { + "type": "object", + "required": ["commits"], + "properties": { + "commits": { + "type": "array", + "items": { "type": "string" } + } + } + } + } + } + } +} \ No newline at end of file diff --git a/lexicons/com/atproto/sync/getRoot.json b/lexicons/com/atproto/sync/getHead.json similarity index 83% rename from lexicons/com/atproto/sync/getRoot.json rename to lexicons/com/atproto/sync/getHead.json index 93255fb2477..bcb713bcd86 100644 --- a/lexicons/com/atproto/sync/getRoot.json +++ b/lexicons/com/atproto/sync/getHead.json @@ -1,10 +1,10 @@ { "lexicon": 1, - "id": "com.atproto.sync.getRoot", + "id": "com.atproto.sync.getHead", "defs": { "main": { "type": "query", - "description": "Gets the current root CID of a repo.", + "description": "Gets the current HEAD CID of a repo.", "parameters": { "type": "params", "required": ["did"], diff --git a/packages/README.md b/packages/README.md index bb718fa5651..d7239708adc 100644 --- a/packages/README.md +++ b/packages/README.md @@ -9,7 +9,6 @@ ## Libraries - [API](./api): A library for communicating with ATP servers. -- [Auth](./auth): ATP's core permissioning library (based on UCANs). - [Common](./common): A library containing code which is shared between ATP packages. - [Crypto](./crypto): ATP's common cryptographic operations. - [DID Resolver](./did-resolver): A library for resolving ATP's Decentralized ID methods. 
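The three sync lexicons above surface in the generated `@atproto/api` client as the `SyncNS` methods added later in this diff (`getHead`, `getCommitPath`, `getCheckout`). A minimal usage sketch follows; the client construction via the package's default export, the service URL, and the DID are illustrative assumptions, while the method names and response shapes come from the generated types in this change.

```ts
import AtpApi from '@atproto/api'

// Assumed construction: a service client pointed at a PDS (URL is a placeholder).
const client = AtpApi.service('https://pds.example.com')

// Placeholder DID for the repo being synced.
const did = 'did:plc:example'

// Current HEAD commit CID of the repo (this endpoint replaces com.atproto.sync.getRoot,
// so the response data keeps the schema carried over from getRoot).
const head = await client.com.atproto.sync.getHead({ did })
console.log(head.data)

// Ordered commit CIDs between two commits; both bounds are optional,
// with `latest` defaulting to the current HEAD.
const path = await client.com.atproto.sync.getCommitPath({ did })
console.log(path.data.commits)

// Checkout of the repo at a commit (defaults to current HEAD); the response
// data is a Uint8Array, matching the application/cbor output encoding.
const checkout = await client.com.atproto.sync.getCheckout({ did })
console.log(checkout.data.byteLength)
```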
diff --git a/packages/api/src/client/index.ts b/packages/api/src/client/index.ts index e3118b00e4d..9a39b410946 100644 --- a/packages/api/src/client/index.ts +++ b/packages/api/src/client/index.ts @@ -27,8 +27,10 @@ import * as ComAtprotoSessionCreate from './types/com/atproto/session/create' import * as ComAtprotoSessionDelete from './types/com/atproto/session/delete' import * as ComAtprotoSessionGet from './types/com/atproto/session/get' import * as ComAtprotoSessionRefresh from './types/com/atproto/session/refresh' +import * as ComAtprotoSyncGetCheckout from './types/com/atproto/sync/getCheckout' +import * as ComAtprotoSyncGetCommitPath from './types/com/atproto/sync/getCommitPath' +import * as ComAtprotoSyncGetHead from './types/com/atproto/sync/getHead' import * as ComAtprotoSyncGetRepo from './types/com/atproto/sync/getRepo' -import * as ComAtprotoSyncGetRoot from './types/com/atproto/sync/getRoot' import * as ComAtprotoSyncUpdateRepo from './types/com/atproto/sync/updateRepo' import * as AppBskyActorCreateScene from './types/app/bsky/actor/createScene' import * as AppBskyActorGetProfile from './types/app/bsky/actor/getProfile' @@ -90,8 +92,10 @@ export * as ComAtprotoSessionCreate from './types/com/atproto/session/create' export * as ComAtprotoSessionDelete from './types/com/atproto/session/delete' export * as ComAtprotoSessionGet from './types/com/atproto/session/get' export * as ComAtprotoSessionRefresh from './types/com/atproto/session/refresh' +export * as ComAtprotoSyncGetCheckout from './types/com/atproto/sync/getCheckout' +export * as ComAtprotoSyncGetCommitPath from './types/com/atproto/sync/getCommitPath' +export * as ComAtprotoSyncGetHead from './types/com/atproto/sync/getHead' export * as ComAtprotoSyncGetRepo from './types/com/atproto/sync/getRepo' -export * as ComAtprotoSyncGetRoot from './types/com/atproto/sync/getRoot' export * as ComAtprotoSyncUpdateRepo from './types/com/atproto/sync/updateRepo' export * as AppBskyActorCreateScene from './types/app/bsky/actor/createScene' export * as AppBskyActorGetProfile from './types/app/bsky/actor/getProfile' @@ -481,6 +485,39 @@ export class SyncNS { this._service = service } + getCheckout( + params?: ComAtprotoSyncGetCheckout.QueryParams, + opts?: ComAtprotoSyncGetCheckout.CallOptions, + ): Promise { + return this._service.xrpc + .call('com.atproto.sync.getCheckout', params, undefined, opts) + .catch((e) => { + throw ComAtprotoSyncGetCheckout.toKnownErr(e) + }) + } + + getCommitPath( + params?: ComAtprotoSyncGetCommitPath.QueryParams, + opts?: ComAtprotoSyncGetCommitPath.CallOptions, + ): Promise { + return this._service.xrpc + .call('com.atproto.sync.getCommitPath', params, undefined, opts) + .catch((e) => { + throw ComAtprotoSyncGetCommitPath.toKnownErr(e) + }) + } + + getHead( + params?: ComAtprotoSyncGetHead.QueryParams, + opts?: ComAtprotoSyncGetHead.CallOptions, + ): Promise { + return this._service.xrpc + .call('com.atproto.sync.getHead', params, undefined, opts) + .catch((e) => { + throw ComAtprotoSyncGetHead.toKnownErr(e) + }) + } + getRepo( params?: ComAtprotoSyncGetRepo.QueryParams, opts?: ComAtprotoSyncGetRepo.CallOptions, @@ -492,17 +529,6 @@ export class SyncNS { }) } - getRoot( - params?: ComAtprotoSyncGetRoot.QueryParams, - opts?: ComAtprotoSyncGetRoot.CallOptions, - ): Promise { - return this._service.xrpc - .call('com.atproto.sync.getRoot', params, undefined, opts) - .catch((e) => { - throw ComAtprotoSyncGetRoot.toKnownErr(e) - }) - } - updateRepo( data?: ComAtprotoSyncUpdateRepo.InputSchema, opts?: 
ComAtprotoSyncUpdateRepo.CallOptions, diff --git a/packages/api/src/client/lexicons.ts b/packages/api/src/client/lexicons.ts index c1d121029db..56061c61263 100644 --- a/packages/api/src/client/lexicons.ts +++ b/packages/api/src/client/lexicons.ts @@ -828,9 +828,9 @@ export const schemaDict = { }, }, }, - ComAtprotoSyncGetRepo: { + ComAtprotoSyncGetCheckout: { lexicon: 1, - id: 'com.atproto.sync.getRepo', + id: 'com.atproto.sync.getCheckout', defs: { main: { type: 'query', @@ -843,9 +843,10 @@ export const schemaDict = { type: 'string', description: 'The DID of the repo.', }, - from: { + commit: { type: 'string', - description: 'A past commit CID.', + description: + 'The commit to get the checkout from. Defaults to current HEAD.', }, }, }, @@ -855,13 +856,56 @@ export const schemaDict = { }, }, }, - ComAtprotoSyncGetRoot: { + ComAtprotoSyncGetCommitPath: { + lexicon: 1, + id: 'com.atproto.sync.getCommitPath', + defs: { + main: { + type: 'query', + description: 'Gets the path of repo commits', + parameters: { + type: 'params', + required: ['did'], + properties: { + did: { + type: 'string', + description: 'The DID of the repo.', + }, + latest: { + type: 'string', + description: 'The most recent commit', + }, + earliest: { + type: 'string', + description: 'The earliest commit to start from', + }, + }, + }, + output: { + encoding: 'application/json', + schema: { + type: 'object', + required: ['commits'], + properties: { + commits: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + }, + }, + }, + }, + ComAtprotoSyncGetHead: { lexicon: 1, - id: 'com.atproto.sync.getRoot', + id: 'com.atproto.sync.getHead', defs: { main: { type: 'query', - description: 'Gets the current root CID of a repo.', + description: 'Gets the current HEAD CID of a repo.', parameters: { type: 'params', required: ['did'], @@ -887,6 +931,33 @@ export const schemaDict = { }, }, }, + ComAtprotoSyncGetRepo: { + lexicon: 1, + id: 'com.atproto.sync.getRepo', + defs: { + main: { + type: 'query', + description: 'Gets the repo state.', + parameters: { + type: 'params', + required: ['did'], + properties: { + did: { + type: 'string', + description: 'The DID of the repo.', + }, + from: { + type: 'string', + description: 'A past commit CID.', + }, + }, + }, + output: { + encoding: 'application/cbor', + }, + }, + }, + }, ComAtprotoSyncUpdateRepo: { lexicon: 1, id: 'com.atproto.sync.updateRepo', @@ -2998,8 +3069,10 @@ export const ids = { ComAtprotoSessionDelete: 'com.atproto.session.delete', ComAtprotoSessionGet: 'com.atproto.session.get', ComAtprotoSessionRefresh: 'com.atproto.session.refresh', + ComAtprotoSyncGetCheckout: 'com.atproto.sync.getCheckout', + ComAtprotoSyncGetCommitPath: 'com.atproto.sync.getCommitPath', + ComAtprotoSyncGetHead: 'com.atproto.sync.getHead', ComAtprotoSyncGetRepo: 'com.atproto.sync.getRepo', - ComAtprotoSyncGetRoot: 'com.atproto.sync.getRoot', ComAtprotoSyncUpdateRepo: 'com.atproto.sync.updateRepo', AppBskyActorCreateScene: 'app.bsky.actor.createScene', AppBskyActorGetProfile: 'app.bsky.actor.getProfile', diff --git a/packages/api/src/client/types/com/atproto/sync/getCheckout.ts b/packages/api/src/client/types/com/atproto/sync/getCheckout.ts new file mode 100644 index 00000000000..8f0a1a6c8c6 --- /dev/null +++ b/packages/api/src/client/types/com/atproto/sync/getCheckout.ts @@ -0,0 +1,29 @@ +/** + * GENERATED CODE - DO NOT MODIFY + */ +import { Headers, XRPCError } from '@atproto/xrpc' + +export interface QueryParams { + /** The DID of the repo. 
*/ + did: string + /** The commit to get the checkout from. Defaults to current HEAD. */ + commit?: string +} + +export type InputSchema = undefined + +export interface CallOptions { + headers?: Headers +} + +export interface Response { + success: boolean + headers: Headers + data: Uint8Array +} + +export function toKnownErr(e: any) { + if (e instanceof XRPCError) { + } + return e +} diff --git a/packages/api/src/client/types/com/atproto/sync/getCommitPath.ts b/packages/api/src/client/types/com/atproto/sync/getCommitPath.ts new file mode 100644 index 00000000000..60ede05b579 --- /dev/null +++ b/packages/api/src/client/types/com/atproto/sync/getCommitPath.ts @@ -0,0 +1,36 @@ +/** + * GENERATED CODE - DO NOT MODIFY + */ +import { Headers, XRPCError } from '@atproto/xrpc' + +export interface QueryParams { + /** The DID of the repo. */ + did: string + /** The most recent commit */ + latest?: string + /** The earliest commit to start from */ + earliest?: string +} + +export type InputSchema = undefined + +export interface OutputSchema { + commits: string[] + [k: string]: unknown +} + +export interface CallOptions { + headers?: Headers +} + +export interface Response { + success: boolean + headers: Headers + data: OutputSchema +} + +export function toKnownErr(e: any) { + if (e instanceof XRPCError) { + } + return e +} diff --git a/packages/api/src/client/types/com/atproto/sync/getRoot.ts b/packages/api/src/client/types/com/atproto/sync/getHead.ts similarity index 100% rename from packages/api/src/client/types/com/atproto/sync/getRoot.ts rename to packages/api/src/client/types/com/atproto/sync/getHead.ts diff --git a/packages/auth/README.md b/packages/auth/README.md deleted file mode 100644 index 1fff67b6fc9..00000000000 --- a/packages/auth/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# ATP Auth Library - -ATP's core permissioning library (based on UCANs). \ No newline at end of file diff --git a/packages/auth/build.js b/packages/auth/build.js deleted file mode 100644 index 5628aa4f4eb..00000000000 --- a/packages/auth/build.js +++ /dev/null @@ -1,22 +0,0 @@ -const pkgJson = require('@npmcli/package-json') -const { nodeExternalsPlugin } = require('esbuild-node-externals') - -const buildShallow = - process.argv.includes('--shallow') || process.env.ATP_BUILD_SHALLOW === 'true' - -if (process.argv.includes('--update-main-to-dist')) { - return pkgJson - .load(__dirname) - .then((pkg) => pkg.update({ main: 'dist/index.js' })) - .then((pkg) => pkg.save()) -} - -require('esbuild').build({ - logLevel: 'info', - entryPoints: ['src/index.ts'], - bundle: true, - sourcemap: true, - outdir: 'dist', - platform: 'node', - plugins: buildShallow ? [nodeExternalsPlugin()] : [], -}) diff --git a/packages/auth/jest.config.js b/packages/auth/jest.config.js deleted file mode 100644 index 39049495ffc..00000000000 --- a/packages/auth/jest.config.js +++ /dev/null @@ -1,6 +0,0 @@ -const base = require('../../jest.config.base.js') - -module.exports = { - ...base, - displayName: 'Auth' -} diff --git a/packages/auth/package.json b/packages/auth/package.json deleted file mode 100644 index c2ed2ba459d..00000000000 --- a/packages/auth/package.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "name": "@atproto/auth", - "version": "0.0.3", - "main": "src/index.ts", - "license": "MIT", - "scripts": { - "test": "jest", - "prettier": "prettier --check src/", - "prettier:fix": "prettier --write src/", - "lint": "eslint . 
--ext .ts,.tsx", - "lint:fix": "yarn lint --fix", - "verify": "run-p prettier lint", - "verify:fix": "yarn prettier:fix && yarn lint:fix", - "build": "node ./build.js", - "postbuild": "tsc --build tsconfig.build.json" - }, - "dependencies": { - "@atproto/crypto": "*", - "@atproto/did-resolver": "*", - "@ucans/core": "0.11.0", - "uint8arrays": "3.0.0" - } -} diff --git a/packages/auth/src/atp-capabilities.ts b/packages/auth/src/atp-capabilities.ts deleted file mode 100644 index 397a6bd924c..00000000000 --- a/packages/auth/src/atp-capabilities.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { atpCapability, parseAtpResource } from './atp-semantics' -import * as ucan from '@ucans/core' - -export const writeCap = ( - did: string, - collection?: string, - record?: string, -): ucan.Capability => { - let resource = did - if (collection) { - resource += '/' + collection - } - if (record) { - resource += '/' + record - } - return atpCapability(resource, 'WRITE') -} - -export const maintenanceCap = (did: string): ucan.Capability => { - return atpCapability(did, 'MAINTENANCE') -} - -export const vaguerCap = (cap: ucan.Capability): ucan.Capability | null => { - const rsc = parseAtpResource(cap.with) - if (rsc === null) return null - // can't go vaguer than every collection - if (rsc.collection === '*') return null - if (rsc.record === '*') return writeCap(rsc.did) - return writeCap(rsc.did, rsc.collection) -} diff --git a/packages/auth/src/atp-semantics.ts b/packages/auth/src/atp-semantics.ts deleted file mode 100644 index ac287feab5f..00000000000 --- a/packages/auth/src/atp-semantics.ts +++ /dev/null @@ -1,118 +0,0 @@ -import * as ucans from '@ucans/core' - -/* -ATP Ucans: - -Resource name: 'at' - -- Full permission for account: - at://did:example:userDid/* -- Permission to write to particular application collection: - at://did:example:userDid/com.foo.post/* -- Permission to create a single interaction on user's behalf: - at://did:example:userDid/com.foo.post/234567abcdefg - -Example: -{ - with: { scheme: "at", hierPart: "did:example:userDid/com.foo.post/*" }, - can: { namespace: "atp", segments: [ "WRITE" ] } -} - -At the moment, we support only two capability level: -- 'WRITE': this allows full create/update/delete permissions for the given resource -- 'MAINTENANCE': this does not allow updates to repo objects, but allows maintenance of the repo, such as repo creation -*/ - -export const ATP_ABILITY_LEVELS = { - SUPER_USER: 2, - WRITE: 1, - MAINTENANCE: 0, -} - -export const ATP_ABILITIES: string[] = Object.keys(ATP_ABILITY_LEVELS) - -export type AtpAbility = keyof typeof ATP_ABILITY_LEVELS - -export const isAtpCap = (cap: ucans.Capability): boolean => { - return cap.with.scheme === 'at' && isAtpAbility(cap.can) -} - -export const isAtpAbility = (ability: unknown): ability is AtpAbility => { - if (!ucans.ability.isAbility(ability)) return false - if (ability === ucans.ability.SUPERUSER) return true - const abilitySegment = ability.segments[0] - const isAtpAbilitySegment = - !!abilitySegment && ATP_ABILITIES.includes(abilitySegment) - return isAtpAbilitySegment && ability.namespace.toLowerCase() === 'atp' -} - -export const parseAtpAbility = ( - ability: ucans.ability.Ability, -): AtpAbility | null => { - if (ability === ucans.ability.SUPERUSER) return 'SUPER_USER' - if (isAtpAbility(ability)) return ability.segments[0] as AtpAbility - return null -} - -export const atpCapability = ( - resource: string, - ability: AtpAbility, -): ucans.Capability => { - return { - with: { scheme: 'at', hierPart: resource 
}, - can: { namespace: 'atp', segments: [ability] }, - } -} -export interface AtpResourcePointer { - did: string - collection: string - record: string -} - -// @TODO: ugly import on param -export const parseAtpResource = ( - pointer: ucans.capability.resourcePointer.ResourcePointer, -): AtpResourcePointer | null => { - if (pointer.scheme !== 'at') return null - - const parts = pointer.hierPart.split('/') - let [did, collection, record] = parts - if (!did) return null - if (!collection) collection = '*' - if (!record) record = '*' - return { - did, - collection, - record, - } -} - -export const atpSemantics: ucans.DelegationSemantics = { - canDelegateResource(parentResource, childResource) { - const parent = parseAtpResource(parentResource) - const child = parseAtpResource(childResource) - - if (parent == null || child == null) return false - if (parent.did !== child.did) return false - - if (parent.collection === '*') return true - if (parent.collection !== child.collection) return false - - if (parent.record === '*') return true - - return parent.record === child.record - }, - - canDelegateAbility(parentAbility, childAbility) { - const parent = parseAtpAbility(parentAbility) - const child = parseAtpAbility(childAbility) - - if (parent == null || child == null) return false - - if (ATP_ABILITY_LEVELS[child] > ATP_ABILITY_LEVELS[parent]) { - return false - } - - return true - }, -} diff --git a/packages/auth/src/auth-store.ts b/packages/auth/src/auth-store.ts deleted file mode 100644 index 66a5c284c95..00000000000 --- a/packages/auth/src/auth-store.ts +++ /dev/null @@ -1,181 +0,0 @@ -import * as ucan from '@ucans/core' -import { DidableKey } from '@ucans/core' - -import { atpSemantics, parseAtpResource } from './atp-semantics' -import { MONTH_IN_SEC, YEAR_IN_SEC } from './consts' -import { CapWithProof, Signer } from './types' -import { vaguerCap, writeCap } from './atp-capabilities' -import { PluginInjectedApi } from './plugins' - -export class AuthStore implements Signer { - protected keypair: DidableKey - protected ucanStore: ucan.StoreI | null = null - protected tokens: string[] - protected controlledDid: string | null - protected ucanApi: PluginInjectedApi - - constructor( - ucanApi: PluginInjectedApi, - keypair: DidableKey, - tokens: string[], - controlledDid?: string, - ) { - this.ucanApi = ucanApi - this.keypair = keypair - this.tokens = tokens - this.controlledDid = controlledDid || null - } - - // Update these for sub classes - // ---------------- - - protected async getKeypair(): Promise { - return this.keypair - } - - async addUcan(token: ucan.Ucan): Promise { - const ucanStore = await this.getUcanStore() - await ucanStore.add(token) - } - - async getUcanStore(): Promise { - if (!this.ucanStore) { - this.ucanStore = await this.ucanApi.Store.fromTokens( - atpSemantics, - this.tokens, - ) - } - return this.ucanStore as ucan.StoreI - } - - async clear(): Promise { - // noop - } - - async reset(): Promise { - // noop - } - - // ---------------- - - async keypairDid(): Promise { - const keypair = await this.getKeypair() - return keypair.did() - } - - async did(): Promise { - if (this.controlledDid) { - return this.controlledDid - } - return this.keypairDid() - } - - async canSignForDid(did: string): Promise { - if (did === this.controlledDid) return true - if (did === (await this.keypairDid())) return true - return false - } - - async sign(data: Uint8Array): Promise { - const keypair = await this.getKeypair() - return keypair.sign(data) - } - - async findProof(cap: ucan.Capability): 
Promise { - const ucanStore = await this.getUcanStore() - // we only handle atp caps right now - const resource = parseAtpResource(cap.with) - if (resource === null) return null - const res = await ucan.first( - ucanStore.findWithCapability(await this.did(), cap, resource.did), - ) - if (!res) return null - return res - } - - async findUcan(cap: ucan.Capability): Promise { - const chain = await this.findProof(cap) - if (chain === null) return null - return chain.ucan - } - - async hasUcan(cap: ucan.Capability): Promise { - const found = await this.findUcan(cap) - return found !== null - } - - async createUcan( - audience: string, - cap: ucan.Capability, - lifetime = MONTH_IN_SEC, - ): Promise { - const keypair = await this.getKeypair() - const ucanStore = await this.getUcanStore() - return this.ucanApi.Builder.create() - .issuedBy(keypair) - .toAudience(audience) - .withLifetimeInSeconds(lifetime) - .delegateCapability(cap, ucanStore) - .build() - } - - // Creates a UCAN that permissions all required caps - // We find the vaguest proof possible for each cap to avoid unnecessary duplication - async createUcanForCaps( - audience: string, - caps: ucan.Capability[], - lifetime = MONTH_IN_SEC, - ): Promise { - // @TODO make sure to dedupe proofs - const proofs: CapWithProof[] = [] - for (const cap of caps) { - const proof = await this.vaguestProofForCap(cap) - if (proof === null) { - throw new Error(`Could not find a ucan for capability: ${cap.with}`) - } - proofs.push(proof) - } - - const keypair = await this.getKeypair() - - let builder = this.ucanApi.Builder.create() - .issuedBy(keypair) - .toAudience(audience) - .withLifetimeInSeconds(lifetime) - - for (const prf of proofs) { - builder = builder.delegateCapability(prf.cap, prf.prf, atpSemantics) - } - - return builder.build() - } - - // Finds the most general proof for the given cap - // (And thus most likely to overlap with other proofs) - async vaguestProofForCap(cap: ucan.Capability): Promise { - const prf = await this.findProof(cap) - if (prf === null) return null - const vauger = vaguerCap(cap) - if (vauger === null) return { cap, prf } - const vaugerPrf = await this.vaguestProofForCap(vauger) - if (vaugerPrf === null) return { cap, prf } - return vaugerPrf - } - - // Claim a fully permissioned Ucan & add to store - // Mainly for dev purposes - async claimFull(): Promise { - const keypair = await this.getKeypair() - const ownDid = await this.did() - const token = await this.ucanApi.Builder.create() - .issuedBy(keypair) - .toAudience(ownDid) - .withLifetimeInSeconds(YEAR_IN_SEC) - .claimCapability(writeCap(ownDid)) - .build() - await this.addUcan(token) - return token - } -} - -export default AuthStore diff --git a/packages/auth/src/consts.ts b/packages/auth/src/consts.ts deleted file mode 100644 index f12d91badb9..00000000000 --- a/packages/auth/src/consts.ts +++ /dev/null @@ -1,5 +0,0 @@ -export const MIN_IN_SEC = 60 -export const HOUR_IN_SEC = MIN_IN_SEC * 60 -export const DAY_IN_SEC = HOUR_IN_SEC * 24 -export const MONTH_IN_SEC = DAY_IN_SEC * 30 -export const YEAR_IN_SEC = DAY_IN_SEC * 365 diff --git a/packages/auth/src/index.ts b/packages/auth/src/index.ts deleted file mode 100644 index d28aca99bf1..00000000000 --- a/packages/auth/src/index.ts +++ /dev/null @@ -1,14 +0,0 @@ -export * from './verifier' -export * from './atp-semantics' -export * from './atp-capabilities' -export * from './auth-store' -export * from './verify' -export * from './types' -export * from './signatures' -export * from './plugins' - -export { EcdsaKeypair } 
from '@atproto/crypto' - -export * as ucans from '@ucans/core' -export { encode as encodeUcan } from '@ucans/core' -export type { Ucan, DidableKey } from '@ucans/core' diff --git a/packages/auth/src/plugins.ts b/packages/auth/src/plugins.ts deleted file mode 100644 index 62f2dec1606..00000000000 --- a/packages/auth/src/plugins.ts +++ /dev/null @@ -1,61 +0,0 @@ -import * as ucans from '@ucans/core' - -// @TODO move to ucan package -export type BuildFn = (params: { - issuer: ucans.DidableKey - audience: string - capabilities?: Array - lifetimeInSeconds?: number - expiration?: number - notBefore?: number - facts?: Array - proofs?: Array - addNonce?: boolean -}) => Promise - -export type SignFn = ( - payload: ucans.UcanPayload, - jwtAlg: string, - signFn: (data: Uint8Array) => Promise, -) => Promise - -export type SignWithKeypairFn = ( - payload: ucans.UcanPayload, - keypair: ucans.Keypair, -) => Promise - -export type ValidateFn = ( - encodedUcan: string, - opts?: Partial, -) => Promise - -export type ValidateProofsFn = ( - ucan: ucans.Ucan, - opts?: Partial, -) => AsyncIterable - -export type VerifyFn = ( - ucan: string, - options: ucans.VerifyOptions, -) => Promise> - -export type DelegationChainsFn = ( - semantics: ucans.DelegationSemantics, - ucan: ucans.Ucan, - isRevoked?: (ucan: ucans.Ucan) => Promise, -) => AsyncIterable - -export type BuilderClass = any -export type StoreClass = any - -export type PluginInjectedApi = { - build: BuildFn - sign: SignFn - signWithKeypair: SignWithKeypairFn - validate: ValidateFn - validateProofs: ValidateProofsFn - verify: VerifyFn - Builder: BuilderClass - Store: StoreClass - delegationChains: DelegationChainsFn -} diff --git a/packages/auth/src/signatures.ts b/packages/auth/src/signatures.ts deleted file mode 100644 index e62eddfe995..00000000000 --- a/packages/auth/src/signatures.ts +++ /dev/null @@ -1,16 +0,0 @@ -import * as uint8arrays from 'uint8arrays' -import * as ucans from '@ucans/core' - -export const verifySignature = - (plugins: ucans.Plugins) => - async (did: string, data: Uint8Array, sig: Uint8Array): Promise => { - return plugins.verifySignature(did, data, sig) - } - -export const verifySignatureUtf8 = - (plugins: ucans.Plugins) => - async (did: string, data: string, sig: string): Promise => { - const dataBytes = uint8arrays.fromString(data, 'utf8') - const sigBytes = uint8arrays.fromString(sig, 'base64url') - return verifySignature(plugins)(did, dataBytes, sigBytes) - } diff --git a/packages/auth/src/types.ts b/packages/auth/src/types.ts deleted file mode 100644 index 29f7d5ad4a2..00000000000 --- a/packages/auth/src/types.ts +++ /dev/null @@ -1,10 +0,0 @@ -import * as ucan from '@ucans/core' - -export interface Signer { - sign: (data: Uint8Array) => Promise -} - -export type CapWithProof = { - cap: ucan.Capability - prf: ucan.DelegationChain -} diff --git a/packages/auth/src/verifier.ts b/packages/auth/src/verifier.ts deleted file mode 100644 index 91a34e2b5da..00000000000 --- a/packages/auth/src/verifier.ts +++ /dev/null @@ -1,123 +0,0 @@ -import * as ucans from '@ucans/core' -import { DidableKey, EcdsaKeypair, p256Plugin } from '@atproto/crypto' -import { PluginInjectedApi } from './plugins' -import { verifySignature, verifySignatureUtf8 } from './signatures' -import { verifyUcan, verifyAtpUcan, verifyFullWritePermission } from './verify' -import AuthStore from './auth-store' -import { DidResolver } from '@atproto/did-resolver' - -export const DID_KEY_PLUGINS = [p256Plugin] - -export type VerifierOpts = { - didResolver: DidResolver - 
plcUrl: string - resolutionTimeout: number - additionalDidMethods: Record - additionalDidKeys: [ucans.DidKeyPlugin] -} - -export class Verifier { - didResolver: DidResolver - plugins: ucans.Plugins - ucanApi: PluginInjectedApi - - constructor(opts: Partial = {}) { - const { - additionalDidKeys = [], - additionalDidMethods = {}, - plcUrl, - resolutionTimeout, - } = opts - - const resolver = - opts.didResolver ?? - new DidResolver({ - plcUrl, - timeout: resolutionTimeout, - }) - - // handles did:web & did:plc - const methodPlugins: ucans.DidMethodPlugin = { - checkJwtAlg: (_did, _jwtAlg) => { - return true - }, - verifySignature: async (did, data, sig) => { - const atpData = await resolver.resolveAtpData(did) - return this.verifySignature(atpData.signingKey, data, sig) - }, - } - - const plugins = new ucans.Plugins( - [...DID_KEY_PLUGINS, ...additionalDidKeys], - { - ...additionalDidMethods, - plc: methodPlugins, - web: methodPlugins, - }, - ) - - this.ucanApi = ucans.getPluginInjectedApi(plugins) - this.plugins = plugins - } - - loadAuthStore( - keypair: DidableKey, - tokens: string[], - controlledDid?: string, - ): AuthStore { - return new AuthStore(this.ucanApi, keypair, tokens, controlledDid) - } - - async createTempAuthStore(tokens: string[] = []): Promise { - const keypair = await EcdsaKeypair.create() - return this.loadAuthStore(keypair, tokens) - } - - async verifySignature( - did: string, - data: Uint8Array, - sig: Uint8Array, - ): Promise { - return verifySignature(this.plugins)(did, data, sig) - } - - async verifySignatureUtf8( - did: string, - data: string, - sig: string, - ): Promise { - return verifySignatureUtf8(this.plugins)(did, data, sig) - } - - async verifyUcan( - token: ucans.Ucan | string, - opts: ucans.VerifyOptions, - ): Promise { - return verifyUcan(this.ucanApi)(token, opts) - } - - async verifyAtpUcan( - token: ucans.Ucan | string, - audience: string, - cap: ucans.Capability, - ): Promise { - return verifyAtpUcan(this.ucanApi)(token, audience, cap) - } - - async verifyFullWritePermission( - token: ucans.Ucan | string, - audience: string, - repoDid: string, - ): Promise { - return verifyFullWritePermission(this.ucanApi)(token, audience, repoDid) - } - - async validateUcan( - encodedUcan: string, - opts?: Partial, - ): Promise { - return this.ucanApi.validate(encodedUcan, opts) - } -} - -export default Verifier diff --git a/packages/auth/src/verify.ts b/packages/auth/src/verify.ts deleted file mode 100644 index ac4a5748d61..00000000000 --- a/packages/auth/src/verify.ts +++ /dev/null @@ -1,53 +0,0 @@ -import * as ucans from '@ucans/core' -import { writeCap } from './atp-capabilities' -import { atpSemantics, parseAtpResource } from './atp-semantics' -import { PluginInjectedApi } from './plugins' - -export const verifyUcan = - (ucanApi: PluginInjectedApi) => - async ( - token: ucans.Ucan | string, - opts: ucans.VerifyOptions, - ): Promise => { - const encoded = typeof token === 'string' ? 
token : ucans.encode(token) - const res = await ucanApi.verify(encoded, { - ...opts, - semantics: opts.semantics || atpSemantics, - }) - if (!res.ok) { - if (res.error[0]) { - throw res.error[0] - } else { - throw new Error('Could not find requested capability') - } - } - return ucanApi.validate(encoded) - } - -export const verifyAtpUcan = - (ucanApi: PluginInjectedApi) => - async ( - token: ucans.Ucan | string, - audience: string, - cap: ucans.Capability, - ): Promise => { - const atpResource = parseAtpResource(cap.with) - if (atpResource === null) { - throw new Error(`Expected a valid atp resource: ${cap.with}`) - } - const repoDid = atpResource.did - return verifyUcan(ucanApi)(token, { - audience, - requiredCapabilities: [{ capability: cap, rootIssuer: repoDid }], - }) - } - -export const verifyFullWritePermission = - (ucanApi: PluginInjectedApi) => - async ( - token: ucans.Ucan | string, - audience: string, - repoDid: string, - ): Promise => { - return verifyAtpUcan(ucanApi)(token, audience, writeCap(repoDid)) - } diff --git a/packages/auth/tests/auth.test.ts b/packages/auth/tests/auth.test.ts deleted file mode 100644 index 346fbbb04d3..00000000000 --- a/packages/auth/tests/auth.test.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { writeCap } from '../src/atp-capabilities' -import { Verifier, AuthStore, Ucan, ucans } from '../src' - -describe('tokens for post', () => { - const collection = 'com.example.microblog' - const record = '3iwc-gvs-ehpk-2s' - const serverDid = 'did:example:fakeServerDid' - - const verifier = new Verifier() - - let authStore: AuthStore - let token: Ucan - let rootDid: string - let cap: ucans.Capability - let fullUcan: Ucan - - it('validates a fully claimed ucan from the root DID', async () => { - authStore = await verifier.createTempAuthStore() - fullUcan = await authStore.claimFull() - rootDid = await authStore.did() - - cap = writeCap(rootDid, collection, record) - - await verifier.verifyAtpUcan(fullUcan, fullUcan.payload.aud, cap) - }) - - it('creates a valid token for a post', async () => { - token = await authStore.createUcan(serverDid, cap, 30) - await verifier.verifyAtpUcan(token, serverDid, cap) - }) - - it('throws an error for the wrong collection', async () => { - const collectionCap = writeCap( - rootDid, - 'com.example.otherCollection', - record, - ) - try { - const res = await verifier.verifyAtpUcan(token, serverDid, collectionCap) - expect(res).toBe(null) - } catch (err) { - expect(err).toBeTruthy() - } - }) - - it('throws an error for the wrong record name', async () => { - const recordCap = writeCap(rootDid, collection, '3iwc-gvs-ehpk-2z') - try { - const res = await verifier.verifyAtpUcan(token, serverDid, recordCap) - expect(res).toBe(null) - } catch (err) { - expect(err).toBeTruthy() - } - }) -}) diff --git a/packages/auth/tsconfig.build.json b/packages/auth/tsconfig.build.json deleted file mode 100644 index 27df65b89e2..00000000000 --- a/packages/auth/tsconfig.build.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "extends": "./tsconfig.json", - "exclude": ["**/*.spec.ts", "**/*.test.ts"] -} \ No newline at end of file diff --git a/packages/auth/tsconfig.json b/packages/auth/tsconfig.json deleted file mode 100644 index 57da1d2c5f0..00000000000 --- a/packages/auth/tsconfig.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", // Your outDir, - "emitDeclarationOnly": true - }, - "include": ["./src","__tests__/**/**.ts"], - "references": [{ "path": "../crypto/tsconfig.build.json" }] -} \ No 
newline at end of file diff --git a/packages/aws/README.md b/packages/aws/README.md index eca629fc162..19e36786da6 100644 --- a/packages/aws/README.md +++ b/packages/aws/README.md @@ -1,3 +1,3 @@ # AWS KMS -A DidableKeypair-compatible wrapper for AWS KMS. \ No newline at end of file +A Keypair-compatible wrapper for AWS KMS. \ No newline at end of file diff --git a/packages/aws/src/kms.ts b/packages/aws/src/kms.ts index 02cc725738f..ac1e27c0d66 100644 --- a/packages/aws/src/kms.ts +++ b/packages/aws/src/kms.ts @@ -10,7 +10,7 @@ export type KmsConfig = { keyId: string } & Omit< 'apiVersion' > -export class KmsKeypair implements crypto.DidableKey { +export class KmsKeypair implements crypto.Keypair { jwtAlg = crypto.SECP256K1_JWT_ALG constructor( diff --git a/packages/common/src/blocks.ts b/packages/common/src/blocks.ts index 901ce19cb9f..6c890e36a9d 100644 --- a/packages/common/src/blocks.ts +++ b/packages/common/src/blocks.ts @@ -1,20 +1,30 @@ import { CID } from 'multiformats/cid' import * as Block from 'multiformats/block' import * as rawCodec from 'multiformats/codecs/raw' -import { sha256 as blockHasher } from 'multiformats/hashes/sha2' +import { sha256 } from 'multiformats/hashes/sha2' import * as mf from 'multiformats' -import * as blockCodec from '@ipld/dag-cbor' +import * as cborCodec from '@ipld/dag-cbor' export const valueToIpldBlock = async (data: unknown) => { return Block.encode({ value: data, - codec: blockCodec, - hasher: blockHasher, + codec: cborCodec, + hasher: sha256, }) } +export const verifyCidForBytes = async (cid: CID, bytes: Uint8Array) => { + const digest = await sha256.digest(bytes) + const expected = CID.createV1(cid.code, digest) + if (!cid.equals(expected)) { + throw new Error( + `Not a valid CID for bytes. Expected: ${expected.toString()} Got: ${cid.toString()}`, + ) + } +} + export const sha256RawToCid = (hash: Uint8Array): CID => { - const digest = mf.digest.create(blockHasher.code, hash) + const digest = mf.digest.create(sha256.code, hash) return CID.createV1(rawCodec.code, digest) } @@ -24,11 +34,11 @@ export const cidForData = async (data: unknown): Promise => { } export const valueToIpldBytes = (value: unknown): Uint8Array => { - return blockCodec.encode(value) + return cborCodec.encode(value) } export const ipldBytesToValue = (bytes: Uint8Array) => { - return blockCodec.decode(bytes) + return cborCodec.decode(bytes) } export const ipldBytesToRecord = (bytes: Uint8Array): object => { diff --git a/packages/common/src/check.ts b/packages/common/src/check.ts index db1def6a629..47bdce95958 100644 --- a/packages/common/src/check.ts +++ b/packages/common/src/check.ts @@ -1,13 +1,22 @@ -export interface Def { +import { ZodError } from 'zod' + +export interface Checkable { parse: (obj: unknown) => T - safeParse: (obj: unknown) => { success: boolean } + safeParse: ( + obj: unknown, + ) => { success: true; data: T } | { success: false; error: ZodError } +} + +export interface Def { + name: string + schema: Checkable } -export const is = (obj: unknown, def: Def): obj is T => { +export const is = (obj: unknown, def: Checkable): obj is T => { return def.safeParse(obj).success } -export const assure = (def: Def, obj: unknown): T => { +export const assure = (def: Checkable, obj: unknown): T => { return def.parse(obj) } diff --git a/packages/common/src/types.ts b/packages/common/src/types.ts index 735d4d6b294..b89a7c2a6b8 100644 --- a/packages/common/src/types.ts +++ b/packages/common/src/types.ts @@ -1,45 +1,41 @@ import * as mf from 'multiformats/cid' import { z } 
from 'zod' +import { Def } from './check' -const cid = z +const cidSchema = z .any() .refine((obj: unknown) => mf.CID.asCID(obj) !== null, { message: 'Not a CID', }) .transform((obj: unknown) => mf.CID.asCID(obj) as mf.CID) -export const isCid = (str: string): boolean => { - try { - mf.CID.parse(str) - return true - } catch (err) { - return false - } +export const schema = { + cid: cidSchema, + bytes: z.instanceof(Uint8Array), + string: z.string(), + record: z.record(z.string(), z.unknown()), + unknown: z.unknown(), } -const strToCid = z - .string() - .refine(isCid, { message: 'Not a valid CID' }) - .transform((str: string) => mf.CID.parse(str)) - -const bytes = z.instanceof(Uint8Array) -export type Bytes = z.infer - -const strToInt = z - .string() - .refine((str) => !isNaN(parseInt(str)), { - message: 'Cannot parse string to integer', - }) - .transform((str) => parseInt(str)) - -const strToBool = z.string().transform((str) => str === 'true' || str === 't') - export const def = { - string: z.string(), - record: z.record(z.string(), z.unknown()), - cid, - strToCid, - bytes, - strToInt, - strToBool, + cid: { + name: 'cid', + schema: schema.cid, + } as Def, + bytes: { + name: 'bytes', + schema: schema.bytes, + } as Def, + string: { + name: 'string', + schema: schema.string, + } as Def, + record: { + name: 'record', + schema: schema.record, + } as Def>, + unknown: { + name: 'unknown', + schema: schema.unknown, + } as Def, } diff --git a/packages/crypto/package.json b/packages/crypto/package.json index 5f6eb3f4282..11f1d6f5ab7 100644 --- a/packages/crypto/package.json +++ b/packages/crypto/package.json @@ -15,10 +15,10 @@ "postbuild": "tsc --build tsconfig.build.json" }, "dependencies": { + "@atproto/crypto": "*", "@noble/secp256k1": "^1.7.0", - "@ucans/core": "0.11.0", "big-integer": "^1.6.51", - "multiformats": "^9.6.4", + "multiformats": "^9.6.4", "one-webcrypto": "^1.0.3", "uint8arrays": "3.0.0" } diff --git a/packages/crypto/src/index.ts b/packages/crypto/src/index.ts index b51e9b1762d..487b2110697 100644 --- a/packages/crypto/src/index.ts +++ b/packages/crypto/src/index.ts @@ -4,6 +4,7 @@ export * from './did' export * from './multibase' export * from './random' export * from './sha' +export * from './types' export * from './verify' export * from './p256/keypair' @@ -11,5 +12,3 @@ export * from './p256/plugin' export * from './secp256k1/keypair' export * from './secp256k1/plugin' - -export type { DidableKey } from '@ucans/core' diff --git a/packages/crypto/src/p256/keypair.ts b/packages/crypto/src/p256/keypair.ts index 18c5136093f..3950b7669fb 100644 --- a/packages/crypto/src/p256/keypair.ts +++ b/packages/crypto/src/p256/keypair.ts @@ -1,17 +1,16 @@ import { webcrypto } from 'one-webcrypto' import * as uint8arrays from 'uint8arrays' - -import * as ucan from '@ucans/core' - +import { SupportedEncodings } from 'uint8arrays/util/bases' import * as did from '../did' import * as operations from './operations' import { P256_JWT_ALG } from '../const' +import { Keypair } from '../types' export type EcdsaKeypairOptions = { exportable: boolean } -export class EcdsaKeypair implements ucan.DidableKey { +export class EcdsaKeypair implements Keypair { jwtAlg = P256_JWT_ALG private publicKey: Uint8Array private keypair: CryptoKeyPair @@ -56,7 +55,7 @@ export class EcdsaKeypair implements ucan.DidableKey { return this.publicKey } - publicKeyStr(encoding: ucan.Encodings = 'base64pad'): string { + publicKeyStr(encoding: SupportedEncodings = 'base64pad'): string { return 
uint8arrays.toString(this.publicKey, encoding) } diff --git a/packages/crypto/src/p256/plugin.ts b/packages/crypto/src/p256/plugin.ts index a275736e53b..d2c304d0fde 100644 --- a/packages/crypto/src/p256/plugin.ts +++ b/packages/crypto/src/p256/plugin.ts @@ -1,8 +1,6 @@ -import { DidKeyPlugin } from '@ucans/core' - -import { P256_DID_PREFIX, P256_JWT_ALG } from '../const' - import * as operations from './operations' +import { DidKeyPlugin } from '../types' +import { P256_DID_PREFIX, P256_JWT_ALG } from '../const' export const p256Plugin: DidKeyPlugin = { prefix: P256_DID_PREFIX, diff --git a/packages/crypto/src/secp256k1/keypair.ts b/packages/crypto/src/secp256k1/keypair.ts index 37aaa5791fa..a06a7881a8e 100644 --- a/packages/crypto/src/secp256k1/keypair.ts +++ b/packages/crypto/src/secp256k1/keypair.ts @@ -1,14 +1,15 @@ -import * as ucan from '@ucans/core' import * as secp from '@noble/secp256k1' import * as uint8arrays from 'uint8arrays' +import { SupportedEncodings } from 'uint8arrays/util/bases' import * as did from '../did' import { SECP256K1_JWT_ALG } from '../const' +import { Keypair } from '../types' export type Secp256k1KeypairOptions = { exportable: boolean } -export class Secp256k1Keypair implements ucan.DidableKey { +export class Secp256k1Keypair implements Keypair { jwtAlg = SECP256K1_JWT_ALG private publicKey: Uint8Array @@ -40,7 +41,7 @@ export class Secp256k1Keypair implements ucan.DidableKey { return this.publicKey } - publicKeyStr(encoding: ucan.Encodings = 'base64pad'): string { + publicKeyStr(encoding: SupportedEncodings = 'base64pad'): string { return uint8arrays.toString(this.publicKey, encoding) } diff --git a/packages/crypto/src/secp256k1/plugin.ts b/packages/crypto/src/secp256k1/plugin.ts index 6db5bad3909..5184a3778fa 100644 --- a/packages/crypto/src/secp256k1/plugin.ts +++ b/packages/crypto/src/secp256k1/plugin.ts @@ -1,5 +1,5 @@ -import { DidKeyPlugin } from '@ucans/core' import * as operations from './operations' +import { DidKeyPlugin } from '../types' import { SECP256K1_DID_PREFIX, SECP256K1_JWT_ALG } from '../const' export const secp256k1Plugin: DidKeyPlugin = { diff --git a/packages/crypto/src/types.ts b/packages/crypto/src/types.ts new file mode 100644 index 00000000000..20c7a5ea78a --- /dev/null +++ b/packages/crypto/src/types.ts @@ -0,0 +1,19 @@ +export interface Signer { + sign(msg: Uint8Array): Promise +} + +export interface Didable { + did(): string +} + +export interface Keypair extends Signer, Didable {} + +export type DidKeyPlugin = { + prefix: Uint8Array + jwtAlg: string + verifySignature: ( + did: string, + msg: Uint8Array, + data: Uint8Array, + ) => Promise +} diff --git a/packages/crypto/src/verify.ts b/packages/crypto/src/verify.ts index 49a17e16f38..43b2670c7cd 100644 --- a/packages/crypto/src/verify.ts +++ b/packages/crypto/src/verify.ts @@ -1,15 +1,26 @@ +import * as uint8arrays from 'uint8arrays' import { parseDidKey } from './did' import plugins from './plugins' -export const verifyDidSig = ( - did: string, +export const verifySignature = ( + didKey: string, data: Uint8Array, sig: Uint8Array, ): Promise => { - const parsed = parseDidKey(did) + const parsed = parseDidKey(didKey) const plugin = plugins.find((p) => p.jwtAlg === parsed.jwtAlg) if (!plugin) { throw new Error(`Unsupported signature alg: :${parsed.jwtAlg}`) } - return plugin.verifySignature(did, data, sig) + return plugin.verifySignature(didKey, data, sig) +} + +export const verifySignatureUtf8 = async ( + didKey: string, + data: string, + sig: string, +): Promise => { + const 
dataBytes = uint8arrays.fromString(data, 'utf8') + const sigBytes = uint8arrays.fromString(sig, 'base64url') + return verifySignature(didKey, dataBytes, sigBytes) } diff --git a/packages/crypto/tsconfig.json b/packages/crypto/tsconfig.json index 624cc32a35f..3fbf9be377c 100644 --- a/packages/crypto/tsconfig.json +++ b/packages/crypto/tsconfig.json @@ -4,5 +4,8 @@ "outDir": "./dist", // Your outDir, "emitDeclarationOnly": true }, - "include": ["./src","__tests__/**/**.ts"] + "include": ["./src","__tests__/**/**.ts"], + "references": [ + { "path": "../crypto/tsconfig.build.json" } + ] } \ No newline at end of file diff --git a/packages/did-resolver/src/resolver.ts b/packages/did-resolver/src/resolver.ts index eaef35bf2b3..540b3cbbdcc 100644 --- a/packages/did-resolver/src/resolver.ts +++ b/packages/did-resolver/src/resolver.ts @@ -4,6 +4,7 @@ import { DIDResolutionOptions, DIDResolutionResult, } from 'did-resolver' +import * as crypto from '@atproto/crypto' import * as web from './web-resolver' import * as plc from './plc-resolver' import * as atpDid from './atp-did' @@ -55,6 +56,24 @@ export class DidResolver { const didDocument = await this.ensureResolveDid(did) return atpDid.ensureAtpDocument(didDocument) } + + async resolveSigningKey(did: string): Promise { + if (did.startsWith('did:key:')) { + return did + } else { + const data = await this.resolveAtpData(did) + return data.signingKey + } + } + + async verifySignature( + did: string, + data: Uint8Array, + sig: Uint8Array, + ): Promise { + const signingKey = await this.resolveSigningKey(did) + return crypto.verifySignature(signingKey, data, sig) + } } export const resolver = new DidResolver() diff --git a/packages/pds/package.json b/packages/pds/package.json index b31d5e5040e..7ac89e04016 100644 --- a/packages/pds/package.json +++ b/packages/pds/package.json @@ -21,7 +21,6 @@ "migration:create": "ts-node ./bin/migration-create.ts" }, "dependencies": { - "@atproto/auth": "*", "@atproto/common": "*", "@atproto/crypto": "*", "@atproto/did-resolver": "*", diff --git a/packages/pds/src/api/app/bsky/actor/createScene.ts b/packages/pds/src/api/app/bsky/actor/createScene.ts index 16ec221269e..aadd7177a09 100644 --- a/packages/pds/src/api/app/bsky/actor/createScene.ts +++ b/packages/pds/src/api/app/bsky/actor/createScene.ts @@ -91,8 +91,6 @@ export default function (server: Server, ctx: AppContext) { `Could not locate user declaration for ${requester}`, ) } - const userAuth = ctx.getAuthstore(requester) - const sceneAuth = ctx.getAuthstore(did) const sceneWrites = await Promise.all([ repo.prepareCreate({ @@ -161,8 +159,8 @@ export default function (server: Server, ctx: AppContext) { ]) await Promise.all([ - repoTxn.createRepo(did, sceneAuth, sceneWrites, now), - repoTxn.writeToRepo(requester, userAuth, userWrites, now), + repoTxn.createRepo(did, sceneWrites, now), + repoTxn.writeToRepo(requester, userWrites, now), repoTxn.indexWrites([...sceneWrites, ...userWrites], now), ]) diff --git a/packages/pds/src/api/app/bsky/actor/updateProfile.ts b/packages/pds/src/api/app/bsky/actor/updateProfile.ts index 5d2482eef85..c55e986e89b 100644 --- a/packages/pds/src/api/app/bsky/actor/updateProfile.ts +++ b/packages/pds/src/api/app/bsky/actor/updateProfile.ts @@ -24,7 +24,6 @@ export default function (server: Server, ctx: AppContext) { if (!authorized) { throw new AuthRequiredError() } - const authStore = await ctx.getAuthstore(did) const uri = new AtUri(`${did}/${profileNsid}/self`) const { profileCid, updated } = await ctx.db.transaction( @@ -80,7 +79,7 @@ 
export default function (server: Server, ctx: AppContext) { record: updated, }) - const commit = await repoTxn.writeToRepo(did, authStore, [write], now) + const commit = await repoTxn.writeToRepo(did, [write], now) await repoTxn.blobs.processWriteBlobs(did, commit, [write]) let profileCid: CID diff --git a/packages/pds/src/api/app/bsky/feed/setVote.ts b/packages/pds/src/api/app/bsky/feed/setVote.ts index 0ecf4396c7d..7c0783914d1 100644 --- a/packages/pds/src/api/app/bsky/feed/setVote.ts +++ b/packages/pds/src/api/app/bsky/feed/setVote.ts @@ -11,7 +11,6 @@ export default function (server: Server, ctx: AppContext) { const { subject, direction } = input.body const requester = auth.credentials.did - const authStore = await ctx.getAuthstore(requester) const now = new Date().toISOString() const voteUri = await ctx.db.transaction(async (dbTxn) => { @@ -63,7 +62,7 @@ export default function (server: Server, ctx: AppContext) { } await Promise.all([ - await repoTxn.writeToRepo(requester, authStore, writes, now), + await repoTxn.writeToRepo(requester, writes, now), await repoTxn.indexWrites(writes, now), ]) diff --git a/packages/pds/src/api/com/atproto/account.ts b/packages/pds/src/api/com/atproto/account.ts index 27c0f4b49d9..c68e3e52bff 100644 --- a/packages/pds/src/api/com/atproto/account.ts +++ b/packages/pds/src/api/com/atproto/account.ts @@ -148,8 +148,7 @@ export default function (server: Server, ctx: AppContext) { }) // Setup repo root - const authStore = ctx.getAuthstore(did) - await repoTxn.createRepo(did, authStore, [write], now) + await repoTxn.createRepo(did, [write], now) await repoTxn.indexWrites([write], now) const declarationCid = await cidForData(declaration) diff --git a/packages/pds/src/api/com/atproto/repo.ts b/packages/pds/src/api/com/atproto/repo.ts index e0514da5571..c744213274c 100644 --- a/packages/pds/src/api/com/atproto/repo.ts +++ b/packages/pds/src/api/com/atproto/repo.ts @@ -116,7 +116,6 @@ export default function (server: Server, ctx: AppContext) { ) } - const authStore = ctx.getAuthstore(did) const hasUpdate = tx.writes.some( (write) => write.action === WriteOpAction.Update, ) @@ -159,7 +158,7 @@ export default function (server: Server, ctx: AppContext) { await ctx.db.transaction(async (dbTxn) => { const now = new Date().toISOString() const repoTxn = ctx.services.repo(dbTxn) - await repoTxn.processWrites(did, authStore, writes, now) + await repoTxn.processWrites(did, writes, now) }) }, }) @@ -177,7 +176,6 @@ export default function (server: Server, ctx: AppContext) { if (!authorized) { throw new AuthRequiredError() } - const authStore = ctx.getAuthstore(did) if (validate === false) { throw new InvalidRequestError( 'Unvalidated writes are not yet supported.', @@ -206,7 +204,7 @@ export default function (server: Server, ctx: AppContext) { await ctx.db.transaction(async (dbTxn) => { const repoTxn = ctx.services.repo(dbTxn) - await repoTxn.processWrites(did, authStore, [write], now) + await repoTxn.processWrites(did, [write], now) }) return { @@ -232,15 +230,10 @@ export default function (server: Server, ctx: AppContext) { throw new AuthRequiredError() } - const authStore = ctx.getAuthstore(did) const now = new Date().toISOString() - const write = await repo.prepareDelete({ did, collection, rkey }) - await ctx.db.transaction(async (dbTxn) => { - await ctx.services - .repo(dbTxn) - .processWrites(did, authStore, [write], now) + await ctx.services.repo(dbTxn).processWrites(did, [write], now) }) }, }) diff --git a/packages/pds/src/api/com/atproto/sync.ts 
b/packages/pds/src/api/com/atproto/sync.ts index 0813397e8be..1ec27707f41 100644 --- a/packages/pds/src/api/com/atproto/sync.ts +++ b/packages/pds/src/api/com/atproto/sync.ts @@ -1,12 +1,12 @@ -import { Server } from '../../../lexicon' -import { InvalidRequestError } from '@atproto/xrpc-server' -import { def as common } from '@atproto/common' +import { CID } from 'multiformats/cid' import { Repo } from '@atproto/repo' +import { InvalidRequestError } from '@atproto/xrpc-server' +import { Server } from '../../../lexicon' import SqlRepoStorage from '../../../sql-repo-storage' import AppContext from '../../../context' export default function (server: Server, ctx: AppContext) { - server.com.atproto.sync.getRoot(async ({ params }) => { + server.com.atproto.sync.getHead(async ({ params }) => { const { did } = params const storage = new SqlRepoStorage(ctx.db, did) const root = await storage.getHead() @@ -19,6 +19,29 @@ export default function (server: Server, ctx: AppContext) { } }) + server.com.atproto.sync.getCommitPath(async ({ params }) => { + const { did } = params + const storage = new SqlRepoStorage(ctx.db, did) + const earliest = params.earliest ? CID.parse(params.earliest) : null + const latest = params.latest + ? CID.parse(params.latest) + : await storage.getHead() + if (latest === null) { + throw new InvalidRequestError(`Could not find root for DID: ${did}`) + } + const commitPath = await storage.getCommitPath(latest, earliest) + if (commitPath === null) { + throw new InvalidRequestError( + `Could not find a valid commit path from ${latest.toString()} to ${earliest?.toString()}`, + ) + } + const commits = commitPath.map((c) => c.toString()) + return { + encoding: 'application/json', + body: { commits }, + } + }) + server.com.atproto.sync.getRepo(async ({ params }) => { const { did, from = null } = params const storage = new SqlRepoStorage(ctx.db, did) @@ -27,14 +50,26 @@ export default function (server: Server, ctx: AppContext) { throw new InvalidRequestError(`Could not find repo for DID: ${did}`) } const repo = await Repo.load(storage, root) - const fromCid = from ? common.strToCid.parse(from) : null - const diff = await repo.getDiffCar(fromCid) + const fromCid = from ? CID.parse(from) : null + const diff = await repo.getDiff(fromCid) return { encoding: 'application/cbor', body: Buffer.from(diff), } }) + server.com.atproto.sync.getCheckout(async ({ params }) => { + const { did } = params + const storage = new SqlRepoStorage(ctx.db, did) + const commit = params.commit ? 
CID.parse(params.commit) : undefined + const repo = await Repo.load(storage, commit) + const checkout = await repo.getCheckout() + return { + encoding: 'application/cbor', + body: Buffer.from(checkout), + } + }) + server.com.atproto.sync.updateRepo(async () => { throw new InvalidRequestError('Not implemented') }) diff --git a/packages/pds/src/auth.ts b/packages/pds/src/auth.ts index 5a857555169..52d138e2466 100644 --- a/packages/pds/src/auth.ts +++ b/packages/pds/src/auth.ts @@ -1,4 +1,3 @@ -import * as auth from '@atproto/auth' import * as crypto from '@atproto/crypto' import { AuthRequiredError, InvalidRequestError } from '@atproto/xrpc-server' import * as uint8arrays from 'uint8arrays' @@ -33,13 +32,11 @@ export class ServerAuth { private _secret: string private _adminPass: string didResolver: DidResolver - verifier: auth.Verifier constructor(opts: ServerAuthOpts) { this._secret = opts.jwtSecret this._adminPass = opts.adminPass this.didResolver = opts.didResolver - this.verifier = new auth.Verifier({ didResolver: opts.didResolver }) } createAccessToken(did: string, expiresIn?: string | number) { diff --git a/packages/pds/src/context.ts b/packages/pds/src/context.ts index 77784ab4fdc..211ed5288eb 100644 --- a/packages/pds/src/context.ts +++ b/packages/pds/src/context.ts @@ -1,5 +1,5 @@ -import { DidableKey } from '@atproto/auth' import * as plc from '@atproto/plc' +import * as crypto from '@atproto/crypto' import { Database } from './db' import { ServerConfig } from './config' import * as auth from './auth' @@ -14,7 +14,7 @@ export class AppContext { private opts: { db: Database blobstore: BlobStore - keypair: DidableKey + keypair: crypto.Keypair auth: auth.ServerAuth imgUriBuilder: ImageUriBuilder cfg: ServerConfig @@ -32,7 +32,7 @@ export class AppContext { return this.opts.blobstore } - get keypair(): DidableKey { + get keypair(): crypto.Keypair { return this.opts.keypair } @@ -75,10 +75,6 @@ export class AppContext { get plcClient(): plc.PlcClient { return new plc.PlcClient(this.cfg.didPlcUrl) } - - getAuthstore(did: string) { - return this.auth.verifier.loadAuthStore(this.keypair, [], did) - } } export default AppContext diff --git a/packages/pds/src/db/migrations/20221221T013010374Z-repo-sync-data.ts b/packages/pds/src/db/migrations/20221221T013010374Z-repo-sync-data.ts index be4c8497d1c..9451251f223 100644 --- a/packages/pds/src/db/migrations/20221221T013010374Z-repo-sync-data.ts +++ b/packages/pds/src/db/migrations/20221221T013010374Z-repo-sync-data.ts @@ -50,13 +50,13 @@ export async function up(db: DatabaseSchema): Promise { const prev = commitData[i - 1] commit.blocks.forEach((_bytes, cid) => { commitBlock.push({ - commit: commit.root.toString(), + commit: commit.commit.toString(), block: cid.toString(), }) }) commitHistory.push({ - commit: commit.root.toString(), - prev: prev ? prev.root.toString() : null, + commit: commit.commit.toString(), + prev: prev ? 
prev.commit.toString() : null, }) } const promises: Promise[] = [] diff --git a/packages/pds/src/event-stream/consumers/index.ts b/packages/pds/src/event-stream/consumers/index.ts index ba4a9b10d28..e9c7964aca4 100644 --- a/packages/pds/src/event-stream/consumers/index.ts +++ b/packages/pds/src/event-stream/consumers/index.ts @@ -1,6 +1,5 @@ import { BlobStore } from '@atproto/repo' -import { DidableKey } from '@atproto/crypto' -import { ServerAuth } from '../../auth' +import * as crypto from '@atproto/crypto' import AddMemberConsumer from './add-member' import RemoveMemberConsumer from './remove-member' import AddUpvoteConsumer from './add-upvote' @@ -13,18 +12,14 @@ import { MessageQueue } from '../types' export const listen = ( messageQueue: MessageQueue, blobstore: BlobStore, - auth: ServerAuth, - keypair: DidableKey, + keypair: crypto.Keypair, ) => { - const getAuthStore = (did: string) => { - return auth.verifier.loadAuthStore(keypair, [], did) - } messageQueue.listen('add_member', new AddMemberConsumer()) messageQueue.listen('remove_member', new RemoveMemberConsumer()) messageQueue.listen('add_upvote', new AddUpvoteConsumer()) messageQueue.listen( 'scene_votes_on_post__table_updates', - new SceneVotesOnPostConsumer(getAuthStore, messageQueue, blobstore), + new SceneVotesOnPostConsumer(keypair, messageQueue, blobstore), ) messageQueue.listen('remove_upvote', new RemoveUpvoteConsumer()) messageQueue.listen('create_notification', new CreateNotificationConsumer()) diff --git a/packages/pds/src/event-stream/consumers/scene-votes-on-post.ts b/packages/pds/src/event-stream/consumers/scene-votes-on-post.ts index 38daf0e7c4e..71578d93d6d 100644 --- a/packages/pds/src/event-stream/consumers/scene-votes-on-post.ts +++ b/packages/pds/src/event-stream/consumers/scene-votes-on-post.ts @@ -1,4 +1,4 @@ -import { AuthStore } from '@atproto/auth' +import * as crypto from '@atproto/crypto' import { BlobStore } from '@atproto/repo' import Database from '../../db' import * as repo from '../../repo' @@ -9,7 +9,7 @@ import { SceneVotesOnPostTableUpdates } from '../messages' export default class extends Consumer { constructor( - private getAuthStore: GetAuthStoreFn, + private keypair: crypto.Keypair, private messageQueue: MessageQueue, private blobstore: BlobStore, ) { @@ -46,7 +46,6 @@ export default class extends Consumer { if (!shouldTrend) return // this is a "threshold vote" that makes the post trend - const sceneAuth = this.getAuthStore(scene.did) const write = await repo.prepareCreate({ did: scene.did, collection: lexicons.ids.AppBskyFeedTrend, @@ -65,10 +64,15 @@ export default class extends Consumer { .where('subject', '=', scene.subject) .execute() - const repoTxn = new RepoService(db, this.messageQueue, this.blobstore) + const repoTxn = new RepoService( + db, + this.keypair, + this.messageQueue, + this.blobstore, + ) await Promise.all([ - repoTxn.writeToRepo(scene.did, sceneAuth, [write], now), + repoTxn.writeToRepo(scene.did, [write], now), repoTxn.indexWrites([write], now), setTrendPosted, ]) @@ -76,5 +80,3 @@ export default class extends Consumer { ) } } - -type GetAuthStoreFn = (did: string) => AuthStore diff --git a/packages/pds/src/index.ts b/packages/pds/src/index.ts index 6361d065be0..0088d4845f8 100644 --- a/packages/pds/src/index.ts +++ b/packages/pds/src/index.ts @@ -8,7 +8,7 @@ import express from 'express' import cors from 'cors' import http from 'http' import events from 'events' -import { DidableKey } from '@atproto/auth' +import * as crypto from '@atproto/crypto' import { 
BlobStore } from '@atproto/repo' import { DidResolver } from '@atproto/did-resolver' import API, { health } from './api' @@ -47,7 +47,7 @@ export class PDS { static create(opts: { db: Database blobstore: BlobStore - keypair: DidableKey + keypair: crypto.Keypair config: ServerConfig }): PDS { const { db, blobstore, keypair, config } = opts @@ -59,7 +59,7 @@ export class PDS { }) const messageQueue = new SqlMessageQueue('pds', db) - streamConsumers.listen(messageQueue, blobstore, auth, keypair) + streamConsumers.listen(messageQueue, blobstore, keypair) const mailTransport = config.emailSmtpUrl !== undefined @@ -91,7 +91,12 @@ export class PDS { config.imgUriKey, ) - const services = createServices({ messageQueue, blobstore, imgUriBuilder }) + const services = createServices({ + keypair, + messageQueue, + blobstore, + imgUriBuilder, + }) const ctx = new AppContext({ db, diff --git a/packages/pds/src/lexicon/index.ts b/packages/pds/src/lexicon/index.ts index 762df1d22e6..f3a0a35d62e 100644 --- a/packages/pds/src/lexicon/index.ts +++ b/packages/pds/src/lexicon/index.ts @@ -28,8 +28,10 @@ import * as ComAtprotoSessionCreate from './types/com/atproto/session/create' import * as ComAtprotoSessionDelete from './types/com/atproto/session/delete' import * as ComAtprotoSessionGet from './types/com/atproto/session/get' import * as ComAtprotoSessionRefresh from './types/com/atproto/session/refresh' +import * as ComAtprotoSyncGetCheckout from './types/com/atproto/sync/getCheckout' +import * as ComAtprotoSyncGetCommitPath from './types/com/atproto/sync/getCommitPath' +import * as ComAtprotoSyncGetHead from './types/com/atproto/sync/getHead' import * as ComAtprotoSyncGetRepo from './types/com/atproto/sync/getRepo' -import * as ComAtprotoSyncGetRoot from './types/com/atproto/sync/getRoot' import * as ComAtprotoSyncUpdateRepo from './types/com/atproto/sync/updateRepo' import * as AppBskyActorCreateScene from './types/app/bsky/actor/createScene' import * as AppBskyActorGetProfile from './types/app/bsky/actor/getProfile' @@ -313,17 +315,31 @@ export class SyncNS { this._server = server } - getRepo( - cfg: ConfigOf>>, + getCheckout( + cfg: ConfigOf>>, ) { - const nsid = 'com.atproto.sync.getRepo' // @ts-ignore + const nsid = 'com.atproto.sync.getCheckout' // @ts-ignore + return this._server.xrpc.method(nsid, cfg) + } + + getCommitPath( + cfg: ConfigOf>>, + ) { + const nsid = 'com.atproto.sync.getCommitPath' // @ts-ignore return this._server.xrpc.method(nsid, cfg) } - getRoot( - cfg: ConfigOf>>, + getHead( + cfg: ConfigOf>>, ) { - const nsid = 'com.atproto.sync.getRoot' // @ts-ignore + const nsid = 'com.atproto.sync.getHead' // @ts-ignore + return this._server.xrpc.method(nsid, cfg) + } + + getRepo( + cfg: ConfigOf>>, + ) { + const nsid = 'com.atproto.sync.getRepo' // @ts-ignore return this._server.xrpc.method(nsid, cfg) } diff --git a/packages/pds/src/lexicon/lexicons.ts b/packages/pds/src/lexicon/lexicons.ts index c1d121029db..56061c61263 100644 --- a/packages/pds/src/lexicon/lexicons.ts +++ b/packages/pds/src/lexicon/lexicons.ts @@ -828,9 +828,9 @@ export const schemaDict = { }, }, }, - ComAtprotoSyncGetRepo: { + ComAtprotoSyncGetCheckout: { lexicon: 1, - id: 'com.atproto.sync.getRepo', + id: 'com.atproto.sync.getCheckout', defs: { main: { type: 'query', @@ -843,9 +843,10 @@ export const schemaDict = { type: 'string', description: 'The DID of the repo.', }, - from: { + commit: { type: 'string', - description: 'A past commit CID.', + description: + 'The commit to get the checkout from. 
Defaults to current HEAD.', }, }, }, @@ -855,13 +856,56 @@ export const schemaDict = { }, }, }, - ComAtprotoSyncGetRoot: { + ComAtprotoSyncGetCommitPath: { + lexicon: 1, + id: 'com.atproto.sync.getCommitPath', + defs: { + main: { + type: 'query', + description: 'Gets the path of repo commits', + parameters: { + type: 'params', + required: ['did'], + properties: { + did: { + type: 'string', + description: 'The DID of the repo.', + }, + latest: { + type: 'string', + description: 'The most recent commit', + }, + earliest: { + type: 'string', + description: 'The earliest commit to start from', + }, + }, + }, + output: { + encoding: 'application/json', + schema: { + type: 'object', + required: ['commits'], + properties: { + commits: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + }, + }, + }, + }, + ComAtprotoSyncGetHead: { lexicon: 1, - id: 'com.atproto.sync.getRoot', + id: 'com.atproto.sync.getHead', defs: { main: { type: 'query', - description: 'Gets the current root CID of a repo.', + description: 'Gets the current HEAD CID of a repo.', parameters: { type: 'params', required: ['did'], @@ -887,6 +931,33 @@ export const schemaDict = { }, }, }, + ComAtprotoSyncGetRepo: { + lexicon: 1, + id: 'com.atproto.sync.getRepo', + defs: { + main: { + type: 'query', + description: 'Gets the repo state.', + parameters: { + type: 'params', + required: ['did'], + properties: { + did: { + type: 'string', + description: 'The DID of the repo.', + }, + from: { + type: 'string', + description: 'A past commit CID.', + }, + }, + }, + output: { + encoding: 'application/cbor', + }, + }, + }, + }, ComAtprotoSyncUpdateRepo: { lexicon: 1, id: 'com.atproto.sync.updateRepo', @@ -2998,8 +3069,10 @@ export const ids = { ComAtprotoSessionDelete: 'com.atproto.session.delete', ComAtprotoSessionGet: 'com.atproto.session.get', ComAtprotoSessionRefresh: 'com.atproto.session.refresh', + ComAtprotoSyncGetCheckout: 'com.atproto.sync.getCheckout', + ComAtprotoSyncGetCommitPath: 'com.atproto.sync.getCommitPath', + ComAtprotoSyncGetHead: 'com.atproto.sync.getHead', ComAtprotoSyncGetRepo: 'com.atproto.sync.getRepo', - ComAtprotoSyncGetRoot: 'com.atproto.sync.getRoot', ComAtprotoSyncUpdateRepo: 'com.atproto.sync.updateRepo', AppBskyActorCreateScene: 'app.bsky.actor.createScene', AppBskyActorGetProfile: 'app.bsky.actor.getProfile', diff --git a/packages/pds/src/lexicon/types/com/atproto/sync/getCheckout.ts b/packages/pds/src/lexicon/types/com/atproto/sync/getCheckout.ts new file mode 100644 index 00000000000..4163d810133 --- /dev/null +++ b/packages/pds/src/lexicon/types/com/atproto/sync/getCheckout.ts @@ -0,0 +1,35 @@ +/** + * GENERATED CODE - DO NOT MODIFY + */ +import express from 'express' +import stream from 'stream' +import { HandlerAuth } from '@atproto/xrpc-server' + +export interface QueryParams { + /** The DID of the repo. */ + did: string + /** The commit to get the checkout from. Defaults to current HEAD. 
*/ + commit?: string +} + +export type InputSchema = undefined +export type HandlerInput = undefined + +export interface HandlerSuccess { + encoding: 'application/cbor' + body: Uint8Array | stream.Readable +} + +export interface HandlerError { + status: number + message?: string +} + +export type HandlerOutput = HandlerError | HandlerSuccess +export type Handler = (ctx: { + auth: HA + params: QueryParams + input: HandlerInput + req: express.Request + res: express.Response +}) => Promise | HandlerOutput diff --git a/packages/pds/src/lexicon/types/com/atproto/sync/getCommitPath.ts b/packages/pds/src/lexicon/types/com/atproto/sync/getCommitPath.ts new file mode 100644 index 00000000000..380085761b2 --- /dev/null +++ b/packages/pds/src/lexicon/types/com/atproto/sync/getCommitPath.ts @@ -0,0 +1,42 @@ +/** + * GENERATED CODE - DO NOT MODIFY + */ +import express from 'express' +import { HandlerAuth } from '@atproto/xrpc-server' + +export interface QueryParams { + /** The DID of the repo. */ + did: string + /** The most recent commit */ + latest?: string + /** The earliest commit to start from */ + earliest?: string +} + +export type InputSchema = undefined + +export interface OutputSchema { + commits: string[] + [k: string]: unknown +} + +export type HandlerInput = undefined + +export interface HandlerSuccess { + encoding: 'application/json' + body: OutputSchema +} + +export interface HandlerError { + status: number + message?: string +} + +export type HandlerOutput = HandlerError | HandlerSuccess +export type Handler = (ctx: { + auth: HA + params: QueryParams + input: HandlerInput + req: express.Request + res: express.Response +}) => Promise | HandlerOutput diff --git a/packages/pds/src/lexicon/types/com/atproto/sync/getRoot.ts b/packages/pds/src/lexicon/types/com/atproto/sync/getHead.ts similarity index 100% rename from packages/pds/src/lexicon/types/com/atproto/sync/getRoot.ts rename to packages/pds/src/lexicon/types/com/atproto/sync/getHead.ts diff --git a/packages/pds/src/services/index.ts b/packages/pds/src/services/index.ts index 52911f9c13b..f9e0411a8b6 100644 --- a/packages/pds/src/services/index.ts +++ b/packages/pds/src/services/index.ts @@ -1,3 +1,4 @@ +import * as crypto from '@atproto/crypto' import { BlobStore } from '@atproto/repo' import Database from '../db' import { MessageQueue } from '../event-stream/types' @@ -9,17 +10,18 @@ import { RecordService } from './record' import { RepoService } from './repo' export function createServices(resources: { + keypair: crypto.Keypair messageQueue: MessageQueue blobstore: BlobStore imgUriBuilder: ImageUriBuilder }): Services { - const { messageQueue, blobstore, imgUriBuilder } = resources + const { keypair, messageQueue, blobstore, imgUriBuilder } = resources return { actor: ActorService.creator(), auth: AuthService.creator(), feed: FeedService.creator(imgUriBuilder), record: RecordService.creator(messageQueue), - repo: RepoService.creator(messageQueue, blobstore), + repo: RepoService.creator(keypair, messageQueue, blobstore), } } diff --git a/packages/pds/src/services/repo/index.ts b/packages/pds/src/services/repo/index.ts index c46a24b58b6..99a175c3f38 100644 --- a/packages/pds/src/services/repo/index.ts +++ b/packages/pds/src/services/repo/index.ts @@ -1,5 +1,5 @@ import { CID } from 'multiformats/cid' -import * as auth from '@atproto/auth' +import * as crypto from '@atproto/crypto' import { BlobStore, Repo, WriteOpAction } from '@atproto/repo' import { InvalidRequestError } from '@atproto/xrpc-server' import Database from '../../db' @@ 
-15,14 +15,20 @@ export class RepoService { constructor( public db: Database, + public keypair: crypto.Keypair, public messageQueue: MessageQueue, public blobstore: BlobStore, ) { this.blobs = new RepoBlobs(db, blobstore) } - static creator(messageQueue: MessageQueue, blobstore: BlobStore) { - return (db: Database) => new RepoService(db, messageQueue, blobstore) + static creator( + keypair: crypto.Keypair, + messageQueue: MessageQueue, + blobstore: BlobStore, + ) { + return (db: Database) => + new RepoService(db, keypair, messageQueue, blobstore) } async isUserControlledRepo( @@ -41,28 +47,18 @@ export class RepoService { return !!found } - async createRepo( - did: string, - authStore: auth.AuthStore, - writes: PreparedCreate[], - now: string, - ) { + async createRepo(did: string, writes: PreparedCreate[], now: string) { this.db.assertTransaction() const storage = new SqlRepoStorage(this.db, did, now) const writeOps = writes.map(createWriteToOp) - await Repo.create(storage, did, authStore, writeOps) + await Repo.create(storage, did, this.keypair, writeOps) } - async processWrites( - did: string, - authStore: auth.AuthStore, - writes: PreparedWrite[], - now: string, - ) { + async processWrites(did: string, writes: PreparedWrite[], now: string) { // make structural write to repo & send to indexing // @TODO get commitCid first so we can do all db actions in tandem const [commit] = await Promise.all([ - this.writeToRepo(did, authStore, writes, now), + this.writeToRepo(did, writes, now), this.indexWrites(writes, now), ]) // make blobs permanent & associate w commit + recordUri in DB @@ -71,7 +67,6 @@ export class RepoService { async writeToRepo( did: string, - authStore: auth.AuthStore, writes: PreparedWrite[], now: string, ): Promise { @@ -85,7 +80,7 @@ export class RepoService { } const writeOps = writes.map(writeToOp) const repo = await Repo.load(storage, currRoot) - const updated = await repo.applyCommit(writeOps, authStore) + const updated = await repo.applyCommit(writeOps, this.keypair) return updated.cid } diff --git a/packages/pds/src/sql-repo-storage.ts b/packages/pds/src/sql-repo-storage.ts index 5b1e74dfb35..93df0d26d68 100644 --- a/packages/pds/src/sql-repo-storage.ts +++ b/packages/pds/src/sql-repo-storage.ts @@ -1,13 +1,15 @@ -import { CommitBlockData, CommitData, RepoStorage } from '@atproto/repo' -import BlockMap from '@atproto/repo/src/block-map' +import { CommitData, RepoStorage, BlockMap, CidSet } from '@atproto/repo' import { chunkArray } from '@atproto/common' import { CID } from 'multiformats/cid' import Database from './db' import { IpldBlock } from './db/tables/ipld-block' import { IpldBlockCreator } from './db/tables/ipld-block-creator' +import { RepoCommitBlock } from './db/tables/repo-commit-block' +import { RepoCommitHistory } from './db/tables/repo-commit-history' export class SqlRepoStorage extends RepoStorage { cache: BlockMap = new BlockMap() + constructor( public db: Database, public did: string, @@ -55,7 +57,7 @@ export class SqlRepoStorage extends RepoStorage { } } - async getSavedBytes(cid: CID): Promise { + async getBytes(cid: CID): Promise { const cached = this.cache.get(cid) if (cached) return cached const found = await this.db.db @@ -74,11 +76,42 @@ export class SqlRepoStorage extends RepoStorage { return found.content } - async hasSavedBytes(cid: CID): Promise { - const got = await this.getSavedBytes(cid) + async has(cid: CID): Promise { + const got = await this.getBytes(cid) return !!got } + async getBlocks(cids: CID[]): Promise<{ blocks: BlockMap; 
missing: CID[] }> { + const cached = this.cache.getMany(cids) + if (cached.missing.length < 1) return cached + const missing = new CidSet(cached.missing) + const missingStr = cached.missing.map((c) => c.toString()) + const blocks = new BlockMap() + await Promise.all( + chunkArray(missingStr, 500).map(async (batch) => { + const res = await this.db.db + .selectFrom('ipld_block') + .innerJoin( + 'ipld_block_creator as creator', + 'creator.cid', + 'ipld_block.cid', + ) + .where('creator.did', '=', this.did) + .where('ipld_block.cid', 'in', batch) + .select(['ipld_block.cid as cid', 'ipld_block.content as content']) + .execute() + for (const row of res) { + const cid = CID.parse(row.cid) + blocks.set(cid, row.content) + missing.delete(cid) + } + }), + ) + this.cache.addMap(blocks) + blocks.addMap(cached.blocks) + return { blocks, missing: missing.toList() } + } + async putBlock(cid: CID, block: Uint8Array): Promise { this.db.assertTransaction() const insertBlock = this.db.db @@ -119,81 +152,93 @@ export class SqlRepoStorage extends RepoStorage { did: this.did, }) }) - const promises: Promise[] = [] - chunkArray(blocks, 500).forEach((batch) => { - const promise = this.db.db - .insertInto('ipld_block') - .values(batch) - .onConflict((oc) => oc.doNothing()) - .execute() - promises.push(promise) - }) - chunkArray(creators, 500).forEach((batch) => { - const promise = this.db.db - .insertInto('ipld_block_creator') - .values(batch) - .onConflict((oc) => oc.doNothing()) - .execute() - promises.push(promise) - }) - await Promise.all(promises) + const putBlocks = Promise.all( + chunkArray(blocks, 500).map((batch) => + this.db.db + .insertInto('ipld_block') + .values(batch) + .onConflict((oc) => oc.doNothing()) + .execute(), + ), + ) + const putCreators = Promise.all( + chunkArray(creators, 500).map((batch) => + this.db.db + .insertInto('ipld_block_creator') + .values(batch) + .onConflict((oc) => oc.doNothing()) + .execute(), + ), + ) + await Promise.all([putBlocks, putCreators]) } - async applyCommit(commit: CommitData): Promise { + async indexCommits(commits: CommitData[]): Promise { this.db.assertTransaction() - const commitBlocks = commit.blocks.entries().map((block) => ({ - commit: commit.root.toString(), - block: block.cid.toString(), - })) - const insertBlocks = this.putMany(commit.blocks) - const insertCommit = this.db.db - .insertInto('repo_commit_block') - .values(commitBlocks) - .onConflict((oc) => oc.doNothing()) - .execute() - const updateRoot = - commit.prev === null - ? this.insertRoot(commit.root) - : this.updateRoot(commit.root, commit.prev) - const insertCommitHistory = this.db.db - .insertInto('repo_commit_history') - .values({ - commit: commit.root.toString(), + const allBlocks = new BlockMap() + const commitBlocks: RepoCommitBlock[] = [] + const commitHistory: RepoCommitHistory[] = [] + for (const commit of commits) { + for (const block of commit.blocks.entries()) { + commitBlocks.push({ + commit: commit.commit.toString(), + block: block.cid.toString(), + }) + allBlocks.set(block.cid, block.bytes) + } + commitHistory.push({ + commit: commit.commit.toString(), prev: commit.prev ? 
commit.prev.toString() : null, }) - .onConflict((oc) => oc.doNothing()) - .execute() + } + const insertCommitBlocks = Promise.all( + chunkArray(commitBlocks, 500).map((batch) => + this.db.db + .insertInto('repo_commit_block') + .values(batch) + .onConflict((oc) => oc.doNothing()) + .execute(), + ), + ) + const insertCommitHistory = Promise.all( + chunkArray(commitHistory, 500).map((batch) => + this.db.db + .insertInto('repo_commit_history') + .values(batch) + .onConflict((oc) => oc.doNothing()) + .execute(), + ), + ) await Promise.all([ - insertBlocks, - insertCommit, - updateRoot, + this.putMany(allBlocks), + insertCommitBlocks, insertCommitHistory, ]) } - private async insertRoot(commit: CID): Promise { - await this.db.db - .insertInto('repo_root') - .values({ - did: this.did, - root: commit.toString(), - indexedAt: this.getTimestamp(), - }) - .execute() - } - - private async updateRoot(commit: CID, prev: CID): Promise { - const res = await this.db.db - .updateTable('repo_root') - .set({ - root: commit.toString(), - indexedAt: this.getTimestamp(), - }) - .where('did', '=', this.did) - .where('root', '=', prev.toString()) - .executeTakeFirst() - if (res.numUpdatedRows < 1) { - throw new Error('failed to update repo root: misordered') + async updateHead(cid: CID, prev: CID | null): Promise { + if (prev === null) { + await this.db.db + .insertInto('repo_root') + .values({ + did: this.did, + root: cid.toString(), + indexedAt: this.getTimestamp(), + }) + .execute() + } else { + const res = await this.db.db + .updateTable('repo_root') + .set({ + root: cid.toString(), + indexedAt: this.getTimestamp(), + }) + .where('did', '=', this.did) + .where('root', '=', prev.toString()) + .executeTakeFirst() + if (res.numUpdatedRows < 1) { + throw new Error('failed to update repo root: misordered') + } } } @@ -227,15 +272,11 @@ export class SqlRepoStorage extends RepoStorage { .execute() return res.map((row) => CID.parse(row.commit)).reverse() } - - async getCommits( - latest: CID, - earliest: CID | null, - ): Promise { - const commitPath = await this.getCommitPath(latest, earliest) - if (!commitPath) return null - const commitStrs = commitPath.map((commit) => commit.toString()) - if (commitStrs.length < 1) return [] + async getBlocksForCommits( + commits: CID[], + ): Promise<{ [commit: string]: BlockMap }> { + if (commits.length === 0) return {} + const commitStrs = commits.map((commit) => commit.toString()) const res = await this.db.db .selectFrom('repo_commit_block') .innerJoin('ipld_block', 'ipld_block.cid', 'repo_commit_block.block') @@ -246,18 +287,13 @@ export class SqlRepoStorage extends RepoStorage { ]) .where('commit', 'in', commitStrs) .execute() - const sortedBlocks: { [commit: string]: BlockMap } = {} - res.forEach((row) => { - if (!sortedBlocks[row.commit]) { - sortedBlocks[row.commit] = new BlockMap() - } - sortedBlocks[row.commit].set(CID.parse(row.cid), row.content) - }) - return commitPath.map((commit) => ({ - root: commit, - prev: null, - blocks: sortedBlocks[commit.toString()] || new BlockMap(), - })) + return res.reduce((acc, cur) => { + acc[cur.commit] ??= new BlockMap() + const cid = CID.parse(cur.cid) + acc[cur.commit].set(cid, cur.content) + this.cache.set(cid, cur.content) + return acc + }, {}) } async destroy(): Promise { diff --git a/packages/pds/tests/sql-repo-storage.test.ts b/packages/pds/tests/sql-repo-storage.test.ts index ff0da4cf421..b090f9f2b35 100644 --- a/packages/pds/tests/sql-repo-storage.test.ts +++ b/packages/pds/tests/sql-repo-storage.test.ts @@ -1,4 +1,5 @@ 
import { range, valueToIpldBlock } from '@atproto/common' +import { def } from '@atproto/repo' import BlockMap from '@atproto/repo/src/block-map' import { Database } from '../src' import SqlRepoStorage from '../src/sql-repo-storage' @@ -31,7 +32,7 @@ describe('sql repo storage', () => { }) const storage = new SqlRepoStorage(db, did) - const value = await storage.getUnchecked(cid) + const value = await storage.readObj(cid, def.unknown) expect(value).toEqual({ my: 'block' }) }) @@ -77,12 +78,12 @@ describe('sql repo storage', () => { await db.transaction(async (dbTxn) => { const storage = new SqlRepoStorage(dbTxn, did) await storage.applyCommit({ - root: commits[0].cid, + commit: commits[0].cid, prev: null, blocks: blocks0, }) await storage.applyCommit({ - root: commits[1].cid, + commit: commits[1].cid, prev: commits[0].cid, blocks: blocks1, }) @@ -106,9 +107,9 @@ describe('sql repo storage', () => { throw new Error('could not get commit data') } expect(commitData.length).toBe(2) - expect(commitData[0].root.equals(commits[0].cid)).toBeTruthy() + expect(commitData[0].commit.equals(commits[0].cid)).toBeTruthy() expect(commitData[0].blocks.equals(blocks0)).toBeTruthy() - expect(commitData[1].root.equals(commits[1].cid)).toBeTruthy() + expect(commitData[1].commit.equals(commits[1].cid)).toBeTruthy() expect(commitData[1].blocks.equals(blocks1)).toBeTruthy() }) }) diff --git a/packages/pds/tests/sync.test.ts b/packages/pds/tests/sync.test.ts new file mode 100644 index 00000000000..a39df1e48d9 --- /dev/null +++ b/packages/pds/tests/sync.test.ts @@ -0,0 +1,158 @@ +import AtpApi, { ServiceClient as AtpServiceClient } from '@atproto/api' +import { randomStr } from '@atproto/crypto' +import { DidResolver } from '@atproto/did-resolver' +import * as repo from '@atproto/repo' +import { MemoryBlockstore } from '@atproto/repo' +import { AtUri } from '@atproto/uri' +import { CID } from 'multiformats/cid' +import { CloseFn, runTestServer } from './_util' + +describe('repo sync', () => { + let client: AtpServiceClient + let did: string + + const repoData: repo.RepoContents = {} + const uris: AtUri[] = [] + const storage = new MemoryBlockstore() + let didResolver: DidResolver + let currRoot: CID | undefined + + let close: CloseFn + + beforeAll(async () => { + const server = await runTestServer({ + dbPostgresSchema: 'repo_sync', + }) + close = server.close + client = AtpApi.service(server.url) + const res = await client.com.atproto.account.create({ + email: 'alice@test.com', + handle: 'alice.test', + password: 'alice-pass', + }) + client.setHeader('authorization', `Bearer ${res.data.accessJwt}`) + did = res.data.did + didResolver = new DidResolver({ plcUrl: server.ctx.cfg.didPlcUrl }) + repoData['app.bsky.system.declaration'] = { + self: { + $type: 'app.bsky.system.declaration', + actorType: 'app.bsky.system.actorUser', + }, + } + }) + + afterAll(async () => { + await close() + }) + + it('creates and syncs some records', async () => { + const ADD_COUNT = 10 + for (let i = 0; i < ADD_COUNT; i++) { + const { obj, uri } = await makePost(client, did) + if (!repoData[uri.collection]) { + repoData[uri.collection] = {} + } + repoData[uri.collection][uri.rkey] = obj + uris.push(uri) + } + + const car = await client.com.atproto.sync.getRepo({ did }) + const synced = await repo.loadFullRepo( + storage, + new Uint8Array(car.data), + didResolver, + ) + expect(synced.ops.length).toBe(ADD_COUNT + 1) // +1 because of declaration + const loaded = await repo.Repo.load(storage, synced.root) + const contents = await 
loaded.getContents() + expect(contents).toEqual(repoData) + + currRoot = synced.root + }) + + it('syncs creates and deletes', async () => { + const ADD_COUNT = 10 + const DEL_COUNT = 4 + for (let i = 0; i < ADD_COUNT; i++) { + const { obj, uri } = await makePost(client, did) + if (!repoData[uri.collection]) { + repoData[uri.collection] = {} + } + repoData[uri.collection][uri.rkey] = obj + uris.push(uri) + } + // delete two that are already synced & two that have not been + for (let i = 0; i < DEL_COUNT; i++) { + const uri = uris[i * 5] + await client.app.bsky.feed.post.delete({ + did, + collection: uri.collection, + rkey: uri.rkey, + }) + delete repoData[uri.collection][uri.rkey] + } + + const car = await client.com.atproto.sync.getRepo({ + did, + from: currRoot?.toString(), + }) + const currRepo = await repo.Repo.load(storage, currRoot) + const synced = await repo.loadDiff( + currRepo, + new Uint8Array(car.data), + didResolver, + ) + expect(synced.ops.length).toBe(ADD_COUNT) // -2 because of dels of new records, +2 because of dels of old records + const loaded = await repo.Repo.load(storage, synced.root) + const contents = await loaded.getContents() + expect(contents).toEqual(repoData) + + currRoot = synced.root + }) + + it('syncs current root', async () => { + const root = await client.com.atproto.sync.getHead({ did }) + expect(root.data.root).toEqual(currRoot?.toString()) + }) + + it('syncs commit path', async () => { + const local = await storage.getCommitPath(currRoot as CID, null) + if (!local) { + throw new Error('Could not get local commit path') + } + const localStr = local.map((c) => c.toString()) + const commitPath = await client.com.atproto.sync.getCommitPath({ did }) + expect(commitPath.data.commits).toEqual(localStr) + + const partialCommitPath = await client.com.atproto.sync.getCommitPath({ + did, + earliest: localStr[2], + latest: localStr[15], + }) + expect(partialCommitPath.data.commits).toEqual(localStr.slice(3, 16)) + }) + + it('syncs a repo checkout', async () => { + const car = await client.com.atproto.sync.getCheckout({ did }) + const checkoutStorage = new MemoryBlockstore() + const loaded = await repo.loadCheckout( + checkoutStorage, + new Uint8Array(car.data), + didResolver, + ) + expect(loaded.contents).toEqual(repoData) + const loadedRepo = await repo.Repo.load(checkoutStorage, loaded.root) + expect(await loadedRepo.getContents()).toEqual(repoData) + }) +}) + +const makePost = async (client: AtpServiceClient, did: string) => { + const obj = { + $type: 'app.bsky.feed.post', + text: randomStr(32, 'base32'), + createdAt: new Date().toISOString(), + } + const res = await client.app.bsky.feed.post.create({ did }, obj) + const uri = new AtUri(res.uri) + return { obj, uri } +} diff --git a/packages/pds/tests/views/votes.test.ts b/packages/pds/tests/views/votes.test.ts index 3a34104bff6..58089f11881 100644 --- a/packages/pds/tests/views/votes.test.ts +++ b/packages/pds/tests/views/votes.test.ts @@ -161,7 +161,6 @@ describe('pds vote views', () => { } post = await getPost() - console.log(post.thread) expect( (post.thread.post as AppBskyFeedGetPostThread.ThreadViewPost) .downvoteCount, diff --git a/packages/pds/tsconfig.json b/packages/pds/tsconfig.json index 346af1d02a6..5402bfe5beb 100644 --- a/packages/pds/tsconfig.json +++ b/packages/pds/tsconfig.json @@ -8,7 +8,6 @@ "include": ["./src","__tests__/**/**.ts"], "references": [ { "path": "../api/tsconfig.build.json" }, - { "path": "../auth/tsconfig.build.json" }, { "path": "../common/tsconfig.build.json" }, { "path":
"../crypto/tsconfig.build.json" }, { "path": "../did-resolver/tsconfig.build.json" }, diff --git a/packages/plc/src/client/index.ts b/packages/plc/src/client/index.ts index eaa798edf92..806264dfd4f 100644 --- a/packages/plc/src/client/index.ts +++ b/packages/plc/src/client/index.ts @@ -1,6 +1,6 @@ import axios from 'axios' import { CID } from 'multiformats/cid' -import { DidableKey } from '@atproto/crypto' +import { Keypair } from '@atproto/crypto' import { check, cidForData } from '@atproto/common' import * as operations from '../lib/operations' import * as t from '../lib/types' @@ -28,7 +28,7 @@ export class PlcClient { } async createDid( - signingKey: DidableKey, + signingKey: Keypair, recoveryKey: string, handle: string, service: string, @@ -53,7 +53,7 @@ export class PlcClient { async rotateSigningKey( did: string, newKey: string, - signingKey: DidableKey, + signingKey: Keypair, prev?: CID, ) { prev = prev ? prev : await this.getPrev(did) @@ -68,7 +68,7 @@ export class PlcClient { async rotateRecoveryKey( did: string, newKey: string, - signingKey: DidableKey, + signingKey: Keypair, prev?: CID, ) { prev = prev ? prev : await this.getPrev(did) @@ -80,7 +80,7 @@ export class PlcClient { await axios.post(this.postOpUrl(did), op) } - async updateHandle(did: string, handle: string, signingKey: DidableKey) { + async updateHandle(did: string, handle: string, signingKey: Keypair) { const prev = await this.getPrev(did) const op = await operations.updateHandle( handle, @@ -90,7 +90,7 @@ export class PlcClient { await axios.post(this.postOpUrl(did), op) } - async updateAtpPds(did: string, service: string, signingKey: DidableKey) { + async updateAtpPds(did: string, service: string, signingKey: Keypair) { const prev = await this.getPrev(did) const op = await operations.updateAtpPds( service, diff --git a/packages/plc/src/lib/document.ts b/packages/plc/src/lib/document.ts index b025384e3d0..4b875069d1d 100644 --- a/packages/plc/src/lib/document.ts +++ b/packages/plc/src/lib/document.ts @@ -152,7 +152,7 @@ export const assureValidSig = async ( const dataBytes = new Uint8Array(cbor.encode(opData)) let isValid = true for (const did of allowedDids) { - isValid = await crypto.verifyDidSig(did, dataBytes, sigBytes) + isValid = await crypto.verifySignature(did, dataBytes, sigBytes) if (isValid) return } throw new ServerError(400, `Invalid signature on op: ${JSON.stringify(op)}`) diff --git a/packages/plc/src/lib/operations.ts b/packages/plc/src/lib/operations.ts index d3de446c70b..62317ec6d23 100644 --- a/packages/plc/src/lib/operations.ts +++ b/packages/plc/src/lib/operations.ts @@ -1,6 +1,6 @@ import * as cbor from '@ipld/dag-cbor' import * as uint8arrays from 'uint8arrays' -import { DidableKey, sha256 } from '@atproto/crypto' +import { Keypair, sha256 } from '@atproto/crypto' import * as t from './types' export const didForCreateOp = async (op: t.CreateOp, truncate = 24) => { @@ -12,7 +12,7 @@ export const didForCreateOp = async (op: t.CreateOp, truncate = 24) => { export const signOperation = async ( op: t.UnsignedOperation, - signingKey: DidableKey, + signingKey: Keypair, ): Promise => { const data = new Uint8Array(cbor.encode(op)) const sig = await signingKey.sign(data) @@ -23,7 +23,7 @@ export const signOperation = async ( } export const create = async ( - signingKey: DidableKey, + signingKey: Keypair, recoveryKey: string, handle: string, service: string, @@ -43,7 +43,7 @@ export const create = async ( export const rotateSigningKey = async ( newKey: string, prev: string, - signingKey: DidableKey, + 
signingKey: Keypair, ): Promise => { const op: t.UnsignedRotateSigningKeyOp = { type: 'rotate_signing_key', @@ -56,7 +56,7 @@ export const rotateSigningKey = async ( export const rotateRecoveryKey = async ( newKey: string, prev: string, - signingKey: DidableKey, + signingKey: Keypair, ): Promise => { const op: t.UnsignedRotateRecoveryKeyOp = { type: 'rotate_recovery_key', @@ -69,7 +69,7 @@ export const rotateRecoveryKey = async ( export const updateHandle = async ( handle: string, prev: string, - signingKey: DidableKey, + signingKey: Keypair, ): Promise => { const op: t.UnsignedUpdateHandleOp = { type: 'update_handle', @@ -82,7 +82,7 @@ export const updateHandle = async ( export const updateAtpPds = async ( service: string, prev: string, - signingKey: DidableKey, + signingKey: Keypair, ): Promise => { const op: t.UnsignedUpdateAtpPdsOp = { type: 'update_atp_pds', diff --git a/packages/repo/bench/mst.bench.ts b/packages/repo/bench/mst.bench.ts index 7bd4a26bfae..0cc2fe8e05b 100644 --- a/packages/repo/bench/mst.bench.ts +++ b/packages/repo/bench/mst.bench.ts @@ -44,7 +44,7 @@ describe('MST Benchmarks', () => { const doneAdding = Date.now() - const root = await util.saveMst(mst) + const root = await util.saveMst(blockstore, mst) const doneSaving = Date.now() @@ -63,7 +63,10 @@ describe('MST Benchmarks', () => { let proofSize = 0 for (const entry of path) { if (entry.isTree()) { - const bytes = await blockstore.guaranteeBytes(entry.pointer) + const bytes = await blockstore.getBytes(entry.pointer) + if (!bytes) { + throw new Error(`Bytes not found: ${entry.pointer}`) + } proofSize += bytes.byteLength } } diff --git a/packages/repo/bench/repo.bench.ts b/packages/repo/bench/repo.bench.ts index ecdf6d5d2b6..a149337b95d 100644 --- a/packages/repo/bench/repo.bench.ts +++ b/packages/repo/bench/repo.bench.ts @@ -1,21 +1,20 @@ -import * as auth from '@atproto/auth' import { TID } from '@atproto/common' +import * as crypto from '@atproto/crypto' +import { Secp256k1Keypair } from '@atproto/crypto' import { MemoryBlockstore, Repo, WriteOpAction } from '../src' import * as util from '../tests/_util' describe('Repo Benchmarks', () => { - const verifier = new auth.Verifier() const size = 10000 let blockstore: MemoryBlockstore - let authStore: auth.AuthStore + let keypair: crypto.Keypair let repo: Repo beforeAll(async () => { blockstore = new MemoryBlockstore() - authStore = await verifier.createTempAuthStore() - await authStore.claimFull() - repo = await Repo.create(blockstore, await authStore.did(), authStore) + keypair = await Secp256k1Keypair.create() + repo = await Repo.create(blockstore, await keypair.did(), keypair) }) it('calculates size', async () => { @@ -40,7 +39,7 @@ describe('Repo Benchmarks', () => { createdAt: new Date().toISOString(), }, }, - authStore, + keypair, ) } diff --git a/packages/repo/package.json b/packages/repo/package.json index ef0c4cf32c2..b9d562ec04f 100644 --- a/packages/repo/package.json +++ b/packages/repo/package.json @@ -18,8 +18,9 @@ "postbuild": "tsc --build tsconfig.build.json" }, "dependencies": { - "@atproto/auth": "*", "@atproto/common": "*", + "@atproto/crypto": "*", + "@atproto/did-resolver": "*", "@atproto/nsid": "*", "@ipld/car": "^3.2.3", "@ipld/dag-cbor": "^7.0.0", diff --git a/packages/repo/src/block-map.ts b/packages/repo/src/block-map.ts index 22e69805761..d130ff4aa96 100644 --- a/packages/repo/src/block-map.ts +++ b/packages/repo/src/block-map.ts @@ -19,6 +19,20 @@ export class BlockMap { return this.map.get(cid.toString()) } + getMany(cids: CID[]): { 
blocks: BlockMap; missing: CID[] } { + const missing: CID[] = [] + const blocks = new BlockMap() + for (const cid of cids) { + const got = this.map.get(cid.toString()) + if (got) { + blocks.set(cid, got) + } else { + missing.push(cid) + } + } + return { blocks, missing } + } + has(cid: CID): boolean { return this.map.has(cid.toString()) } diff --git a/packages/repo/src/cid-set.ts b/packages/repo/src/cid-set.ts index 2d1c7daac08..a639c967dd8 100644 --- a/packages/repo/src/cid-set.ts +++ b/packages/repo/src/cid-set.ts @@ -42,8 +42,7 @@ export class CidSet { } toList(): CID[] { - const arr = [...this.set] - return arr.map((c) => CID.parse(c)) + return [...this.set].map((c) => CID.parse(c)) } } diff --git a/packages/repo/src/data-diff.ts b/packages/repo/src/data-diff.ts new file mode 100644 index 00000000000..1e3e71230f8 --- /dev/null +++ b/packages/repo/src/data-diff.ts @@ -0,0 +1,108 @@ +import { CID } from 'multiformats' +import CidSet from './cid-set' +import { MST, mstDiff } from './mst' +import { DataStore } from './types' + +export class DataDiff { + adds: Record = {} + updates: Record = {} + deletes: Record = {} + + newCids: CidSet = new CidSet() + + static async of(curr: DataStore, prev: DataStore | null): Promise { + if (curr instanceof MST && (prev === null || prev instanceof MST)) { + return mstDiff(curr, prev) + } + throw new Error('Unsupported DataStore type for diff') + } + + recordAdd(key: string, cid: CID): void { + this.adds[key] = { key, cid } + this.newCids.add(cid) + } + + recordUpdate(key: string, prev: CID, cid: CID): void { + this.updates[key] = { key, prev, cid } + this.newCids.add(cid) + } + + recordDelete(key: string, cid: CID): void { + this.deletes[key] = { key, cid } + } + + recordNewCid(cid: CID): void { + this.newCids.add(cid) + } + + addDiff(diff: DataDiff) { + for (const add of diff.addList()) { + if (this.deletes[add.key]) { + const del = this.deletes[add.key] + if (del.cid !== add.cid) { + this.recordUpdate(add.key, del.cid, add.cid) + } + delete this.deletes[add.key] + } else { + this.recordAdd(add.key, add.cid) + } + } + for (const update of diff.updateList()) { + this.recordUpdate(update.key, update.prev, update.cid) + delete this.adds[update.key] + delete this.deletes[update.key] + } + for (const del of diff.deleteList()) { + if (this.adds[del.key]) { + delete this.adds[del.key] + } else { + delete this.updates[del.key] + this.recordDelete(del.key, del.cid) + } + } + this.newCids.addSet(diff.newCids) + } + + addList(): DataAdd[] { + return Object.values(this.adds) + } + + updateList(): DataUpdate[] { + return Object.values(this.updates) + } + + deleteList(): DataDelete[] { + return Object.values(this.deletes) + } + + newCidList(): CID[] { + return this.newCids.toList() + } + + updatedKeys(): string[] { + const keys = [ + ...Object.keys(this.adds), + ...Object.keys(this.updates), + ...Object.keys(this.deletes), + ] + return [...new Set(keys)] + } +} + +export type DataAdd = { + key: string + cid: CID +} + +export type DataUpdate = { + key: string + prev: CID + cid: CID +} + +export type DataDelete = { + key: string + cid: CID +} + +export default DataDiff diff --git a/packages/repo/src/error.ts b/packages/repo/src/error.ts new file mode 100644 index 00000000000..ec2293b2a85 --- /dev/null +++ b/packages/repo/src/error.ts @@ -0,0 +1,32 @@ +import { Def } from '@atproto/common/src/check' +import { CID } from 'multiformats/cid' + +export class MissingBlockError extends Error { + constructor(public cid: CID, def?: Def) { + let msg = `block not found: 
${cid.toString()}` + if (def) { + msg += `, expected type: ${def.name}` + } + super(msg) + } +} + +export class MissingBlocksError extends Error { + constructor(public context: string, public cids: CID[]) { + const cidStr = cids.map((c) => c.toString()) + super(`missing ${context} blocks: ${cidStr}`) + } +} + +export class MissingCommitBlocksError extends Error { + constructor(public commit: CID, public cids: CID[]) { + const cidStr = cids.map((c) => c.toString()) + super(`missing blocks for commit ${commit.toString()}: ${cidStr}`) + } +} + +export class UnexpectedObjectError extends Error { + constructor(public cid: CID, public def: Def) { + super(`unexpected object at ${cid.toString()}, expected: ${def.name}`) + } +} diff --git a/packages/repo/src/index.ts b/packages/repo/src/index.ts index e1fdda71ea3..db914ccd148 100644 --- a/packages/repo/src/index.ts +++ b/packages/repo/src/index.ts @@ -1,6 +1,9 @@ +export * from './block-map' +export * from './cid-set' export * from './repo' export * from './mst' export * from './storage' +export * from './sync' export * from './types' export * from './verify' export * from './util' diff --git a/packages/repo/src/mst/diff.ts b/packages/repo/src/mst/diff.ts index a4ca15af653..37ca66a2267 100644 --- a/packages/repo/src/mst/diff.ts +++ b/packages/repo/src/mst/diff.ts @@ -1,106 +1,129 @@ -import * as auth from '@atproto/auth' -import { CID } from 'multiformats' -import CidSet from '../cid-set' -import { parseRecordKey } from '../util' - -export class DataDiff { - adds: Record = {} - updates: Record = {} - deletes: Record = {} - - newCids: CidSet = new CidSet() - - recordAdd(key: string, cid: CID): void { - this.adds[key] = { key, cid } - this.newCids.add(cid) - } - - recordUpdate(key: string, prev: CID, cid: CID): void { - this.updates[key] = { key, prev, cid } - this.newCids.add(cid) - } - - recordDelete(key: string, cid: CID): void { - this.deletes[key] = { key, cid } +import { DataDiff } from '../data-diff' +import MST from './mst' +import MstWalker from './walker' + +export const nullDiff = async (tree: MST): Promise => { + const diff = new DataDiff() + for await (const entry of tree.walk()) { + if (entry.isLeaf()) { + diff.recordAdd(entry.key, entry.value) + } else { + diff.recordNewCid(entry.pointer) + } } + return diff +} - recordNewCid(cid: CID): void { - this.newCids.add(cid) +export const mstDiff = async ( + curr: MST, + prev: MST | null, +): Promise => { + await curr.getPointer() + if (prev === null) { + return nullDiff(curr) } - addDiff(diff: DataDiff) { - for (const add of diff.addList()) { - if (this.deletes[add.key]) { - const del = this.deletes[add.key] - if (del.cid !== add.cid) { - this.recordUpdate(add.key, del.cid, add.cid) - } - delete this.deletes[add.key] + await prev.getPointer() + const diff = new DataDiff() + + const leftWalker = new MstWalker(prev) + const rightWalker = new MstWalker(curr) + while (!leftWalker.status.done || !rightWalker.status.done) { + // if one walker is finished, continue walking the other & logging all nodes + if (leftWalker.status.done && !rightWalker.status.done) { + const node = rightWalker.status.curr + if (node.isLeaf()) { + diff.recordAdd(node.key, node.value) } else { - this.recordAdd(add.key, add.cid) + diff.recordNewCid(node.pointer) } + await rightWalker.advance() + continue + } else if (!leftWalker.status.done && rightWalker.status.done) { + const node = leftWalker.status.curr + if (node.isLeaf()) { + diff.recordDelete(node.key, node.value) + } + await leftWalker.advance() + continue } - for 
(const update of diff.updateList()) { - this.recordUpdate(update.key, update.prev, update.cid) - delete this.adds[update.key] - delete this.deletes[update.key] - } - for (const del of diff.deleteList()) { - if (this.adds[del.key]) { - delete this.adds[del.key] + if (leftWalker.status.done || rightWalker.status.done) break + const left = leftWalker.status.curr + const right = rightWalker.status.curr + if (left === null || right === null) break + + // if both pointers are leaves, record an update & advance both or record the lowest key and advance that pointer + if (left.isLeaf() && right.isLeaf()) { + if (left.key === right.key) { + if (!left.value.equals(right.value)) { + diff.recordUpdate(left.key, left.value, right.value) + } + await leftWalker.advance() + await rightWalker.advance() + } else if (left.key < right.key) { + diff.recordDelete(left.key, left.value) + await leftWalker.advance() } else { - delete this.updates[del.key] - this.recordDelete(del.key, del.cid) + diff.recordAdd(right.key, right.value) + await rightWalker.advance() } + continue } - this.newCids.addSet(diff.newCids) - } - addList(): DataAdd[] { - return Object.values(this.adds) - } - - updateList(): DataUpdate[] { - return Object.values(this.updates) - } - - deleteList(): DataDelete[] { - return Object.values(this.deletes) - } + // next, ensure that we're on the same layer + // if one walker is at a higher layer than the other, we need to do one of two things + // if the higher walker is pointed at a tree, step into that tree to try to catch up with the lower + // if the higher walker is pointed at a leaf, then advance the lower walker to try to catch up the higher + if (leftWalker.layer() > rightWalker.layer()) { + if (left.isLeaf()) { + if (right.isLeaf()) { + diff.recordAdd(right.key, right.value) + } else { + diff.recordNewCid(right.pointer) + } + await rightWalker.advance() + } else { + await leftWalker.stepInto() + } + continue + } else if (leftWalker.layer() < rightWalker.layer()) { + if (right.isLeaf()) { + if (left.isLeaf()) { + diff.recordDelete(left.key, left.value) + } + await leftWalker.advance() + } else { + diff.recordNewCid(right.pointer) + await rightWalker.stepInto() + } + continue + } - newCidList(): CID[] { - return this.newCids.toList() - } + // if we're on the same level, and both pointers are trees, do a comparison + // if they're the same, step over. 
if they're different, step in to find the subdiff + if (left.isTree() && right.isTree()) { + if (left.pointer.equals(right.pointer)) { + await leftWalker.stepOver() + await rightWalker.stepOver() + } else { + diff.recordNewCid(right.pointer) + await leftWalker.stepInto() + await rightWalker.stepInto() + } + continue + } - updatedKeys(): string[] { - const keys = [ - ...Object.keys(this.adds), - ...Object.keys(this.updates), - ...Object.keys(this.deletes), - ] - return [...new Set(keys)] - } + // finally, if one pointer is a tree and the other is a leaf, simply step into the tree + if (left.isLeaf() && right.isTree()) { + await diff.recordNewCid(right.pointer) + await rightWalker.stepInto() + continue + } else if (left.isTree() && right.isLeaf()) { + await leftWalker.stepInto() + continue + } - neededCapabilities(rootDid: string): auth.ucans.Capability[] { - return this.updatedKeys().map((key) => { - const { collection, rkey } = parseRecordKey(key) - return auth.writeCap(rootDid, collection, rkey) - }) + throw new Error('Unidentifiable case in diff walk') } -} - -export type DataAdd = { - key: string - cid: CID -} - -export type DataUpdate = { - key: string - prev: CID - cid: CID -} - -export type DataDelete = { - key: string - cid: CID + return diff } diff --git a/packages/repo/src/mst/mst.ts b/packages/repo/src/mst/mst.ts index f327e3a790b..92476561424 100644 --- a/packages/repo/src/mst/mst.ts +++ b/packages/repo/src/mst/mst.ts @@ -1,15 +1,15 @@ import z from 'zod' import { CID } from 'multiformats' -import { RepoStorage } from '../storage' -import { def, cidForData } from '@atproto/common' -import { DataDiff } from './diff' +import { ReadableBlockstore } from '../storage' +import { schema as common, cidForData } from '@atproto/common' import { DataStore } from '../types' import { BlockWriter } from '@ipld/car/api' import * as util from './util' -import MstWalker from './walker' import BlockMap from '../block-map' import CidSet from '../cid-set' +import { MissingBlocksError } from '../error' +import * as parse from '../parse' /** * This is an implementation of a Merkle Search Tree (MST) @@ -41,18 +41,23 @@ import CidSet from '../cid-set' * Then the first will be described as `prefix: 0, key: 'bsky/posts/abcdefg'`, * and the second will be described as `prefix: 16, key: 'hi'.` */ -const subTreePointer = z.nullable(def.cid) +const subTreePointer = z.nullable(common.cid) const treeEntry = z.object({ p: z.number(), // prefix count of utf-8 chars that this key shares with the prev key k: z.string(), // the rest of the key outside the shared prefix - v: def.cid, // value + v: common.cid, // value t: subTreePointer, // next subtree (to the right of leaf) }) -export const nodeDataDef = z.object({ +const nodeData = z.object({ l: subTreePointer, // left-most subtree e: z.array(treeEntry), //entries }) -export type NodeData = z.infer +export type NodeData = z.infer + +export const nodeDataDef = { + name: 'mst node', + schema: nodeData, +} export type NodeEntry = MST | Leaf @@ -64,7 +69,7 @@ export type MstOpts = { } export class MST implements DataStore { - storage: RepoStorage + storage: ReadableBlockstore fanout: Fanout entries: NodeEntry[] | null layer: number | null @@ -72,7 +77,7 @@ export class MST implements DataStore { outdatedPointer = false constructor( - storage: RepoStorage, + storage: ReadableBlockstore, fanout: Fanout, pointer: CID, entries: NodeEntry[] | null, @@ -86,7 +91,7 @@ export class MST implements DataStore { } static async create( - storage: RepoStorage, + storage: 
ReadableBlockstore, entries: NodeEntry[] = [], opts?: Partial, ): Promise { @@ -96,7 +101,7 @@ export class MST implements DataStore { } static async fromData( - storage: RepoStorage, + storage: ReadableBlockstore, data: NodeData, opts?: Partial, ): Promise { @@ -106,7 +111,11 @@ export class MST implements DataStore { return new MST(storage, fanout, pointer, entries, layer) } - static load(storage: RepoStorage, cid: CID, opts?: Partial): MST { + static load( + storage: ReadableBlockstore, + cid: CID, + opts?: Partial, + ): MST { const { layer = null, fanout = DEFAULT_MST_FANOUT } = opts || {} return new MST(storage, fanout, cid, null, layer) } @@ -134,7 +143,7 @@ export class MST implements DataStore { async getEntries(): Promise { if (this.entries) return [...this.entries] if (this.pointer) { - const data = await this.storage.get(this.pointer, nodeDataDef) + const data = await this.storage.readObj(this.pointer, nodeDataDef) const firstLeaf = data.e[0] const layer = firstLeaf !== undefined @@ -200,31 +209,17 @@ export class MST implements DataStore { // ------------------- // Return the necessary blocks to persist the MST to repo storage - // If the topmost tree only has one entry and it's a subtree, we can eliminate the topmost tree - // However, lower trees with only one entry must be preserved - async blockDiff(): Promise<{ root: CID; blocks: BlockMap }> { - return this.blockDiffRecurse(true) - } - - async blockDiffRecurse( - trimTop = false, - ): Promise<{ root: CID; blocks: BlockMap }> { + async getUnstoredBlocks(): Promise<{ root: CID; blocks: BlockMap }> { const blocks = new BlockMap() const pointer = await this.getPointer() const alreadyHas = await this.storage.has(pointer) if (alreadyHas) return { root: pointer, blocks } const entries = await this.getEntries() - if (entries.length === 1 && trimTop) { - const node = entries[0] - if (node.isTree()) { - return node.blockDiffRecurse(true) - } - } const data = util.serializeNodeData(entries) await blocks.add(data) for (const entry of entries) { if (entry.isTree()) { - const subtree = await entry.blockDiffRecurse(false) + const subtree = await entry.getUnstoredBlocks() blocks.addMap(subtree.blocks) } } @@ -335,6 +330,11 @@ export class MST implements DataStore { // Deletes the value at the given key async delete(key: string): Promise { + const altered = await this.deleteRecurse(key) + return altered.trimTop() + } + + async deleteRecurse(key: string): Promise { const index = await this.findGtOrEqualLeafIndex(key) const found = await this.atIndex(index) // if found, remove it on this level @@ -355,7 +355,7 @@ export class MST implements DataStore { // else recurse down to find it const prev = await this.atIndex(index - 1) if (prev?.isTree()) { - const subtree = await prev.delete(key) + const subtree = await prev.deleteRecurse(key) const subTreeEntries = await subtree.getEntries() if (subTreeEntries.length === 0) { return this.removeEntry(index - 1) @@ -367,114 +367,6 @@ export class MST implements DataStore { } } - // Walk two MSTs to find the semantic changes - async diff(other: MST): Promise { - await this.getPointer() - await other.getPointer() - const diff = new DataDiff() - - const leftWalker = new MstWalker(this) - const rightWalker = new MstWalker(other) - while (!leftWalker.status.done || !rightWalker.status.done) { - // if one walker is finished, continue walking the other & logging all nodes - if (leftWalker.status.done && !rightWalker.status.done) { - const node = rightWalker.status.curr - if (node.isLeaf()) { - 
diff.recordAdd(node.key, node.value) - } else { - diff.recordNewCid(node.pointer) - } - await rightWalker.advance() - continue - } else if (!leftWalker.status.done && rightWalker.status.done) { - const node = leftWalker.status.curr - if (node.isLeaf()) { - diff.recordDelete(node.key, node.value) - } - await leftWalker.advance() - continue - } - if (leftWalker.status.done || rightWalker.status.done) break - const left = leftWalker.status.curr - const right = rightWalker.status.curr - if (left === null || right === null) break - - // if both pointers are leaves, record an update & advance both or record the lowest key and advance that pointer - if (left.isLeaf() && right.isLeaf()) { - if (left.key === right.key) { - if (!left.value.equals(right.value)) { - diff.recordUpdate(left.key, left.value, right.value) - } - await leftWalker.advance() - await rightWalker.advance() - } else if (left.key < right.key) { - diff.recordDelete(left.key, left.value) - await leftWalker.advance() - } else { - diff.recordAdd(right.key, right.value) - await rightWalker.advance() - } - continue - } - - // next, ensure that we're on the same layer - // if one walker is at a higher layer than the other, we need to do one of two things - // if the higher walker is pointed at a tree, step into that tree to try to catch up with the lower - // if the higher walker is pointed at a leaf, then advance the lower walker to try to catch up the higher - if (leftWalker.layer() > rightWalker.layer()) { - if (left.isLeaf()) { - if (right.isLeaf()) { - diff.recordAdd(right.key, right.value) - } else { - diff.recordNewCid(right.pointer) - } - await rightWalker.advance() - } else { - await leftWalker.stepInto() - } - continue - } else if (leftWalker.layer() < rightWalker.layer()) { - if (right.isLeaf()) { - if (left.isLeaf()) { - diff.recordDelete(left.key, left.value) - } - await leftWalker.advance() - } else { - diff.recordNewCid(right.pointer) - await rightWalker.stepInto() - } - continue - } - - // if we're on the same level, and both pointers are trees, do a comparison - // if they're the same, step over. 
if they're different, step in to find the subdiff - if (left.isTree() && right.isTree()) { - if (left.pointer.equals(right.pointer)) { - await leftWalker.stepOver() - await rightWalker.stepOver() - } else { - diff.recordNewCid(right.pointer) - await leftWalker.stepInto() - await rightWalker.stepInto() - } - continue - } - - // finally, if one pointer is a tree and the other is a leaf, simply step into the tree - if (left.isLeaf() && right.isTree()) { - await diff.recordNewCid(right.pointer) - await rightWalker.stepInto() - continue - } else if (left.isTree() && right.isLeaf()) { - await leftWalker.stepInto() - continue - } - - throw new Error('Unidentifiable case in diff walk') - } - return diff - } - // Simple Operations // ------------------- @@ -549,6 +441,16 @@ export class MST implements DataStore { return this.newTree(update) } + // if the topmost node in the tree only points to another tree, trim the top and return the subtree + async trimTop(): Promise { + const entries = await this.getEntries() + if (entries.length === 1 && entries[0].isTree()) { + return entries[0].trimTop() + } else { + return this + } + } + // Subtree & Splits // ------------------- @@ -666,7 +568,11 @@ export class MST implements DataStore { } } - async list(count: number, after?: string, before?: string): Promise { + async list( + count = Number.MAX_SAFE_INTEGER, + after?: string, + before?: string, + ): Promise { const vals: Leaf[] = [] for await (const leaf of this.walkLeavesFrom(after || '')) { if (leaf.key === after) continue @@ -766,14 +672,46 @@ export class MST implements DataStore { // Sync Protocol async writeToCarStream(car: BlockWriter): Promise { - for await (const entry of this.walk()) { - if (entry.isTree()) { - const pointer = await entry.getPointer() - await this.storage.addToCar(car, pointer) + const entries = await this.getEntries() + const leaves = new CidSet() + let toFetch = new CidSet() + toFetch.add(await this.getPointer()) + for (const entry of entries) { + if (entry.isLeaf()) { + leaves.add(entry.value) } else { - await this.storage.addToCar(car, entry.value) + toFetch.add(await entry.getPointer()) } } + while (toFetch.size() > 0) { + const nextLayer = new CidSet() + const fetched = await this.storage.getBlocks(toFetch.toList()) + if (fetched.missing.length > 0) { + throw new MissingBlocksError('mst node', fetched.missing) + } + for (const cid of toFetch.toList()) { + const found = await parse.getAndParse(fetched.blocks, cid, nodeDataDef) + await car.put({ cid, bytes: found.bytes }) + const entries = await util.deserializeNodeData(this.storage, found.obj) + + for (const entry of entries) { + if (entry.isLeaf()) { + leaves.add(entry.value) + } else { + nextLayer.add(await entry.getPointer()) + } + } + } + toFetch = nextLayer + } + const leafData = await this.storage.getBlocks(leaves.toList()) + if (leafData.missing.length > 0) { + throw new MissingBlocksError('mst leaf', leafData.missing) + } + + for (const leaf of leafData.blocks.entries()) { + await car.put(leaf) + } } // Matching Leaf interface diff --git a/packages/repo/src/mst/util.ts b/packages/repo/src/mst/util.ts index ac50ddb3080..4ac319d52e9 100644 --- a/packages/repo/src/mst/util.ts +++ b/packages/repo/src/mst/util.ts @@ -1,6 +1,6 @@ import { CID } from 'multiformats' import * as uint8arrays from 'uint8arrays' -import { RepoStorage } from '../storage' +import { ReadableBlockstore } from '../storage' import { sha256 } from '@atproto/crypto' import { MST, Leaf, NodeEntry, NodeData, MstOpts, Fanout } from './mst' import { 
cidForData } from '@atproto/common' @@ -39,7 +39,7 @@ export const layerForEntries = async ( } export const deserializeNodeData = async ( - storage: RepoStorage, + storage: ReadableBlockstore, data: NodeData, opts?: Partial, ): Promise => { diff --git a/packages/repo/src/parse.ts b/packages/repo/src/parse.ts new file mode 100644 index 00000000000..98d5205b636 --- /dev/null +++ b/packages/repo/src/parse.ts @@ -0,0 +1,30 @@ +import { check, ipldBytesToValue } from '@atproto/common' +import { CID } from 'multiformats/cid' +import BlockMap from './block-map' +import { MissingBlockError, UnexpectedObjectError } from './error' + +export const getAndParse = async ( + blocks: BlockMap, + cid: CID, + def: check.Def, +): Promise<{ obj: T; bytes: Uint8Array }> => { + const bytes = blocks.get(cid) + if (!bytes) { + throw new MissingBlockError(cid, def) + } + return parseObj(bytes, cid, def) +} + +export const parseObj = async ( + bytes: Uint8Array, + cid: CID, + def: check.Def, +): Promise<{ obj: T; bytes: Uint8Array }> => { + const obj = await ipldBytesToValue(bytes) + const res = def.schema.safeParse(obj) + if (res.success) { + return { obj: res.data, bytes } + } else { + throw new UnexpectedObjectError(cid, def) + } +} diff --git a/packages/repo/src/readable-repo.ts b/packages/repo/src/readable-repo.ts new file mode 100644 index 00000000000..3446e9479ff --- /dev/null +++ b/packages/repo/src/readable-repo.ts @@ -0,0 +1,88 @@ +import { CID } from 'multiformats/cid' +import { + RepoRoot, + Commit, + def, + DataStore, + RepoMeta, + RepoContents, +} from './types' +import { ReadableBlockstore } from './storage' +import { MST } from './mst' +import log from './logger' +import * as util from './util' +import * as parse from './parse' +import { MissingBlocksError } from './error' + +type Params = { + storage: ReadableBlockstore + data: DataStore + commit: Commit + root: RepoRoot + meta: RepoMeta + cid: CID +} + +export class ReadableRepo { + storage: ReadableBlockstore + data: DataStore + commit: Commit + root: RepoRoot + meta: RepoMeta + cid: CID + + constructor(params: Params) { + this.storage = params.storage + this.data = params.data + this.commit = params.commit + this.root = params.root + this.meta = params.meta + this.cid = params.cid + } + + static async load(storage: ReadableBlockstore, commitCid: CID) { + const commit = await storage.readObj(commitCid, def.commit) + const root = await storage.readObj(commit.root, def.repoRoot) + const meta = await storage.readObj(root.meta, def.repoMeta) + const data = await MST.load(storage, root.data) + log.info({ did: meta.did }, 'loaded repo for') + return new ReadableRepo({ + storage, + data, + commit, + root, + meta, + cid: commitCid, + }) + } + + get did(): string { + return this.meta.did + } + + async getRecord(collection: string, rkey: string): Promise { + const dataKey = collection + '/' + rkey + const cid = await this.data.get(dataKey) + if (!cid) return null + return this.storage.readObj(cid, def.unknown) + } + + async getContents(): Promise { + const entries = await this.data.list() + const cids = entries.map((e) => e.value) + const { blocks, missing } = await this.storage.getBlocks(cids) + if (missing.length > 0) { + throw new MissingBlocksError('getContents record', missing) + } + const contents: RepoContents = {} + for (const entry of entries) { + const { collection, rkey } = util.parseRecordKey(entry.key) + contents[collection] ??= {} + const parsed = await parse.getAndParse(blocks, entry.value, def.record) + contents[collection][rkey] = 
parsed.obj + } + return contents + } +} + +export default ReadableRepo diff --git a/packages/repo/src/repo.ts b/packages/repo/src/repo.ts index dad0a9ee92a..d0432affb88 100644 --- a/packages/repo/src/repo.ts +++ b/packages/repo/src/repo.ts @@ -1,6 +1,6 @@ import { CID } from 'multiformats/cid' -import { CarWriter } from '@ipld/car' import { BlockWriter } from '@ipld/car/writer' +import * as crypto from '@atproto/crypto' import { RepoRoot, Commit, @@ -12,12 +12,13 @@ import { CommitData, WriteOpAction, } from './types' -import { streamToArray } from '@atproto/common' import { RepoStorage } from './storage' -import * as auth from '@atproto/auth' import { MST } from './mst' +import DataDiff from './data-diff' import log from './logger' import BlockMap from './block-map' +import { ReadableRepo } from './readable-repo' +import * as util from './util' type Params = { storage: RepoStorage @@ -28,32 +29,19 @@ type Params = { cid: CID } -export class Repo { +export class Repo extends ReadableRepo { storage: RepoStorage - data: DataStore - commit: Commit - root: RepoRoot - meta: RepoMeta - cid: CID constructor(params: Params) { - this.storage = params.storage - this.data = params.data - this.commit = params.commit - this.root = params.root - this.meta = params.meta - this.cid = params.cid + super(params) } static async formatInitCommit( storage: RepoStorage, did: string, - authStore: auth.AuthStore, + keypair: crypto.Keypair, initialRecords: RecordCreateOp[] = [], ): Promise { - if (!(await authStore.canSignForDid(did))) { - throw new Error(`provided authStore cannot sign for did: ${did}`) - } const newBlocks = new BlockMap() let data = await MST.create(storage) @@ -62,8 +50,8 @@ export class Repo { const dataKey = write.collection + '/' + write.rkey data = await data.add(dataKey, cid) } - const dataDiff = await data.blockDiff() - newBlocks.addMap(dataDiff.blocks) + const unstoredData = await data.getUnstoredBlocks() + newBlocks.addMap(unstoredData.blocks) const meta: RepoMeta = { did, @@ -75,19 +63,18 @@ export class Repo { const root: RepoRoot = { meta: metaCid, prev: null, - auth_token: null, - data: dataDiff.root, + data: unstoredData.root, } const rootCid = await newBlocks.add(root) const commit: Commit = { root: rootCid, - sig: await authStore.sign(rootCid.bytes), + sig: await keypair.sign(rootCid.bytes), } const commitCid = await newBlocks.add(commit) return { - root: commitCid, + commit: commitCid, prev: null, blocks: newBlocks, } @@ -96,18 +83,18 @@ export class Repo { static async create( storage: RepoStorage, did: string, - authStore: auth.AuthStore, + keypair: crypto.Keypair, initialRecords: RecordCreateOp[] = [], ): Promise { const commit = await Repo.formatInitCommit( storage, did, - authStore, + keypair, initialRecords, ) await storage.applyCommit(commit) log.info({ did }, `created repo`) - return Repo.load(storage, commit.root) + return Repo.load(storage, commit.commit) } static async load(storage: RepoStorage, cid?: CID) { @@ -115,9 +102,9 @@ export class Repo { if (!commitCid) { throw new Error('No cid provided and none in storage') } - const commit = await storage.get(commitCid, def.commit) - const root = await storage.get(commit.root, def.repoRoot) - const meta = await storage.get(root.meta, def.repoMeta) + const commit = await storage.readObj(commitCid, def.commit) + const root = await storage.readObj(commit.root, def.repoRoot) + const meta = await storage.readObj(root.meta, def.repoMeta) const data = await MST.load(storage, root.data) log.info({ did: meta.did }, 'loaded repo 
for') return new Repo({ @@ -130,24 +117,10 @@ export class Repo { }) } - get did(): string { - return this.meta.did - } - - async getRecord(collection: string, rkey: string): Promise { - const dataKey = collection + '/' + rkey - const cid = await this.data.get(dataKey) - if (!cid) return null - return this.storage.getUnchecked(cid) - } - async createCommit( toWrite: RecordWriteOp | RecordWriteOp[], - authStore: auth.AuthStore, + keypair: crypto.Keypair, ): Promise { - if (!(await authStore.canSignForDid(this.did))) { - throw new Error(`provided authStore cannot sign for did: ${this.did}`) - } const writes = Array.isArray(toWrite) ? toWrite : [toWrite] const newBlocks = new BlockMap() @@ -167,25 +140,38 @@ export class Repo { } } - const dataDiff = await data.blockDiff() - newBlocks.addMap(dataDiff.blocks) + const unstoredData = await data.getUnstoredBlocks() + newBlocks.addMap(unstoredData.blocks) + + // ensure we're not missing any blocks that were removed and then readded in this commit + const diff = await DataDiff.of(data, this.data) + const found = newBlocks.getMany(diff.newCidList()) + if (found.missing.length > 0) { + const fromStorage = await this.storage.getBlocks(found.missing) + if (fromStorage.missing.length > 0) { + // this shouldn't ever happen + throw new Error( + 'Could not find block for commit in Datastore or storage', + ) + } + newBlocks.addMap(fromStorage.blocks) + } const root: RepoRoot = { meta: this.root.meta, prev: this.cid, - auth_token: null, - data: dataDiff.root, + data: unstoredData.root, } const rootCid = await newBlocks.add(root) const commit: Commit = { root: rootCid, - sig: await authStore.sign(rootCid.bytes), + sig: await keypair.sign(rootCid.bytes), } const commitCid = await newBlocks.add(commit) return { - root: commitCid, + commit: commitCid, prev: this.cid, blocks: newBlocks, } @@ -193,67 +179,42 @@ export class Repo { async applyCommit( toWrite: RecordWriteOp | RecordWriteOp[], - authStore: auth.AuthStore, + keypair: crypto.Keypair, ): Promise { - const commit = await this.createCommit(toWrite, authStore) + const commit = await this.createCommit(toWrite, keypair) await this.storage.applyCommit(commit) - return Repo.load(this.storage, commit.root) - } - - async revert(count: number): Promise { - let revertTo = this.cid - for (let i = 0; i < count; i++) { - const commit = await this.storage.get(revertTo, def.commit) - const root = await this.storage.get(commit.root, def.repoRoot) - if (root.prev === null) { - throw new Error(`Could not revert ${count} commits`) - } - revertTo = root.prev - } - return Repo.load(this.storage, revertTo) + return Repo.load(this.storage, commit.commit) } // CAR FILES // ----------- - async getCarNoHistory(): Promise { - return this.openCar((car: BlockWriter) => { + async getCheckout(): Promise { + return util.writeCar(this.cid, (car: BlockWriter) => { return this.writeCheckoutToCarStream(car) }) } - async getDiffCar(to: CID | null): Promise { - return this.openCar((car: BlockWriter) => { + async getDiff(to: CID | null): Promise { + return util.writeCar(this.cid, (car: BlockWriter) => { return this.writeCommitsToCarStream(car, this.cid, to) }) } - async getFullHistory(): Promise { - return this.getDiffCar(null) - } - - private async openCar( - fn: (car: BlockWriter) => Promise, - ): Promise { - const { writer, out } = CarWriter.create([this.cid]) - const bytes = streamToArray(out) - try { - await fn(writer) - } finally { - writer.close() - } - return bytes + async getFullRepo(): Promise { + return this.getDiff(null) } async 
writeCheckoutToCarStream(car: BlockWriter): Promise { - const commit = await this.storage.get(this.cid, def.commit) - const root = await this.storage.get(commit.root, def.repoRoot) - await this.storage.addToCar(car, this.cid) - await this.storage.addToCar(car, commit.root) - await this.storage.addToCar(car, root.meta) - if (root.auth_token) { - await this.storage.addToCar(car, root.auth_token) - } + const commit = await this.storage.readObjAndBytes(this.cid, def.commit) + await car.put({ cid: this.cid, bytes: commit.bytes }) + const root = await this.storage.readObjAndBytes( + commit.obj.root, + def.repoRoot, + ) + await car.put({ cid: commit.obj.root, bytes: root.bytes }) + const meta = await this.storage.readObjAndBytes(root.obj.meta, def.repoMeta) + await car.put({ cid: root.obj.meta, bytes: meta.bytes }) await this.data.writeToCarStream(car) } diff --git a/packages/repo/src/storage/index.ts b/packages/repo/src/storage/index.ts index 6d540a5ebb6..c5eb715d59a 100644 --- a/packages/repo/src/storage/index.ts +++ b/packages/repo/src/storage/index.ts @@ -1,3 +1,5 @@ -export * from './memory-blockstore' +export * from './readable-blockstore' export * from './repo-storage' +export * from './memory-blockstore' +export * from './sync-storage' export * from './types' diff --git a/packages/repo/src/storage/memory-blockstore.ts b/packages/repo/src/storage/memory-blockstore.ts index e3b36c4e5ef..dc35dae667a 100644 --- a/packages/repo/src/storage/memory-blockstore.ts +++ b/packages/repo/src/storage/memory-blockstore.ts @@ -1,43 +1,60 @@ import { CID } from 'multiformats/cid' -import RepoStorage from './repo-storage' -import { CommitBlockData, CommitData, def } from '../types' +import { CommitData, def } from '../types' import BlockMap from '../block-map' import { MST } from '../mst' +import DataDiff from '../data-diff' +import { MissingCommitBlocksError } from '../error' +import RepoStorage from './repo-storage' export class MemoryBlockstore extends RepoStorage { blocks: BlockMap head: CID | null = null - constructor() { + constructor(blocks?: BlockMap) { super() this.blocks = new BlockMap() + if (blocks) { + this.blocks.addMap(blocks) + } } async getHead(): Promise { return this.head } - async getSavedBytes(cid: CID): Promise { + async getBytes(cid: CID): Promise { return this.blocks.get(cid) || null } - async hasSavedBytes(cid: CID): Promise { + async has(cid: CID): Promise { return this.blocks.has(cid) } + async getBlocks(cids: CID[]): Promise<{ blocks: BlockMap; missing: CID[] }> { + return this.blocks.getMany(cids) + } + async putBlock(cid: CID, block: Uint8Array): Promise { this.blocks.set(cid, block) } async putMany(blocks: BlockMap): Promise { - blocks.forEach((val, key) => { - this.blocks.set(key, val) + this.blocks.addMap(blocks) + } + + async indexCommits(commits: CommitData[]): Promise { + commits.forEach((commit) => { + this.blocks.addMap(commit.blocks) }) } + async updateHead(cid: CID, _prev: CID | null): Promise { + this.head = cid + } + async applyCommit(commit: CommitData): Promise { this.blocks.addMap(commit.blocks) - this.head = commit.root + this.head = commit.commit } async getCommitPath( @@ -48,56 +65,41 @@ export class MemoryBlockstore extends RepoStorage { const path: CID[] = [] while (curr !== null) { path.push(curr) - const commit = await this.get(curr, def.commit) - if (earliest && curr.equals(earliest)) { - return path.reverse() - } - const root = await this.get(commit.root, def.repoRoot) + const commit = await this.readObj(curr, def.commit) + const root = await 
this.readObj(commit.root, def.repoRoot) if (!earliest && root.prev === null) { return path.reverse() + } else if (earliest && root.prev.equals(earliest)) { + return path.reverse() } curr = root.prev } return null } - async getMany(cids: CID[]): Promise { - const blocks = new BlockMap() - await Promise.all( - cids.map(async (cid) => { - const bytes = await this.getBytes(cid) - if (bytes) { - blocks.set(cid, bytes) - } - }), - ) - return blocks - } - - async getCommits( - latest: CID, - earliest: CID | null, - ): Promise { - const commitPath = await this.getCommitPath(latest, earliest) - if (commitPath === null) return null - const commitData: CommitBlockData[] = [] + async getBlocksForCommits( + commits: CID[], + ): Promise<{ [commit: string]: BlockMap }> { + const commitData: { [commit: string]: BlockMap } = {} let prevData: MST | null = null - for (const commitCid of commitPath) { - const commit = await this.get(commitCid, def.commit) - const root = await this.get(commit.root, def.repoRoot) + for (const commitCid of commits) { + const commit = await this.readObj(commitCid, def.commit) + const root = await this.readObj(commit.root, def.repoRoot) const data = await MST.load(this, root.data) - const newCids = prevData - ? (await prevData.diff(data)).newCidList() - : (await data.allCids()).toList() - const blocks = await this.getMany([commitCid, commit.root, ...newCids]) + const diff = await DataDiff.of(data, prevData) + const { blocks, missing } = await this.getBlocks([ + commitCid, + commit.root, + ...diff.newCidList(), + ]) + if (missing.length > 0) { + throw new MissingCommitBlocksError(commitCid, missing) + } if (!root.prev) { - const metaBytes = await this.guaranteeBytes(root.meta) - blocks.set(root.meta, metaBytes) + const meta = await this.readObjAndBytes(root.meta, def.repoMeta) + blocks.set(root.meta, meta.bytes) } - commitData.push({ - root: commitCid, - blocks, - }) + commitData[commitCid.toString()] = blocks prevData = data } return commitData @@ -114,15 +116,6 @@ export class MemoryBlockstore extends RepoStorage { async destroy(): Promise { this.blocks.clear() } - - // Mainly for dev purposes - async getContents(): Promise> { - const contents: Record = {} - for (const entry of this.blocks.entries()) { - contents[entry.cid.toString()] = await this.getUnchecked(entry.cid) - } - return contents - } } export default MemoryBlockstore diff --git a/packages/repo/src/storage/readable-blockstore.ts b/packages/repo/src/storage/readable-blockstore.ts new file mode 100644 index 00000000000..3c5419cdb16 --- /dev/null +++ b/packages/repo/src/storage/readable-blockstore.ts @@ -0,0 +1,29 @@ +import { check } from '@atproto/common' +import { CID } from 'multiformats/cid' +import BlockMap from '../block-map' +import { MissingBlockError } from '../error' +import * as parse from '../parse' + +export abstract class ReadableBlockstore { + abstract getBytes(cid: CID): Promise + abstract has(cid: CID): Promise + abstract getBlocks(cids: CID[]): Promise<{ blocks: BlockMap; missing: CID[] }> + + async readObjAndBytes( + cid: CID, + def: check.Def, + ): Promise<{ obj: T; bytes: Uint8Array }> { + const bytes = await this.getBytes(cid) + if (!bytes) { + throw new MissingBlockError(cid, def) + } + return parse.parseObj(bytes, cid, def) + } + + async readObj(cid: CID, def: check.Def): Promise { + const obj = await this.readObjAndBytes(cid, def) + return obj.obj + } +} + +export default ReadableBlockstore diff --git a/packages/repo/src/storage/repo-storage.ts b/packages/repo/src/storage/repo-storage.ts index 
4b13c390be7..7383545cdff 100644 --- a/packages/repo/src/storage/repo-storage.ts +++ b/packages/repo/src/storage/repo-storage.ts @@ -1,91 +1,41 @@ import { CID } from 'multiformats/cid' -import { BlockWriter } from '@ipld/car/writer' - -import * as common from '@atproto/common' -import { check } from '@atproto/common' -import { DataDiff } from '../mst' -import { CommitBlockData, CommitData } from '../types' import BlockMap from '../block-map' -import * as util from '../util' - -export abstract class RepoStorage { - temp: BlockMap = new BlockMap() +import { CommitBlockData, CommitData } from '../types' +import ReadableBlockstore from './readable-blockstore' +export abstract class RepoStorage extends ReadableBlockstore { abstract getHead(forUpdate?: boolean): Promise - abstract getSavedBytes(cid: CID): Promise - abstract hasSavedBytes(cid: CID): Promise - abstract putBlock(cid: CID, block: Uint8Array): Promise - abstract putMany(blocks: BlockMap): Promise - abstract applyCommit(commit: CommitData): Promise abstract getCommitPath( latest: CID, earliest: CID | null, ): Promise - abstract getCommits( - latest: CID, - earliest: CID | null, - ): Promise - abstract destroy(): Promise - - async get(cid: CID, schema: check.Def): Promise { - const value = await this.getUnchecked(cid) - try { - return check.assure(schema, value) - } catch (err) { - throw new Error( - `Did not find expected object at ${cid.toString()}: ${err}`, - ) - } - } - - async getUnchecked(cid: CID): Promise { - const bytes = await this.getBytes(cid) - if (!bytes) { - throw new Error(`Not found: ${cid.toString()}`) - } - return common.ipldBytesToValue(bytes) - } + abstract getBlocksForCommits( + commits: CID[], + ): Promise<{ [commit: string]: BlockMap }> - async getBytes(cid: CID): Promise { - return this.temp.get(cid) || (await this.getSavedBytes(cid)) - } - - async guaranteeBytes(cid: CID): Promise { - const bytes = await this.getBytes(cid) - if (!bytes) { - throw new Error(`Not found: ${cid.toString()}`) - } - return bytes - } - - async has(cid: CID): Promise { - return this.temp.has(cid) || (await this.hasSavedBytes(cid)) - } - - async isMissing(cid: CID): Promise { - const has = await this.has(cid) - return !has - } + abstract putBlock(cid: CID, block: Uint8Array): Promise + abstract putMany(blocks: BlockMap): Promise + abstract updateHead(cid: CID, prev: CID | null): Promise + abstract indexCommits(commit: CommitData[]): Promise - async addToCar(car: BlockWriter, cid: CID) { - car.put({ cid, bytes: await this.guaranteeBytes(cid) }) + async applyCommit(commit: CommitData): Promise { + await Promise.all([ + this.indexCommits([commit]), + this.updateHead(commit.commit, commit.prev), + ]) } - async loadDiff( - carBytes: Uint8Array, - verify: (root: CID) => Promise, - ): Promise<{ root: CID; diff: DataDiff }> { - const { root, blocks } = await util.readCar(carBytes) - this.temp.addMap(blocks) - try { - const diff = await verify(root) - await this.putMany(this.temp) - this.temp.clear() - return { root, diff } - } catch (err) { - this.temp.clear() - throw err - } + async getCommits( + latest: CID, + earliest: CID | null, + ): Promise { + const commitPath = await this.getCommitPath(latest, earliest) + if (!commitPath) return null + const blocksByCommit = await this.getBlocksForCommits(commitPath) + return commitPath.map((commit) => ({ + commit, + blocks: blocksByCommit[commit.toString()] || new BlockMap(), + })) } } diff --git a/packages/repo/src/storage/sync-storage.ts b/packages/repo/src/storage/sync-storage.ts new file mode 
100644 index 00000000000..02a039ca57b --- /dev/null +++ b/packages/repo/src/storage/sync-storage.ts @@ -0,0 +1,35 @@ +import { CID } from 'multiformats/cid' +import BlockMap from '../block-map' +import ReadableBlockstore from './readable-blockstore' + +export class SyncStorage extends ReadableBlockstore { + constructor( + public staged: ReadableBlockstore, + public saved: ReadableBlockstore, + ) { + super() + } + + async getBytes(cid: CID): Promise { + const got = await this.staged.getBytes(cid) + if (got) return got + return this.saved.getBytes(cid) + } + + async getBlocks(cids: CID[]): Promise<{ blocks: BlockMap; missing: CID[] }> { + const fromStaged = await this.staged.getBlocks(cids) + const fromSaved = await this.saved.getBlocks(fromStaged.missing) + const blocks = fromStaged.blocks + blocks.add(fromSaved.blocks) + return { + blocks, + missing: fromSaved.missing, + } + } + + async has(cid: CID): Promise { + return (await this.staged.has(cid)) || (await this.saved.has(cid)) + } +} + +export default SyncStorage diff --git a/packages/repo/src/sync.ts b/packages/repo/src/sync.ts index b0d07916041..fec88449d31 100644 --- a/packages/repo/src/sync.ts +++ b/packages/repo/src/sync.ts @@ -1,33 +1,118 @@ -import * as auth from '@atproto/auth' import { CID } from 'multiformats/cid' -import { RepoStorage } from './storage' -import { DataDiff } from './mst' +import { DidResolver } from '@atproto/did-resolver' +import { MemoryBlockstore, RepoStorage } from './storage' +import DataDiff from './data-diff' import Repo from './repo' import * as verify from './verify' +import * as util from './util' +import { CommitData, RecordWriteOp, RepoContents } from './types' +import CidSet from './cid-set' +import { MissingBlocksError } from './error' -export const loadRepoFromCar = async ( - carBytes: Uint8Array, +export const loadCheckout = async ( storage: RepoStorage, - verifier: auth.Verifier, -): Promise => { - const { root } = await storage.loadDiff(carBytes, (root: CID) => { - return verify.verifyUpdates(storage, null, root, verifier) - }) - return Repo.load(storage, root) + repoCar: Uint8Array, + didResolver: DidResolver, +): Promise<{ root: CID; contents: RepoContents }> => { + const { root, blocks } = await util.readCar(repoCar) + const updateStorage = new MemoryBlockstore(blocks) + const checkout = await verify.verifyCheckout(updateStorage, root, didResolver) + + const checkoutBlocks = await updateStorage.getBlocks( + checkout.newCids.toList(), + ) + if (checkoutBlocks.missing.length > 0) { + throw new MissingBlocksError('sync', checkoutBlocks.missing) + } + await Promise.all([ + storage.putMany(checkoutBlocks.blocks), + storage.updateHead(root, null), + ]) + + return { + root, + contents: checkout.contents, + } +} + +export const loadFullRepo = async ( + storage: RepoStorage, + repoCar: Uint8Array, + didResolver: DidResolver, +): Promise<{ root: CID; ops: RecordWriteOp[] }> => { + const { root, blocks } = await util.readCar(repoCar) + const updateStorage = new MemoryBlockstore(blocks) + const updates = await verify.verifyFullHistory( + updateStorage, + root, + didResolver, + ) + + const [ops] = await Promise.all([ + persistUpdates(storage, updateStorage, updates), + storage.updateHead(root, null), + ]) + + return { + root, + ops, + } } export const loadDiff = async ( repo: Repo, diffCar: Uint8Array, - verifier: auth.Verifier, -): Promise<{ repo: Repo; diff: DataDiff }> => { - const storage = repo.storage - const { root, diff } = await storage.loadDiff(diffCar, (root: CID) => { - return 
verify.verifyUpdates(storage, repo.cid, root, verifier) - }) - const updatedRepo = await Repo.load(storage, root) + didResolver: DidResolver, +): Promise<{ root: CID; ops: RecordWriteOp[] }> => { + const { root, blocks } = await util.readCar(diffCar) + const updateStorage = new MemoryBlockstore(blocks) + const updates = await verify.verifyUpdates( + repo, + updateStorage, + root, + didResolver, + ) + + const [ops] = await Promise.all([ + persistUpdates(repo.storage, updateStorage, updates), + repo.storage.updateHead(root, repo.cid), + ]) + return { - repo: updatedRepo, - diff, + root, + ops, + } +} + +export const persistUpdates = async ( + storage: RepoStorage, + updateStorage: RepoStorage, + updates: verify.VerifiedUpdate[], +): Promise => { + const newCids = new CidSet() + const fullDiff = new DataDiff() + for (const update of updates) { + newCids.addSet(update.newCids) + fullDiff.addDiff(update.diff) + } + + const diffBlocks = await updateStorage.getBlocks(newCids.toList()) + if (diffBlocks.missing.length > 0) { + throw new MissingBlocksError('sync', diffBlocks.missing) } + const commits: CommitData[] = updates.map((update) => { + const forCommit = diffBlocks.blocks.getMany(update.newCids.toList()) + if (forCommit.missing.length > 0) { + throw new MissingBlocksError('sync', forCommit.missing) + } + return { + commit: update.commit, + prev: update.prev, + blocks: forCommit.blocks, + } + }) + + await storage.indexCommits(commits) + + return util.diffToWriteOps(fullDiff, diffBlocks.blocks) } diff --git a/packages/repo/src/types.ts b/packages/repo/src/types.ts index 0348d13bbbc..d4d81a913cf 100644 --- a/packages/repo/src/types.ts +++ b/packages/repo/src/types.ts @@ -1,10 +1,12 @@ import { z } from 'zod' import { BlockWriter } from '@ipld/car/writer' -import { def as common } from '@atproto/common' +import { schema as common, def as commonDef } from '@atproto/common' import { CID } from 'multiformats' -import { DataDiff } from './mst' import BlockMap from './block-map' +// Repo nodes +// --------------- + const repoMeta = z.object({ did: z.string(), version: z.number(), @@ -15,7 +17,7 @@ export type RepoMeta = z.infer const repoRoot = z.object({ meta: common.cid, prev: common.cid.nullable(), - auth_token: common.cid.nullable(), + auth_token: common.cid.nullable().optional(), data: common.cid, }) export type RepoRoot = z.infer @@ -26,6 +28,32 @@ const commit = z.object({ }) export type Commit = z.infer +export const schema = { + ...common, + repoMeta, + repoRoot, + commit, +} + +export const def = { + ...commonDef, + repoMeta: { + name: 'repo meta', + schema: schema.repoMeta, + }, + repoRoot: { + name: 'repo root', + schema: schema.repoRoot, + }, + commit: { + name: 'commit', + schema: schema.commit, + }, +} + +// Repo Operations +// --------------- + export enum WriteOpAction { Create = 'create', Update = 'update', @@ -54,15 +82,11 @@ export type RecordDeleteOp = { export type RecordWriteOp = RecordCreateOp | RecordUpdateOp | RecordDeleteOp -export const def = { - ...common, - repoMeta, - repoRoot, - commit, -} +// Updates/Commits +// --------------- export type CommitBlockData = { - root: CID + commit: CID blocks: BlockMap } @@ -70,10 +94,16 @@ export type CommitData = CommitBlockData & { prev: CID | null } -export interface CarStreamable { - writeToCarStream(car: BlockWriter): Promise +export type RepoUpdate = CommitData & { + ops: RecordWriteOp[] } +export type CollectionContents = Record> +export type RepoContents = Record + +// DataStores +// --------------- + export type DataValue = { 
key: string value: CID @@ -84,9 +114,8 @@ export interface DataStore { update(key: string, value: CID): Promise delete(key: string): Promise get(key: string): Promise - list(count: number, after?: string, before?: string): Promise + list(count?: number, after?: string, before?: string): Promise listWithPrefix(prefix: string, count?: number): Promise - diff(other: DataStore): Promise - blockDiff(): Promise<{ root: CID; blocks: BlockMap }> + getUnstoredBlocks(): Promise<{ root: CID; blocks: BlockMap }> writeToCarStream(car: BlockWriter): Promise } diff --git a/packages/repo/src/util.ts b/packages/repo/src/util.ts index dfeceb3984f..51090128adf 100644 --- a/packages/repo/src/util.ts +++ b/packages/repo/src/util.ts @@ -1,9 +1,11 @@ import { CID } from 'multiformats/cid' import { CarReader } from '@ipld/car/reader' -import * as auth from '@atproto/auth' -import { def } from '@atproto/common' +import { BlockWriter, CarWriter } from '@ipld/car/writer' +import { Block as CarBlock } from '@ipld/car/api' +import { def, streamToArray, verifyCidForBytes } from '@atproto/common' import Repo from './repo' -import { DataDiff, MST } from './mst' +import { MST } from './mst' +import DataDiff from './data-diff' import { RepoStorage } from './storage' import { DataStore, @@ -14,17 +16,30 @@ import { WriteOpAction, } from './types' import BlockMap from './block-map' +import { MissingBlocksError } from './error' +import * as parse from './parse' -export const ucanForOperation = async ( - prevData: DataStore, - newData: DataStore, - rootDid: string, - authStore: auth.AuthStore, -): Promise => { - const diff = await prevData.diff(newData) - const neededCaps = diff.neededCapabilities(rootDid) - const ucanForOp = await authStore.createUcanForCaps(rootDid, neededCaps, 30) - return auth.encodeUcan(ucanForOp) +export async function* verifyIncomingCarBlocks( + car: AsyncIterable, +): AsyncIterable { + for await (const block of car) { + await verifyCidForBytes(block.cid, block.bytes) + yield block + } +} + +export const writeCar = async ( + root: CID, + fn: (car: BlockWriter) => Promise, +): Promise => { + const { writer, out } = CarWriter.create(root) + const bytes = streamToArray(out) + try { + await fn(writer) + } finally { + writer.close() + } + return bytes } export const readCar = async ( @@ -37,7 +52,7 @@ export const readCar = async ( } const root = roots[0] const blocks = new BlockMap() - for await (const block of car.blocks()) { + for await (const block of verifyIncomingCarBlocks(car.blocks())) { await blocks.set(block.cid, block.bytes) } return { @@ -59,36 +74,43 @@ export const getWriteOpLog = async ( const msts = heads.map((h) => h.data) const diffs: DataDiff[] = [] for (const mst of msts) { - diffs.push(await prev.diff(mst)) + diffs.push(await DataDiff.of(mst, prev)) prev = mst } + const fullDiff = collapseDiffs(diffs) + const diffBlocks = await storage.getBlocks(fullDiff.newCidList()) + if (diffBlocks.missing.length > 0) { + throw new MissingBlocksError('write op log', diffBlocks.missing) + } // Map MST diffs to write ops - return Promise.all(diffs.map((diff) => diffToWriteOps(storage, diff))) + return Promise.all( + diffs.map((diff) => diffToWriteOps(diff, diffBlocks.blocks)), + ) } export const diffToWriteOps = ( - storage: RepoStorage, diff: DataDiff, + blocks: BlockMap, ): Promise => { return Promise.all([ ...diff.addList().map(async (add) => { const { collection, rkey } = parseRecordKey(add.key) - const value = await storage.get(add.cid, def.record) + const value = await parse.getAndParse(blocks, 
add.cid, def.record) return { action: WriteOpAction.Create, collection, rkey, - value, + value: value.obj, } as RecordCreateOp }), ...diff.updateList().map(async (upd) => { const { collection, rkey } = parseRecordKey(upd.key) - const value = await storage.get(upd.cid, def.record) + const value = await parse.getAndParse(blocks, upd.cid, def.record) return { action: WriteOpAction.Update, collection, rkey, - value, + value: value.obj, } as RecordUpdateOp }), ...diff.deleteList().map((del) => { @@ -102,6 +124,13 @@ export const diffToWriteOps = ( ]) } +export const collapseDiffs = (diffs: DataDiff[]): DataDiff => { + return diffs.reduce((acc, cur) => { + acc.addDiff(cur) + return acc + }, new DataDiff()) +} + export const parseRecordKey = (key: string) => { const parts = key.split('/') if (parts.length !== 2) throw new Error(`Invalid record key: ${key}`) diff --git a/packages/repo/src/verify.ts b/packages/repo/src/verify.ts index 439e48eae22..7e3fa584d14 100644 --- a/packages/repo/src/verify.ts +++ b/packages/repo/src/verify.ts @@ -1,52 +1,134 @@ import { CID } from 'multiformats/cid' -import * as auth from '@atproto/auth' -import { RepoStorage } from './storage' +import { DidResolver } from '@atproto/did-resolver' +import * as crypto from '@atproto/crypto' +import { ReadableBlockstore, RepoStorage } from './storage' +import DataDiff from './data-diff' +import SyncStorage from './storage/sync-storage' +import ReadableRepo from './readable-repo' import Repo from './repo' -import { DataDiff } from './mst' -import { def } from './types' +import CidSet from './cid-set' +import { parseRecordKey } from './util' +import { RepoContents } from './types' +import { def } from '@atproto/common' -export const verifyUpdates = async ( +export type VerifiedCheckout = { + contents: RepoContents + newCids: CidSet +} + +export const verifyCheckout = async ( + storage: ReadableBlockstore, + root: CID, + didResolver: DidResolver, +): Promise => { + const repo = await ReadableRepo.load(storage, root) + const validSig = await didResolver.verifySignature( + repo.did, + repo.commit.root.bytes, + repo.commit.sig, + ) + if (!validSig) { + throw new RepoVerificationError( + `Invalid signature on commit: ${repo.cid.toString()}`, + ) + } + const diff = await DataDiff.of(repo.data, null) + const newCids = new CidSet([ + repo.cid, + repo.commit.root, + repo.root.meta, + ]).addSet(diff.newCids) + + const contents: RepoContents = {} + for (const add of diff.addList()) { + const { collection, rkey } = parseRecordKey(add.key) + if (!contents[collection]) { + contents[collection] = {} + } + contents[collection][rkey] = await storage.readObj(add.cid, def.record) + } + + return { + contents, + newCids, + } +} + +export type VerifiedUpdate = { + commit: CID + prev: CID | null + diff: DataDiff + newCids: CidSet +} + +export const verifyFullHistory = async ( storage: RepoStorage, - earliest: CID | null, - latest: CID, - verifier: auth.Verifier, -): Promise => { - const commitPath = await storage.getCommitPath(latest, earliest) + root: CID, + didResolver: DidResolver, +): Promise => { + const commitPath = await storage.getCommitPath(root, null) if (commitPath === null) { throw new RepoVerificationError('Could not find shared history') + } else if (commitPath.length < 1) { + throw new RepoVerificationError('Expected at least one commit') } - const fullDiff = new DataDiff() - if (commitPath.length === 0) return fullDiff - let prevRepo = await Repo.load(storage, commitPath[0]) - for (const commit of commitPath.slice(1)) { - const nextRepo 
= await Repo.load(storage, commit) - const diff = await prevRepo.data.diff(nextRepo.data) + const baseRepo = await Repo.load(storage, commitPath[0]) + const baseDiff = await DataDiff.of(baseRepo.data, null) + const baseRepoCids = new CidSet([ + baseRepo.cid, + baseRepo.commit.root, + baseRepo.root.meta, + ]).addSet(baseDiff.newCids) + const init: VerifiedUpdate = { + commit: baseRepo.cid, + prev: null, + diff: baseDiff, + newCids: baseRepoCids, + } + const updates = await verifyCommitPath( + baseRepo, + storage, + commitPath.slice(1), + didResolver, + ) + return [init, ...updates] +} + +export const verifyUpdates = async ( + repo: ReadableRepo, + updateStorage: RepoStorage, + updateRoot: CID, + didResolver: DidResolver, +): Promise => { + const commitPath = await updateStorage.getCommitPath(updateRoot, repo.cid) + if (commitPath === null) { + throw new RepoVerificationError('Could not find shared history') + } + const syncStorage = new SyncStorage(updateStorage, repo.storage) + return verifyCommitPath(repo, syncStorage, commitPath, didResolver) +} + +export const verifyCommitPath = async ( + baseRepo: ReadableRepo, + storage: ReadableBlockstore, + commitPath: CID[], + didResolver: DidResolver, +): Promise => { + const signingKey = await didResolver.resolveSigningKey(baseRepo.did) + const updates: VerifiedUpdate[] = [] + if (commitPath.length === 0) return updates + let prevRepo = baseRepo + for (const commit of commitPath) { + const nextRepo = await ReadableRepo.load(storage, commit) + const diff = await DataDiff.of(nextRepo.data, prevRepo.data) if (!nextRepo.root.meta.equals(prevRepo.root.meta)) { throw new RepoVerificationError('Not supported: repo metadata updated') } - let didForSignature: string - if (nextRepo.root.auth_token) { - // verify auth token covers all necessary writes - const encodedToken = await storage.get( - nextRepo.root.auth_token, - def.string, - ) - const token = await verifier.validateUcan(encodedToken) - const neededCaps = diff.neededCapabilities(prevRepo.did) - for (const cap of neededCaps) { - await verifier.verifyAtpUcan(token, prevRepo.did, cap) - } - didForSignature = token.payload.iss - } else { - didForSignature = prevRepo.did - } - // verify signature matches repo root + auth token - // const commit = await toRepo.getCommit() - const validSig = await verifier.verifySignature( - didForSignature, + const validSig = await crypto.verifySignature( + signingKey, nextRepo.commit.root.bytes, nextRepo.commit.sig, ) @@ -56,10 +138,19 @@ export const verifyUpdates = async ( ) } - fullDiff.addDiff(diff) + const newCids = new CidSet([nextRepo.cid, nextRepo.commit.root]).addSet( + diff.newCids, + ) + + updates.push({ + commit: nextRepo.cid, + prev: prevRepo.cid, + diff, + newCids, + }) prevRepo = nextRepo } - return fullDiff + return updates } export class RepoVerificationError extends Error {} diff --git a/packages/repo/tests/_util.ts b/packages/repo/tests/_util.ts index 0a694fa6a61..14c14b2fb5d 100644 --- a/packages/repo/tests/_util.ts +++ b/packages/repo/tests/_util.ts @@ -1,11 +1,16 @@ +import fs from 'fs' import { CID } from 'multiformats' -import { cidForData, TID, valueToIpldBlock } from '@atproto/common' -import * as auth from '@atproto/auth' +import { TID, valueToIpldBlock } from '@atproto/common' +import * as crypto from '@atproto/crypto' import { Repo } from '../src/repo' import { RepoStorage } from '../src/storage' -import { DataDiff, MST } from '../src/mst' -import fs from 'fs' -import { RecordWriteOp, WriteOpAction } from '../src' +import { MST } from 
'../src/mst' +import { + CollectionContents, + RecordWriteOp, + RepoContents, + WriteOpAction, +} from '../src' type IdMapping = Record @@ -76,18 +81,15 @@ export const generateObject = (): Record => { export const testCollections = ['com.example.posts', 'com.example.likes'] -export type CollectionData = Record -export type RepoData = Record - export const fillRepo = async ( repo: Repo, - authStore: auth.AuthStore, + keypair: crypto.Keypair, itemsPerCollection: number, -): Promise<{ repo: Repo; data: RepoData }> => { - const repoData: RepoData = {} +): Promise<{ repo: Repo; data: RepoContents }> => { + const repoData: RepoContents = {} const writes: RecordWriteOp[] = [] for (const collName of testCollections) { - const collData: CollectionData = {} + const collData: CollectionContents = {} for (let i = 0; i < itemsPerCollection; i++) { const object = generateObject() const rkey = TID.nextStr() @@ -101,7 +103,7 @@ export const fillRepo = async ( } repoData[collName] = collData } - const updated = await repo.applyCommit(writes, authStore) + const updated = await repo.applyCommit(writes, keypair) return { repo: updated, data: repoData, @@ -110,17 +112,16 @@ export const fillRepo = async ( export const editRepo = async ( repo: Repo, - prevData: RepoData, - authStore: auth.AuthStore, + prevData: RepoContents, + keypair: crypto.Keypair, params: { adds?: number updates?: number deletes?: number }, -): Promise<{ repo: Repo; data: RepoData }> => { +): Promise<{ repo: Repo; data: RepoContents }> => { const { adds = 0, updates = 0, deletes = 0 } = params - const repoData: RepoData = {} - const writes: RecordWriteOp[] = [] + const repoData: RepoContents = {} for (const collName of testCollections) { const collData = prevData[collName] const shuffled = shuffle(Object.entries(collData)) @@ -129,99 +130,82 @@ export const editRepo = async ( const object = generateObject() const rkey = TID.nextStr() collData[rkey] = object - writes.push({ - action: WriteOpAction.Create, - collection: collName, - rkey, - value: object, - }) + repo = await repo.applyCommit( + { + action: WriteOpAction.Create, + collection: collName, + rkey, + value: object, + }, + keypair, + ) } const toUpdate = shuffled.slice(0, updates) for (let i = 0; i < toUpdate.length; i++) { const object = generateObject() const rkey = toUpdate[i][0] - writes.push({ - action: WriteOpAction.Update, - collection: collName, - rkey, - value: object, - }) + repo = await repo.applyCommit( + { + action: WriteOpAction.Update, + collection: collName, + rkey, + value: object, + }, + keypair, + ) collData[rkey] = object } const toDelete = shuffled.slice(updates, deletes) for (let i = 0; i < toDelete.length; i++) { const rkey = toDelete[i][0] - writes.push({ - action: WriteOpAction.Delete, - collection: collName, - rkey, - }) + repo = await repo.applyCommit( + { + action: WriteOpAction.Delete, + collection: collName, + rkey, + }, + keypair, + ) delete collData[rkey] } repoData[collName] = collData } - const updated = await repo.applyCommit(writes, authStore) return { - repo: updated, + repo, data: repoData, } } -export const checkRepo = async (repo: Repo, data: RepoData): Promise => { - for (const collName of Object.keys(data)) { - const collData = data[collName] - for (const rkey of Object.keys(collData)) { - const record = await repo.getRecord(collName, rkey) - expect(record).toEqual(collData[rkey]) - } - } -} - -export const checkRepoDiff = async ( - diff: DataDiff, - before: RepoData, - after: RepoData, +export const verifyRepoDiff = async ( + ops: 
RecordWriteOp[], + before: RepoContents, + after: RepoContents, ): Promise => { - const getObjectCid = async ( - key: string, - data: RepoData, - ): Promise => { - const parts = key.split('/') - const collection = parts[0] - const obj = (data[collection] || {})[parts[1]] - return obj === undefined ? undefined : cidForData(obj) - } - - for (const add of diff.addList()) { - const beforeCid = await getObjectCid(add.key, before) - const afterCid = await getObjectCid(add.key, after) - - expect(beforeCid).toBeUndefined() - expect(afterCid).toEqual(add.cid) - } - - for (const update of diff.updateList()) { - const beforeCid = await getObjectCid(update.key, before) - const afterCid = await getObjectCid(update.key, after) - - expect(beforeCid).toEqual(update.prev) - expect(afterCid).toEqual(update.cid) - } - - for (const del of diff.deleteList()) { - const beforeCid = await getObjectCid(del.key, before) - const afterCid = await getObjectCid(del.key, after) - - expect(beforeCid).toEqual(del.cid) - expect(afterCid).toBeUndefined() + const getVal = (op: RecordWriteOp, data: RepoContents) => { + return (data[op.collection] || {})[op.rkey] + } + + for (const op of ops) { + if (op.action === WriteOpAction.Create) { + expect(getVal(op, before)).toBeUndefined() + expect(getVal(op, after)).toEqual(op.value) + } else if (op.action === WriteOpAction.Update) { + expect(getVal(op, before)).toBeDefined() + expect(getVal(op, after)).toEqual(op.value) + } else if (op.action === WriteOpAction.Delete) { + expect(getVal(op, before)).toBeDefined() + expect(getVal(op, after)).toBeUndefined() + } else { + throw new Error('unexpected op type') + } } } -export const saveMst = async (mst: MST): Promise => { - const diff = await mst.blockDiff() - await mst.storage.putMany(diff.blocks) +export const saveMst = async (storage: RepoStorage, mst: MST): Promise => { + const diff = await mst.getUnstoredBlocks() + await storage.putMany(diff.blocks) return diff.root } diff --git a/packages/repo/tests/mst.test.ts b/packages/repo/tests/mst.test.ts index 4ba32477b0c..346861f8ce0 100644 --- a/packages/repo/tests/mst.test.ts +++ b/packages/repo/tests/mst.test.ts @@ -1,4 +1,5 @@ -import { MST, DataAdd, DataUpdate, DataDelete } from '../src/mst' +import { MST } from '../src/mst' +import DataDiff, { DataAdd, DataUpdate, DataDelete } from '../src/data-diff' import { countPrefixLen } from '../src/mst/util' import { MemoryBlockstore } from '../src/storage' @@ -90,7 +91,7 @@ describe('Merkle Search Tree', () => { }) it('saves and loads from blockstore', async () => { - const root = await util.saveMst(mst) + const root = await util.saveMst(blockstore, mst) const loaded = await MST.load(blockstore, root) const origNodes = await mst.allNodes() const loadedNodes = await loaded.allNodes() @@ -131,7 +132,7 @@ describe('Merkle Search Tree', () => { expectedDels[entry[0]] = { key: entry[0], cid: entry[1] } } - const diff = await mst.diff(toDiff) + const diff = await DataDiff.of(toDiff, mst) expect(diff.addList().length).toBe(100) expect(diff.updateList().length).toBe(100) @@ -175,7 +176,7 @@ describe('Merkle Search Tree', () => { const layer = await mst.getLayer() expect(layer).toBe(1) mst = await mst.delete(layer1) - const root = await util.saveMst(mst) + const root = await util.saveMst(blockstore, mst) const loaded = MST.load(blockstore, root) const loadedLayer = await loaded.getLayer() expect(loadedLayer).toBe(0) @@ -224,7 +225,7 @@ describe('Merkle Search Tree', () => { const layer = await mst.getLayer() expect(layer).toBe(2) - const root = await 
util.saveMst(mst) + const root = await util.saveMst(blockstore, mst) mst = MST.load(blockstore, root, { fanout: 32 }) const allTids = [...layer0, ...layer1, layer2] @@ -256,7 +257,7 @@ describe('Merkle Search Tree', () => { } mst = await mst.add(layer2, cid) - const root = await util.saveMst(mst) + const root = await util.saveMst(blockstore, mst) mst = MST.load(blockstore, root, { fanout: 32 }) const layer = await mst.getLayer() diff --git a/packages/repo/tests/repo.test.ts b/packages/repo/tests/repo.test.ts index 58b8ffc4dfb..0f872769e52 100644 --- a/packages/repo/tests/repo.test.ts +++ b/packages/repo/tests/repo.test.ts @@ -1,29 +1,27 @@ -import * as auth from '@atproto/auth' - +import * as crypto from '@atproto/crypto' import { Repo } from '../src/repo' import { MemoryBlockstore } from '../src/storage' import * as util from './_util' import { TID } from '@atproto/common' -import { WriteOpAction } from '../src' +import { RepoContents, WriteOpAction } from '../src' +import { Secp256k1Keypair } from '@atproto/crypto' describe('Repo', () => { - const verifier = new auth.Verifier() const collName = 'com.example.posts' - let blockstore: MemoryBlockstore - let authStore: auth.AuthStore + let storage: MemoryBlockstore + let keypair: crypto.Keypair let repo: Repo - let repoData: util.RepoData + let repoData: RepoContents it('creates repo', async () => { - blockstore = new MemoryBlockstore() - authStore = await verifier.createTempAuthStore() - await authStore.claimFull() - repo = await Repo.create(blockstore, await authStore.did(), authStore) + storage = new MemoryBlockstore() + keypair = await Secp256k1Keypair.create() + repo = await Repo.create(storage, keypair.did(), keypair) }) it('has proper metadata', async () => { - expect(repo.meta.did).toEqual(await authStore.did()) + expect(repo.meta.did).toEqual(keypair.did()) expect(repo.meta.version).toBe(1) expect(repo.meta.datastore).toBe('mst') }) @@ -38,7 +36,7 @@ describe('Repo', () => { rkey: rkey, value: record, }, - authStore, + keypair, ) let got = await repo.getRecord(collName, rkey) @@ -52,7 +50,7 @@ describe('Repo', () => { rkey: rkey, value: updatedRecord, }, - authStore, + keypair, ) got = await repo.getRecord(collName, rkey) expect(got).toEqual(updatedRecord) @@ -63,32 +61,34 @@ describe('Repo', () => { collection: collName, rkey: rkey, }, - authStore, + keypair, ) got = await repo.getRecord(collName, rkey) expect(got).toBeNull() }) it('adds content collections', async () => { - const filled = await util.fillRepo(repo, authStore, 100) + const filled = await util.fillRepo(repo, keypair, 100) repo = filled.repo repoData = filled.data - await util.checkRepo(repo, repoData) + const contents = await repo.getContents() + expect(contents).toEqual(repoData) }) it('edits and deletes content', async () => { - const edited = await util.editRepo(repo, repoData, authStore, { + const edited = await util.editRepo(repo, repoData, keypair, { adds: 20, updates: 20, deletes: 20, }) repo = edited.repo - await util.checkRepo(repo, repoData) + const contents = await repo.getContents() + expect(contents).toEqual(repoData) }) it('adds a valid signature to commit', async () => { const commit = await repo.commit - const verified = await verifier.verifySignature( + const verified = await crypto.verifySignature( repo.did, commit.root.bytes, commit.sig, @@ -97,14 +97,15 @@ describe('Repo', () => { }) it('sets correct DID', async () => { - expect(repo.did).toEqual(await authStore.did()) + expect(repo.did).toEqual(await keypair.did()) }) it('loads from blockstore', 
async () => { - const reloadedRepo = await Repo.load(blockstore, repo.cid) + const reloadedRepo = await Repo.load(storage, repo.cid) - await util.checkRepo(reloadedRepo, repoData) - expect(repo.meta.did).toEqual(await authStore.did()) + const contents = await reloadedRepo.getContents() + expect(contents).toEqual(repoData) + expect(repo.meta.did).toEqual(keypair.did()) expect(repo.meta.version).toBe(1) expect(repo.meta.datastore).toBe('mst') }) diff --git a/packages/repo/tests/sync.test.ts b/packages/repo/tests/sync.test.ts index d90a6eafe26..aa46ac9b2ba 100644 --- a/packages/repo/tests/sync.test.ts +++ b/packages/repo/tests/sync.test.ts @@ -1,6 +1,7 @@ -import * as auth from '@atproto/auth' +import * as crypto from '@atproto/crypto' import { TID } from '@atproto/common' -import { Repo, RepoRoot, verifyUpdates, ucanForOperation } from '../src' +import { DidResolver } from '@atproto/did-resolver' +import { Repo, RepoContents, RepoRoot } from '../src' import BlockMap from '../src/block-map' import { MemoryBlockstore } from '../src/storage' import * as sync from '../src/sync' @@ -8,129 +9,111 @@ import * as sync from '../src/sync' import * as util from './_util' describe('Sync', () => { - const verifier = new auth.Verifier() - - let aliceBlockstore: MemoryBlockstore, bobBlockstore: MemoryBlockstore - let aliceRepo: Repo - let aliceAuth: auth.AuthStore - let repoData: util.RepoData + let blockstore: MemoryBlockstore + let syncBlockstore: MemoryBlockstore + let checkoutBlockstore: MemoryBlockstore + let repo: Repo + let keypair: crypto.Keypair + let repoData: RepoContents + const didResolver = new DidResolver() beforeAll(async () => { - aliceBlockstore = new MemoryBlockstore() - aliceAuth = await verifier.createTempAuthStore() - await aliceAuth.claimFull() - aliceRepo = await Repo.create( - aliceBlockstore, - await aliceAuth.did(), - aliceAuth, - ) - bobBlockstore = new MemoryBlockstore() + blockstore = new MemoryBlockstore() + keypair = await crypto.Secp256k1Keypair.create() + repo = await Repo.create(blockstore, keypair.did(), keypair) + syncBlockstore = new MemoryBlockstore() + checkoutBlockstore = new MemoryBlockstore() }) + let bobRepo: Repo + it('syncs an empty repo', async () => { - const car = await aliceRepo.getFullHistory() - const repoBob = await sync.loadRepoFromCar(car, bobBlockstore, verifier) - const data = await repoBob.data.list(10) + const car = await repo.getFullRepo() + const loaded = await sync.loadFullRepo(syncBlockstore, car, didResolver) + bobRepo = await Repo.load(syncBlockstore, loaded.root) + const data = await bobRepo.data.list(10) expect(data.length).toBe(0) }) - let bobRepo: Repo - it('syncs a repo that is starting from scratch', async () => { - const filled = await util.fillRepo(aliceRepo, aliceAuth, 100) - aliceRepo = filled.repo + const filled = await util.fillRepo(repo, keypair, 100) + repo = filled.repo repoData = filled.data - await aliceRepo.getFullHistory() + await repo.getFullRepo() - const car = await aliceRepo.getFullHistory() - bobRepo = await sync.loadRepoFromCar(car, bobBlockstore, verifier) - const diff = await verifyUpdates(bobBlockstore, null, bobRepo.cid, verifier) - await util.checkRepo(bobRepo, repoData) - await util.checkRepoDiff(diff, {}, repoData) + const car = await repo.getFullRepo() + const loaded = await sync.loadFullRepo(syncBlockstore, car, didResolver) + bobRepo = await Repo.load(syncBlockstore, loaded.root) + const contents = await bobRepo.getContents() + expect(contents).toEqual(repoData) + await util.verifyRepoDiff(loaded.ops, {}, 
repoData) }) it('syncs a repo that is behind', async () => { // add more to alice's repo & have bob catch up const beforeData = JSON.parse(JSON.stringify(repoData)) - const edited = await util.editRepo(aliceRepo, repoData, aliceAuth, { + const edited = await util.editRepo(repo, repoData, keypair, { adds: 20, updates: 20, deletes: 20, }) - aliceRepo = edited.repo + repo = edited.repo repoData = edited.data - const diffCar = await aliceRepo.getDiffCar(bobRepo.cid) - const loaded = await sync.loadDiff(bobRepo, diffCar, verifier) - await util.checkRepo(loaded.repo, repoData) - await util.checkRepoDiff(loaded.diff, beforeData, repoData) + const diffCar = await repo.getDiff(bobRepo.cid) + const loaded = await sync.loadDiff(bobRepo, diffCar, didResolver) + bobRepo = await Repo.load(syncBlockstore, loaded.root) + const contents = await bobRepo.getContents() + expect(contents).toEqual(repoData) + await util.verifyRepoDiff(loaded.ops, beforeData, repoData) }) - it('throws an error on invalid UCANs', async () => { + it('throws on a bad signature', async () => { const obj = util.generateObject() const blocks = new BlockMap() const cid = await blocks.add(obj) - const updatedData = await aliceRepo.data.add( + const updatedData = await repo.data.add( `com.example.test/${TID.next()}`, cid, ) - // we create an unrelated token for bob & try to permission alice's repo commit with it - const bobAuth = await verifier.createTempAuthStore() - const badUcan = await bobAuth.claimFull() - const auth_token = await blocks.add(auth.encodeUcan(badUcan)) - const dataDiff = await updatedData.blockDiff() - blocks.addMap(dataDiff.blocks) + const unstoredData = await updatedData.getUnstoredBlocks() + blocks.addMap(unstoredData.blocks) const root: RepoRoot = { - meta: aliceRepo.root.meta, - prev: aliceRepo.cid, - auth_token, - data: dataDiff.root, + meta: repo.root.meta, + prev: repo.cid, + data: unstoredData.root, } const rootCid = await blocks.add(root) + // we generate a bad sig by signing the data cid instead of root cid const commit = { root: rootCid, - sig: await aliceAuth.sign(rootCid.bytes), + sig: await keypair.sign(unstoredData.root.bytes), } const commitCid = await blocks.add(commit) - await aliceBlockstore.putMany(blocks) - const badAliceRepo = await Repo.load(aliceBlockstore, commitCid) - const diffCar = await badAliceRepo.getDiffCar(bobRepo.cid) - await expect(sync.loadDiff(bobRepo, diffCar, verifier)).rejects.toThrow() - // await aliceBlockstore.clearStaged() + await blockstore.putMany(blocks) + const badrepo = await Repo.load(blockstore, commitCid) + const diffCar = await badrepo.getDiff(bobRepo.cid) + await expect(sync.loadDiff(bobRepo, diffCar, didResolver)).rejects.toThrow() }) - it('throws on a bad signature', async () => { - const obj = util.generateObject() - const blocks = new BlockMap() - const cid = await blocks.add(obj) - const updatedData = await aliceRepo.data.add( - `com.example.test/${TID.next()}`, - cid, - ) - const authToken = await ucanForOperation( - aliceRepo.data, - updatedData, - aliceRepo.did, - aliceAuth, + it('sync a non-historical repo checkout', async () => { + const checkoutBytes = await repo.getCheckout() + const checkout = await sync.loadCheckout( + checkoutBlockstore, + checkoutBytes, + didResolver, ) - const authCid = await blocks.add(authToken) - const dataDiff = await updatedData.blockDiff() - blocks.addMap(dataDiff.blocks) - const root: RepoRoot = { - meta: aliceRepo.root.meta, - prev: aliceRepo.cid, - auth_token: authCid, - data: dataDiff.root, - } - const rootCid = await 
blocks.add(root) - // we generated a bad sig by signing the data cid instead of root cid - const commit = { - root: rootCid, - sig: await aliceAuth.sign(dataDiff.root.bytes), + const checkoutRepo = await Repo.load(checkoutBlockstore, checkout.root) + const contents = await checkoutRepo.getContents() + expect(contents).toEqual(repoData) + expect(checkout.contents).toEqual(repoData) + }) + + it('does not sync unneeded blocks during checkout', async () => { + const commitPath = await blockstore.getCommitPath(repo.cid, null) + if (!commitPath) { + throw new Error('Could not get commitPath') } - const commitCid = await blocks.add(commit) - await aliceBlockstore.putMany(blocks) - const badAliceRepo = await Repo.load(aliceBlockstore, commitCid) - const diffCar = await badAliceRepo.getDiffCar(bobRepo.cid) - await expect(sync.loadDiff(bobRepo, diffCar, verifier)).rejects.toThrow() + const hasGenesisCommit = await checkoutBlockstore.has(commitPath[0]) + expect(hasGenesisCommit).toBeFalsy() }) }) diff --git a/packages/repo/tsconfig.json b/packages/repo/tsconfig.json index ca20365f26f..46d3e15d1fe 100644 --- a/packages/repo/tsconfig.json +++ b/packages/repo/tsconfig.json @@ -6,8 +6,9 @@ }, "include": ["./src","__tests__/**/**.ts"], "references": [ - { "path": "../auth/tsconfig.build.json" }, { "path": "../common/tsconfig.build.json" }, + { "path": "../crypto/tsconfig.build.json" }, + { "path": "../did-resolver/tsconfig.build.json" }, { "path": "../nsid/tsconfig.build.json" }, ] } \ No newline at end of file diff --git a/tsconfig.json b/tsconfig.json index d705e5ccc11..2fc569e99c1 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -27,7 +27,6 @@ { "path": "./packages/plc/tsconfig.build.json" }, { "path": "./packages/pds/tsconfig.build.json" }, { "path": "./packages/api/tsconfig.build.json" }, - { "path": "./packages/auth/tsconfig.build.json" }, { "path": "./packages/aws/tsconfig.build.json" }, { "path": "./packages/common/tsconfig.build.json" }, { "path": "./packages/crypto/tsconfig.build.json" }, diff --git a/yarn.lock b/yarn.lock index 9601d62c64a..6a23697d704 100644 --- a/yarn.lock +++ b/yarn.lock @@ -4155,13 +4155,6 @@ "@typescript-eslint/types" "5.38.1" eslint-visitor-keys "^3.3.0" -"@ucans/core@0.11.0": - version "0.11.0" - resolved "https://registry.npmjs.org/@ucans/core/-/core-0.11.0.tgz" - integrity sha512-SHX67e313kKBaur5Cp+6WFeOLC7aBhkf1i1jIFpFb9f0f1cvM/lC3mjzOyUBeDg3QwmcN5QSZzaogVFvuVvzvg== - dependencies: - uint8arrays "3.0.0" - JSONStream@^1.0.4: version "1.3.5" resolved "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz"
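For reviewers, here is a minimal end-to-end usage sketch of the reworked surface: keypair-signed commits, DidResolver-based verification, and the split between a non-historical checkout (Repo.getCheckout / sync.loadCheckout) and history sync surfaced as write ops (Repo.getFullRepo / getDiff with sync.loadFullRepo / loadDiff). It is written only against APIs exercised by the updated tests in this diff; the import paths mirror packages/repo/tests, and the collection name, rkey, and record value are illustrative placeholders rather than part of this change.

import * as crypto from '@atproto/crypto'
import { TID } from '@atproto/common'
import { DidResolver } from '@atproto/did-resolver'
import { Repo, WriteOpAction } from '../src'
import { MemoryBlockstore } from '../src/storage'
import * as sync from '../src/sync'

const demo = async () => {
  // Author side: commits are signed directly with a crypto.Keypair (no UCAN auth store).
  const storage = new MemoryBlockstore()
  const keypair = await crypto.Secp256k1Keypair.create()
  const rkey = TID.nextStr()
  let repo = await Repo.create(storage, keypair.did(), keypair)
  repo = await repo.applyCommit(
    {
      action: WriteOpAction.Create,
      collection: 'com.example.posts', // illustrative collection, rkey, and record value
      rkey,
      value: { text: 'hello world' },
    },
    keypair,
  )

  // Consumer side: signatures are verified against keys resolved through DidResolver.
  const didResolver = new DidResolver()

  // (1) Non-historical checkout: current commit, MST nodes, and records only.
  const checkoutStorage = new MemoryBlockstore()
  const checkout = await sync.loadCheckout(
    checkoutStorage,
    await repo.getCheckout(),
    didResolver,
  )
  const checkoutRepo = await Repo.load(checkoutStorage, checkout.root)
  console.log(await checkoutRepo.getContents()) // RepoContents, also available as checkout.contents

  // (2) Full history once, then catch-up diffs surfaced as RecordWriteOp[].
  const syncStorage = new MemoryBlockstore()
  const full = await sync.loadFullRepo(syncStorage, await repo.getFullRepo(), didResolver)
  let mirror = await Repo.load(syncStorage, full.root)

  repo = await repo.applyCommit(
    { action: WriteOpAction.Delete, collection: 'com.example.posts', rkey },
    keypair,
  )
  const diff = await sync.loadDiff(mirror, await repo.getDiff(mirror.cid), didResolver)
  mirror = await Repo.load(syncStorage, diff.root)
  console.log(diff.ops) // the delete above, recovered from the verified commit path
}

demo().catch(console.error)

The producing side maps one-to-one onto the consuming side: getCheckout pairs with loadCheckout for state-only sync, while getFullRepo and getDiff pair with loadFullRepo and loadDiff, which verify the commit path and hand back the corresponding RecordWriteOp[].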