diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..fe36757 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,20 @@ +name: Node.js Test + +on: + push: + branches: [ "main" ] + pull_request: + branches: [ "main" ] + +jobs: + build: + runs-on: ubuntu-latest + environment: CI + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: 22 + registry-url: https://registry.npmjs.org/ + - run: npm ci + - run: npm test diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..99e5ce8 --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +/node_modules/ +/.parcel-cache/ +*.DS_Store diff --git a/.npmrc b/.npmrc new file mode 100644 index 0000000..1dab4ed --- /dev/null +++ b/.npmrc @@ -0,0 +1 @@ +save-exact = true diff --git a/buildHelper.ts b/buildHelper.ts new file mode 100644 index 0000000..2c3d010 --- /dev/null +++ b/buildHelper.ts @@ -0,0 +1,83 @@ +import fs from 'node:fs/promises' +import { dirname, join } from 'node:path' +import { fileURLToPath } from 'node:url' +import packageJson from './package.json' with { type: 'json' } +import { fork, spawn } from 'node:child_process' + +type Opts = { type: string, tscArgs: string[], renameFileArgs: string[] } + +const optionsMap: Record<string, Opts> = { + esm: { + type: 'module', + tscArgs: ['--project', 'tsconfig.json', '--declaration'], + renameFileArgs: ['esm'] + }, + cjs: { + type: 'commonjs', + tscArgs: ['--project', 'tsconfig.cjs.json', '--declaration'], + renameFileArgs: ['cjs'] + } +} + +const __dirname = dirname(fileURLToPath(import.meta.url)) + +const withTempTypeWrapper = async (newType: string, run: () => Promise<void> | void) => { + const suffix = crypto.randomUUID() + const tempFilePath = join(__dirname, `~$package.json.${suffix}`) + const packageJsonPath = join(__dirname, 'package.json') + await fs.copyFile(packageJsonPath, tempFilePath) + try { + const newContents = structuredClone(packageJson) + newContents.type 
= newType + await fs.writeFile(packageJsonPath, JSON.stringify(newContents, undefined, 2)) + await run() + } finally { + await fs.copyFile(tempFilePath, packageJsonPath, fs.constants.COPYFILE_FICLONE) + await fs.unlink(tempFilePath) + } +} + +const buildInternal = async (opts: Opts) => { + await new Promise<void>((resolve, reject) => { + const tsc = spawn('tsc', opts.tscArgs, { + env: { + ...process.env, + PATH: [join(__dirname, 'node_modules', '.bin'), process.env.PATH].filter(Boolean).join(':') + }, + stdio: 'inherit' + }) + tsc.on('close', (code) => { + if (code !== 0) { + reject(new Error(`[tsc] code ${code}`)) + } else { + resolve() + } + }) + }) + await new Promise<void>((resolve, reject) => { + const rename = fork(new URL('./renameFiles.mjs', import.meta.url), opts.renameFileArgs, { stdio: 'inherit' }) + rename.on('close', (code) => { + if (code !== 0) { + reject(new Error(`[rename] code ${code}`)) + } else { + resolve() + } + }) + }) +} + +const build = async (opts: Opts) => { + if (packageJson.type !== opts.type) { + return withTempTypeWrapper(opts.type, () => buildInternal(opts)) + } else { + return buildInternal(opts) + } +} + +if (process.argv[2] === 'esm') { + await build(optionsMap.esm) +} else if (process.argv[2] === 'cjs') { + await build(optionsMap.cjs) +} else { + console.error('Invalid build type') +} diff --git a/dist/cjs/SPMessage.cjs b/dist/cjs/SPMessage.cjs new file mode 100644 index 0000000..0623a6e --- /dev/null +++ b/dist/cjs/SPMessage.cjs @@ -0,0 +1,431 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SPMessage = void 0; +const crypto_1 = require("@chelonia/crypto"); +const serdes_1 = require("@chelonia/serdes"); +const turtledash_1 = require("turtledash"); +const encryptedData_js_1 = require("./encryptedData.cjs"); +const functions_js_1 = require("./functions.cjs"); +const signedData_js_1 = require("./signedData.cjs"); +// Takes a raw message and processes it so that EncryptedData and SignedData +// 
attributes are defined +const decryptedAndVerifiedDeserializedMessage = (head, headJSON, contractID, parsedMessage, additionalKeys, state) => { + const op = head.op; + const height = head.height; + const message = op === SPMessage.OP_ACTION_ENCRYPTED + ? (0, encryptedData_js_1.encryptedIncomingData)(contractID, state, parsedMessage, height, additionalKeys, headJSON, undefined) + : parsedMessage; + // If the operation is SPMessage.OP_KEY_ADD or SPMessage.OP_KEY_UPDATE, + // extract encrypted data from key.meta?.private?.content + if ([SPMessage.OP_KEY_ADD, SPMessage.OP_KEY_UPDATE].includes(op)) { + return message.map((key) => { + return (0, encryptedData_js_1.maybeEncryptedIncomingData)(contractID, state, key, height, additionalKeys, headJSON, (key) => { + if (key.meta?.private?.content) { + key.meta.private.content = (0, encryptedData_js_1.encryptedIncomingData)(contractID, state, key.meta.private.content, height, additionalKeys, headJSON, (value) => { + // Validator function to verify the key matches its expected ID + const computedKeyId = (0, crypto_1.keyId)(value); + if (computedKeyId !== key.id) { + throw new Error(`Key ID mismatch. 
Expected to decrypt key ID ${key.id} but got ${computedKeyId}`); + } + }); + } + // key.meta?.keyRequest?.contractID could be optionally encrypted + if (key.meta?.keyRequest?.reference) { + try { + key.meta.keyRequest.reference = (0, encryptedData_js_1.maybeEncryptedIncomingData)(contractID, state, key.meta.keyRequest.reference, height, additionalKeys, headJSON)?.valueOf(); + } + catch { + // If we couldn't decrypt it, this value is of no use to us (we + // can't keep track of key requests and key shares), so we delete it + delete key.meta.keyRequest.reference; + } + } + // key.meta?.keyRequest?.contractID could be optionally encrypted + if (key.meta?.keyRequest?.contractID) { + try { + key.meta.keyRequest.contractID = (0, encryptedData_js_1.maybeEncryptedIncomingData)(contractID, state, key.meta.keyRequest.contractID, height, additionalKeys, headJSON)?.valueOf(); + } + catch { + // If we couldn't decrypt it, this value is of no use to us (we + // can't keep track of key requests and key shares), so we delete it + delete key.meta.keyRequest.contractID; + } + } + }); + }); + } + // If the operation is SPMessage.OP_CONTRACT, + // extract encrypted data from keys?.[].meta?.private?.content + if (op === SPMessage.OP_CONTRACT) { + message.keys = message.keys?.map((key) => { + return (0, encryptedData_js_1.maybeEncryptedIncomingData)(contractID, state, key, height, additionalKeys, headJSON, (key) => { + if (!key.meta?.private?.content) + return; + // The following two lines are commented out because this feature + // (using a foreign decryption contract) doesn't seem to be in use and + // the use case seems unclear. + // const decryptionFn = key.meta.private.foreignContractID ? encryptedIncomingForeignData : encryptedIncomingData + // const decryptionContract = key.meta.private.foreignContractID ? 
key.meta.private.foreignContractID : contractID + const decryptionFn = encryptedData_js_1.encryptedIncomingData; + const decryptionContract = contractID; + key.meta.private.content = decryptionFn(decryptionContract, state, key.meta.private.content, height, additionalKeys, headJSON, (value) => { + const computedKeyId = (0, crypto_1.keyId)(value); + if (computedKeyId !== key.id) { + throw new Error(`Key ID mismatch. Expected to decrypt key ID ${key.id} but got ${computedKeyId}`); + } + }); + }); + }); + } + // If the operation is SPMessage.OP_KEY_SHARE, + // extract encrypted data from keys?.[].meta?.private?.content + if (op === SPMessage.OP_KEY_SHARE) { + return (0, encryptedData_js_1.maybeEncryptedIncomingData)(contractID, state, message, height, additionalKeys, headJSON, (message) => { + message.keys?.forEach((key) => { + if (!key.meta?.private?.content) + return; + const decryptionFn = message.foreignContractID ? encryptedData_js_1.encryptedIncomingForeignData : encryptedData_js_1.encryptedIncomingData; + const decryptionContract = message.foreignContractID || contractID; + key.meta.private.content = decryptionFn(decryptionContract, state, key.meta.private.content, height, additionalKeys, headJSON, (value) => { + const computedKeyId = (0, crypto_1.keyId)(value); + if (computedKeyId !== key.id) { + throw new Error(`Key ID mismatch. 
Expected to decrypt key ID ${key.id} but got ${computedKeyId}`); + } + }); + }); + }); + } + // If the operation is OP_KEY_REQUEST, the payload might be EncryptedData + // The ReplyWith attribute is SignedData + if (op === SPMessage.OP_KEY_REQUEST) { + return (0, encryptedData_js_1.maybeEncryptedIncomingData)(contractID, state, message, height, additionalKeys, headJSON, (msg) => { + msg.replyWith = (0, signedData_js_1.signedIncomingData)(msg.contractID, undefined, msg.replyWith, msg.height, headJSON); + }); + } + // If the operation is OP_ACTION_UNENCRYPTED, it may contain an inner + // signature + // Actions must be signed using a key for the current contract + if (op === SPMessage.OP_ACTION_UNENCRYPTED && (0, signedData_js_1.isRawSignedData)(message)) { + return (0, signedData_js_1.signedIncomingData)(contractID, state, message, height, headJSON); + } + // Inner signatures are handled by EncryptedData + if (op === SPMessage.OP_ACTION_ENCRYPTED) { + return message; + } + if (op === SPMessage.OP_KEY_DEL) { + return message.map((key) => { + return (0, encryptedData_js_1.maybeEncryptedIncomingData)(contractID, state, key, height, additionalKeys, headJSON, undefined); + }); + } + if (op === SPMessage.OP_KEY_REQUEST_SEEN) { + return (0, encryptedData_js_1.maybeEncryptedIncomingData)(contractID, state, parsedMessage, height, additionalKeys, headJSON, undefined); + } + // If the operation is OP_ATOMIC, call this function recursively + if (op === SPMessage.OP_ATOMIC) { + return message + .map(([opT, opV]) => [ + opT, + decryptedAndVerifiedDeserializedMessage({ ...head, op: opT }, headJSON, contractID, opV, additionalKeys, state) + ]); + } + return message; +}; +class SPMessage { + // flow type annotations to make flow happy + _mapping; + _head; + _message; + _signedMessageData; + _direction; + _decryptedValue; + _innerSigningKeyId; + static OP_CONTRACT = 'c'; + static OP_ACTION_ENCRYPTED = 'ae'; // e2e-encrypted action + static OP_ACTION_UNENCRYPTED = 'au'; // publicly 
readable action + static OP_KEY_ADD = 'ka'; // add this key to the list of keys allowed to write to this contract, or update an existing key + static OP_KEY_DEL = 'kd'; // remove this key from authorized keys + static OP_KEY_UPDATE = 'ku'; // update key in authorized keys + static OP_PROTOCOL_UPGRADE = 'pu'; + static OP_PROP_SET = 'ps'; // set a public key/value pair + static OP_PROP_DEL = 'pd'; // delete a public key/value pair + static OP_CONTRACT_AUTH = 'ca'; // authorize a contract + static OP_CONTRACT_DEAUTH = 'cd'; // deauthorize a contract + static OP_ATOMIC = 'a'; // atomic op + static OP_KEY_SHARE = 'ks'; // key share + static OP_KEY_REQUEST = 'kr'; // key request + static OP_KEY_REQUEST_SEEN = 'krs'; // key request response + // eslint-disable-next-line camelcase + static createV1_0({ contractID, previousHEAD = null, previousKeyOp = null, + // Height will be automatically set to the correct value when sending + // The reason to set it to Number.MAX_SAFE_INTEGER is so that we can + // temporarily process outgoing messages with signature validation + // still working + height = Number.MAX_SAFE_INTEGER, op, manifest }) { + const head = { + version: '1.0.0', + previousHEAD, + previousKeyOp, + height, + contractID, + op: op[0], + manifest + }; + return new this(messageToParams(head, op[1])); + } + // SPMessage.cloneWith could be used when make a SPMessage object having the same id() + // https://github.com/okTurtles/group-income/issues/1503 + static cloneWith(targetHead, targetOp, sources) { + const head = Object.assign({}, targetHead, sources); + return new this(messageToParams(head, targetOp[1])); + } + static deserialize(value, additionalKeys, state, unwrapMaybeEncryptedDataFn = encryptedData_js_1.unwrapMaybeEncryptedData) { + if (!value) + throw new Error(`deserialize bad value: ${value}`); + const { head: headJSON, ...parsedValue } = JSON.parse(value); + const head = JSON.parse(headJSON); + const contractID = head.op === 
SPMessage.OP_CONTRACT ? (0, functions_js_1.createCID)(value, functions_js_1.multicodes.SHELTER_CONTRACT_DATA) : head.contractID; + // Special case for OP_CONTRACT, since the keys are not yet present in the + // state + if (!state?._vm?.authorizedKeys && head.op === SPMessage.OP_CONTRACT) { + const value = (0, signedData_js_1.rawSignedIncomingData)(parsedValue); + const authorizedKeys = Object.fromEntries(value.valueOf()?.keys.map(wk => { + const k = unwrapMaybeEncryptedDataFn(wk); + if (!k) + return null; + return [k.data.id, k.data]; + // eslint-disable-next-line no-use-before-define + }).filter(Boolean)); + state = { + _vm: { + type: head.type, + authorizedKeys + } + }; + } + const signedMessageData = (0, signedData_js_1.signedIncomingData)(contractID, state, parsedValue, head.height, headJSON, (message) => decryptedAndVerifiedDeserializedMessage(head, headJSON, contractID, message, additionalKeys, state)); + return new this({ + direction: 'incoming', + mapping: { key: (0, functions_js_1.createCID)(value, functions_js_1.multicodes.SHELTER_CONTRACT_DATA), value }, + head, + signedMessageData + }); + } + static deserializeHEAD(value) { + if (!value) + throw new Error(`deserialize bad value: ${value}`); + let head, hash; + const result = { + get head() { + if (head === undefined) { + head = JSON.parse(JSON.parse(value).head); + } + return head; + }, + get hash() { + if (!hash) { + hash = (0, functions_js_1.createCID)(value, functions_js_1.multicodes.SHELTER_CONTRACT_DATA); + } + return hash; + }, + get contractID() { + return result.head?.contractID ?? 
result.hash; + }, + // `description` is not a getter to prevent the value from being copied + // if the object is cloned or serialized + description() { + const type = this.head.op; + return ``; + }, + get isFirstMessage() { + return !result.head?.contractID; + } + }; + return result; + } + constructor(params) { + this._direction = params.direction; + this._mapping = params.mapping; + this._head = params.head; + this._signedMessageData = params.signedMessageData; + // perform basic sanity check + const type = this.opType(); + let atomicTopLevel = true; + const validate = (type, message) => { + switch (type) { + case SPMessage.OP_CONTRACT: + if (!this.isFirstMessage() || !atomicTopLevel) + throw new Error('OP_CONTRACT: must be first message'); + break; + case SPMessage.OP_ATOMIC: + if (!atomicTopLevel) { + throw new Error('OP_ATOMIC not allowed inside of OP_ATOMIC'); + } + if (!Array.isArray(message)) { + throw new TypeError('OP_ATOMIC must be of an array type'); + } + atomicTopLevel = false; + message.forEach(([t, m]) => validate(t, m)); + break; + case SPMessage.OP_KEY_ADD: + case SPMessage.OP_KEY_DEL: + case SPMessage.OP_KEY_UPDATE: + if (!Array.isArray(message)) + throw new TypeError('OP_KEY_{ADD|DEL|UPDATE} must be of an array type'); + break; + case SPMessage.OP_KEY_SHARE: + case SPMessage.OP_KEY_REQUEST: + case SPMessage.OP_KEY_REQUEST_SEEN: + case SPMessage.OP_ACTION_ENCRYPTED: + case SPMessage.OP_ACTION_UNENCRYPTED: + // nothing for now + break; + default: + throw new Error(`unsupported op: ${type}`); + } + }; + // this._message is set as a getter to verify the signature only once the + // message contents are read + Object.defineProperty(this, '_message', { + get: ((validated) => () => { + const message = this._signedMessageData.valueOf(); + // If we haven't validated the message, validate it now + if (!validated) { + validate(type, message); + validated = true; + } + return message; + })() + }); + } + decryptedValue() { + if (this._decryptedValue) + 
return this._decryptedValue; + try { + const value = this.message(); + // TODO: This uses `unwrapMaybeEncryptedData` instead of a configurable + // version based on `skipDecryptionAttempts`. This is fine based on current + // use, and also something else might be confusing based on the explicit + // name of this function, `decryptedValue`. + const data = (0, encryptedData_js_1.unwrapMaybeEncryptedData)(value); + // Did decryption succeed? (unwrapMaybeEncryptedData will return undefined + // on failure) + if (data?.data) { + // The data inside could be signed. In this case, we unwrap that to get + // to the inner contents + if ((0, signedData_js_1.isSignedData)(data.data)) { + this._innerSigningKeyId = data.data.signingKeyId; + this._decryptedValue = data.data.valueOf(); + } + else { + this._decryptedValue = data.data; + } + } + return this._decryptedValue; + } + catch { + // Signature or encryption error + // We don't log this error because it's already logged when the value is + // retrieved + return undefined; + } + } + innerSigningKeyId() { + if (!this._decryptedValue) { + this.decryptedValue(); + } + return this._innerSigningKeyId; + } + head() { return this._head; } + message() { return this._message; } + op() { return [this.head().op, this.message()]; } + rawOp() { return [this.head().op, this._signedMessageData]; } + opType() { return this.head().op; } + opValue() { return this.message(); } + signingKeyId() { return this._signedMessageData.signingKeyId; } + manifest() { return this.head().manifest; } + description() { + const type = this.opType(); + let desc = ``; + } + isFirstMessage() { return !this.head().contractID; } + contractID() { return this.head().contractID || this.hash(); } + serialize() { return this._mapping.value; } + hash() { return this._mapping.key; } + previousKeyOp() { return this._head.previousKeyOp; } + height() { return this._head.height; } + id() { + // TODO: Schedule for later removal + throw new Error('SPMessage.id() was called but 
it has been removed'); + } + direction() { + return this._direction; + } + // `isKeyOp` is used to filter out non-key operations for providing an + // abbreviated chain for snapshot validation + isKeyOp() { + let value; + return !!(keyOps.includes(this.opType()) || + (this.opType() === SPMessage.OP_ATOMIC && Array.isArray(value = this.opValue()) && value.some(([opT]) => { + return keyOps.includes(opT); + }))); + } + static get [serdes_1.serdesTagSymbol]() { + return 'SPMessage'; + } + static [serdes_1.serdesSerializeSymbol](m) { + return [m.serialize(), m.direction(), m.decryptedValue(), m.innerSigningKeyId()]; + } + static [serdes_1.serdesDeserializeSymbol]([serialized, direction, decryptedValue, innerSigningKeyId]) { + const m = SPMessage.deserialize(serialized); + m._direction = direction; + m._decryptedValue = decryptedValue; + m._innerSigningKeyId = innerSigningKeyId; + return m; + } +} +exports.SPMessage = SPMessage; +function messageToParams(head, message) { + // NOTE: the JSON strings generated here must be preserved forever. + // do not ever regenerate this message using the constructor. + // instead store it using serialize() and restore it using deserialize(). + // The issue is that different implementations of JavaScript engines might generate different strings + // when serializing JS objects using JSON.stringify + // and that would lead to different hashes resulting from createCID. + // So to get around this we save the serialized string upon creation + // and keep a copy of it (instead of regenerating it as needed). + // https://github.com/okTurtles/group-income/pull/1513#discussion_r1142809095 + let mapping; + return { + direction: (0, turtledash_1.has)(message, 'recreate') ? 
'outgoing' : 'incoming', + // Lazy computation of mapping to prevent us from serializing outgoing + // atomic operations + get mapping() { + if (!mapping) { + const headJSON = JSON.stringify(head); + const messageJSON = { ...message.serialize(headJSON), head: headJSON }; + const value = JSON.stringify(messageJSON); + mapping = { + key: (0, functions_js_1.createCID)(value, functions_js_1.multicodes.SHELTER_CONTRACT_DATA), + value + }; + } + return mapping; + }, + head, + signedMessageData: message + }; +} +// Operations that affect valid keys +const keyOps = [SPMessage.OP_CONTRACT, SPMessage.OP_KEY_ADD, SPMessage.OP_KEY_DEL, SPMessage.OP_KEY_UPDATE]; diff --git a/dist/cjs/SPMessage.d.cts b/dist/cjs/SPMessage.d.cts new file mode 100644 index 0000000..98f8612 --- /dev/null +++ b/dist/cjs/SPMessage.d.cts @@ -0,0 +1,215 @@ +import type { Key } from '@chelonia/crypto'; +import { CURVE25519XSALSA20POLY1305, EDWARDS25519SHA512BATCH, XSALSA20POLY1305 } from '@chelonia/crypto'; +import { serdesDeserializeSymbol, serdesSerializeSymbol, serdesTagSymbol } from '@chelonia/serdes'; +import type { EncryptedData } from './encryptedData.cjs'; +import type { SignedData } from './signedData.cjs'; +import type { ChelContractState, JSONObject, JSONType } from './types.cjs'; +export type SPKeyType = typeof EDWARDS25519SHA512BATCH | typeof CURVE25519XSALSA20POLY1305 | typeof XSALSA20POLY1305; +export type SPKeyPurpose = 'enc' | 'sig' | 'sak'; +export type SPKey = { + id: string; + name: string; + purpose: SPKeyPurpose[]; + ringLevel: number; + permissions: '*' | string[]; + allowedActions?: '*' | string[]; + meta?: { + quantity?: number; + expires?: number; + private?: { + transient?: boolean; + content?: EncryptedData; + shareable?: boolean; + oldKeys?: string; + }; + keyRequest?: { + contractID?: string; + reference?: string | EncryptedData; + }; + }; + data: string; + foreignKey?: string; + _notBeforeHeight: number; + _notAfterHeight?: number; + _private?: string; +}; +export type 
SPOpContract = { + type: string; + keys: (SPKey | EncryptedData)[]; + parentContract?: string; +}; +export type ProtoSPOpActionUnencrypted = { + action: string; + data: JSONType; + meta: JSONObject; +}; +export type SPOpActionUnencrypted = ProtoSPOpActionUnencrypted | SignedData; +export type SPOpActionEncrypted = EncryptedData; +export type SPOpKeyAdd = (SPKey | EncryptedData)[]; +export type SPOpKeyDel = (string | EncryptedData)[]; +export type SPOpPropSet = { + key: string; + value: JSONType; +}; +export type ProtoSPOpKeyShare = { + contractID: string; + keys: SPKey[]; + foreignContractID?: string; + keyRequestHash?: string; + keyRequestHeight?: number; +}; +export type SPOpKeyShare = ProtoSPOpKeyShare | EncryptedData; +export type ProtoSPOpKeyRequest = { + contractID: string; + height: number; + replyWith: SignedData<{ + encryptionKeyId: string; + responseKey: EncryptedData; + }>; + request: string; +}; +export type SPOpKeyRequest = ProtoSPOpKeyRequest | EncryptedData; +export type ProtoSPOpKeyRequestSeen = { + keyRequestHash: string; + keyShareHash?: string; + success: boolean; +}; +export type SPOpKeyRequestSeen = ProtoSPOpKeyRequestSeen | EncryptedData; +export type SPKeyUpdate = { + name: string; + id?: string; + oldKeyId: string; + data?: string; + purpose?: string[]; + permissions?: string[]; + allowedActions?: '*' | string[]; + meta?: { + quantity?: number; + expires?: number; + private?: { + transient?: boolean; + content?: string; + shareable?: boolean; + oldKeys?: string; + }; + }; +}; +export type SPOpKeyUpdate = (SPKeyUpdate | EncryptedData)[]; +export type SPOpType = 'c' | 'a' | 'ae' | 'au' | 'ka' | 'kd' | 'ku' | 'pu' | 'ps' | 'pd' | 'ks' | 'kr' | 'krs'; +type ProtoSPOpValue = SPOpContract | SPOpActionEncrypted | SPOpActionUnencrypted | SPOpKeyAdd | SPOpKeyDel | SPOpPropSet | SPOpKeyShare | SPOpKeyRequest | SPOpKeyRequestSeen | SPOpKeyUpdate; +export type ProtoSPOpMap = { + 'c': SPOpContract; + 'ae': SPOpActionEncrypted; + 'au': 
SPOpActionUnencrypted; + 'ka': SPOpKeyAdd; + 'kd': SPOpKeyDel; + 'ku': SPOpKeyUpdate; + 'pu': never; + 'ps': SPOpPropSet; + 'pd': never; + 'ks': SPOpKeyShare; + 'kr': SPOpKeyRequest; + 'krs': SPOpKeyRequestSeen; +}; +export type SPOpAtomic = { + [K in keyof ProtoSPOpMap]: [K, ProtoSPOpMap[K]]; +}[keyof ProtoSPOpMap][]; +export type SPOpValue = ProtoSPOpValue | SPOpAtomic; +export type SPOpRaw = [SPOpType, SignedData]; +export type SPOpMap = ProtoSPOpMap & { + 'a': SPOpAtomic; +}; +export type SPOp = { + [K in keyof SPOpMap]: [K, SPOpMap[K]]; +}[keyof SPOpMap]; +export type SPMsgDirection = 'incoming' | 'outgoing'; +export type SPHead = { + version: '1.0.0'; + op: SPOpType; + height: number; + contractID: string | null; + previousKeyOp: string | null; + previousHEAD: string | null; + manifest: string; +}; +type SPMsgParams = { + direction: SPMsgDirection; + mapping: { + key: string; + value: string; + }; + head: SPHead; + signedMessageData: SignedData; +}; +export declare class SPMessage { + _mapping: { + key: string; + value: string; + }; + _head: SPHead; + _message: SPOpValue; + _signedMessageData: SignedData; + _direction: SPMsgDirection; + _decryptedValue?: unknown; + _innerSigningKeyId?: string; + static OP_CONTRACT: "c"; + static OP_ACTION_ENCRYPTED: "ae"; + static OP_ACTION_UNENCRYPTED: "au"; + static OP_KEY_ADD: "ka"; + static OP_KEY_DEL: "kd"; + static OP_KEY_UPDATE: "ku"; + static OP_PROTOCOL_UPGRADE: "pu"; + static OP_PROP_SET: "ps"; + static OP_PROP_DEL: "pd"; + static OP_CONTRACT_AUTH: "ca"; + static OP_CONTRACT_DEAUTH: "cd"; + static OP_ATOMIC: "a"; + static OP_KEY_SHARE: "ks"; + static OP_KEY_REQUEST: "kr"; + static OP_KEY_REQUEST_SEEN: "krs"; + static createV1_0({ contractID, previousHEAD, previousKeyOp, height, op, manifest }: { + contractID: string | null; + previousHEAD?: string | null; + previousKeyOp?: string | null; + height?: number; + op: SPOpRaw; + manifest: string; + }): SPMessage; + static cloneWith(targetHead: SPHead, targetOp: SPOpRaw, 
sources: Partial): SPMessage; + static deserialize(value: string, additionalKeys?: Record, state?: ChelContractState, unwrapMaybeEncryptedDataFn?: (data: SPKey | EncryptedData) => { + encryptionKeyId: string | null; + data: SPKey; + } | undefined): SPMessage; + static deserializeHEAD(value: string): { + head: SPHead; + hash: string; + contractID: string; + isFirstMessage: boolean; + description: () => string; + }; + constructor(params: SPMsgParams); + decryptedValue(): unknown | undefined; + innerSigningKeyId(): string | undefined; + head(): SPHead; + message(): SPOpValue; + op(): SPOp; + rawOp(): SPOpRaw; + opType(): SPOpType; + opValue(): SPOpValue; + signingKeyId(): string; + manifest(): string; + description(): string; + isFirstMessage(): boolean; + contractID(): string; + serialize(): string; + hash(): string; + previousKeyOp(): string | null; + height(): number; + id(): string; + direction(): 'incoming' | 'outgoing'; + isKeyOp(): boolean; + static get [serdesTagSymbol](): string; + static [serdesSerializeSymbol](m: SPMessage): unknown[]; + static [serdesDeserializeSymbol]([serialized, direction, decryptedValue, innerSigningKeyId]: [string, SPMsgDirection, object, string]): SPMessage; +} +export {}; diff --git a/dist/cjs/Secret.cjs b/dist/cjs/Secret.cjs new file mode 100644 index 0000000..bcfea1d --- /dev/null +++ b/dist/cjs/Secret.cjs @@ -0,0 +1,29 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Secret = void 0; +const serdes_1 = require("@chelonia/serdes"); +/* Wrapper class for secrets, which identifies them as such and prevents them +from being logged */ +// Use a `WeakMap` to store the actual secret outside of the returned `Secret` +// object. This ensures that the only way to access the secret is via the +// `.valueOf()` method, and it prevents accidentally logging things that +// shouldn't be logged. 
+const wm = new WeakMap(); +class Secret { + static [serdes_1.serdesDeserializeSymbol](secret) { + return new this(secret); + } + static [serdes_1.serdesSerializeSymbol](secret) { + return wm.get(secret); + } + static get [serdes_1.serdesTagSymbol]() { + return '__chelonia_Secret'; + } + constructor(value) { + wm.set(this, value); + } + valueOf() { + return wm.get(this); + } +} +exports.Secret = Secret; diff --git a/dist/cjs/Secret.d.cts b/dist/cjs/Secret.d.cts new file mode 100644 index 0000000..39e5744 --- /dev/null +++ b/dist/cjs/Secret.d.cts @@ -0,0 +1,8 @@ +import { serdesDeserializeSymbol, serdesSerializeSymbol, serdesTagSymbol } from '@chelonia/serdes'; +export declare class Secret<T> { + static [serdesDeserializeSymbol]<T>(secret: T): Secret<T>; + static [serdesSerializeSymbol]<T>(secret: Secret<T>): any; + static get [serdesTagSymbol](): string; + constructor(value: T); + valueOf(): T; +} diff --git a/dist/cjs/chelonia-utils.cjs b/dist/cjs/chelonia-utils.cjs new file mode 100644 index 0000000..4b87870 --- /dev/null +++ b/dist/cjs/chelonia-utils.cjs @@ -0,0 +1,34 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const sbp_1 = __importDefault(require("@sbp/sbp")); +// This file contains non-core parts of Chelonia, i.e., functionality that is +// useful but optional. The threshold for something being 'optional' generally +// is something that can be implemented externally using only public Chelonia +// selectors. +// Optional functionality can make certain assumptions about contracts or +// actions to make things simpler or easier to implement. +// Currently, a single selector is defined: 'chelonia/kv/queuedSet'. +// TODO: Other things should be moved to this file, such as `encryptedAction` +// (the wrapper) and 'gi.actions/out/rotateKeys'. 
+exports.default = (0, sbp_1.default)('sbp/selectors/register', { + // This selector is a wrapper for the `chelonia/kv/set` selector that uses + // the contract queue and allows referring to keys by name, with default key + // names set to `csk` and `cek` for signatures and encryption, respectively. + // For most 'simple' use cases, this selector is a better choice than + // `chelonia/kv/set`. However, the `chelonia/kv/set` primitive is needed if + // the queueing logic needs to be more advanced, the key to use requires + // custom logic or _if the `onconflict` callback also needs to be queued_. + 'chelonia/kv/queuedSet': ({ contractID, key, data, onconflict, ifMatch, encryptionKeyName = 'cek', signingKeyName = 'csk' }) => { + return (0, sbp_1.default)('chelonia/queueInvocation', contractID, () => { + return (0, sbp_1.default)('chelonia/kv/set', contractID, key, data, { + ifMatch, + encryptionKeyId: (0, sbp_1.default)('chelonia/contract/currentKeyIdByName', contractID, encryptionKeyName), + signingKeyId: (0, sbp_1.default)('chelonia/contract/currentKeyIdByName', contractID, signingKeyName), + onconflict + }); + }); + } +}); diff --git a/dist/cjs/chelonia-utils.d.cts b/dist/cjs/chelonia-utils.d.cts new file mode 100644 index 0000000..d451d2b --- /dev/null +++ b/dist/cjs/chelonia-utils.d.cts @@ -0,0 +1,2 @@ +declare const _default: string[]; +export default _default; diff --git a/dist/cjs/chelonia.cjs b/dist/cjs/chelonia.cjs new file mode 100644 index 0000000..c440844 --- /dev/null +++ b/dist/cjs/chelonia.cjs @@ -0,0 +1,1850 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ACTION_REGEX = exports.SPMessage = void 0; +require("@sbp/okturtles.eventqueue"); +require("@sbp/okturtles.events"); +const sbp_1 = __importDefault(require("@sbp/sbp")); +const turtledash_1 = require("turtledash"); +const functions_js_1 = require("./functions.cjs"); +const index_js_1 = require("./pubsub/index.cjs"); +const crypto_1 = require("@chelonia/crypto"); +const errors_js_1 = require("./errors.cjs"); +const events_js_1 = require("./events.cjs"); +const SPMessage_js_1 = require("./SPMessage.cjs"); +Object.defineProperty(exports, "SPMessage", { enumerable: true, get: function () { return SPMessage_js_1.SPMessage; } }); +require("./chelonia-utils.cjs"); +const encryptedData_js_1 = require("./encryptedData.cjs"); +require("./files.cjs"); +require("./internals.cjs"); +const signedData_js_1 = require("./signedData.cjs"); +require("./time-sync.cjs"); +const utils_js_1 = require("./utils.cjs"); +exports.ACTION_REGEX = /^((([\w.]+)\/([^/]+))(?:\/(?:([^/]+)\/)?)?)\w*/; +// ACTION_REGEX.exec('gi.contracts/group/payment/process') +// 0 => 'gi.contracts/group/payment/process' +// 1 => 'gi.contracts/group/payment/' +// 2 => 'gi.contracts/group' +// 3 => 'gi.contracts' +// 4 => 'group' +// 5 => 'payment' +exports.default = (0, sbp_1.default)('sbp/selectors/register', { + // https://www.wordnik.com/words/chelonia + // https://gitlab.okturtles.org/okturtles/group-income/-/wikis/E2E-Protocol/Framework.md#alt-names + 'chelonia/_init': function () { + this.config = { + // TODO: handle connecting to multiple servers for federation + get connectionURL() { throw new Error('Invalid use of connectionURL before initialization'); }, + // override! 
+ set connectionURL(value) { Object.defineProperty(this, 'connectionURL', { value, writable: true }); }, + stateSelector: 'chelonia/private/state', // override to integrate with, for example, vuex + contracts: { + defaults: { + modules: {}, // '' => resolved module import + exposedGlobals: {}, + allowedDomains: [], + allowedSelectors: [], + preferSlim: false + }, + overrides: {}, // override default values per-contract + manifests: {} // override! contract names => manifest hashes + }, + whitelisted: (action) => !!this.whitelistedActions[action], + reactiveSet: (obj, key, value) => { obj[key] = value; return value; }, // example: set to Vue.set + fetch: (...args) => fetch(...args), + reactiveDel: (obj, key) => { delete obj[key]; }, + // acceptAllMessages disables checking whether we are expecting a message + // or not for processing + acceptAllMessages: false, + skipActionProcessing: false, + skipDecryptionAttempts: false, + skipSideEffects: false, + // Strict processing will treat all processing errors as unrecoverable + // This is useful, e.g., in the server, to prevent invalid messages from + // being added to the database + strictProcessing: false, + // Strict ordering will throw on past events with ChelErrorAlreadyProcessed + // Similarly, future events will not be reingested and will throw + // with ChelErrorDBBadPreviousHEAD + strictOrdering: false, + connectionOptions: { + maxRetries: Infinity, // See https://github.com/okTurtles/group-income/issues/1183 + reconnectOnTimeout: true // can be enabled since we are not doing auth via web sockets + }, + hooks: { + preHandleEvent: null, // async (message: SPMessage) => {} + postHandleEvent: null, // async (message: SPMessage) => {} + processError: null, // (e: Error, message: SPMessage) => {} + sideEffectError: null, // (e: Error, message: SPMessage) => {} + handleEventError: null, // (e: Error, message: SPMessage) => {} + syncContractError: null, // (e: Error, contractID: string) => {} + 
pubsubError: null // (e:Error, socket: Socket) + }, + unwrapMaybeEncryptedData: encryptedData_js_1.unwrapMaybeEncryptedData + }; + // Used in publishEvent to cancel sending events after reset (logout) + this._instance = Object.create(null); + this.abortController = new AbortController(); + this.state = { + contracts: {}, // contractIDs => { type, HEAD } (contracts we've subscribed to) + pending: [] // prevents processing unexpected data from a malicious server + }; + this.manifestToContract = {}; + this.whitelistedActions = {}; + this.currentSyncs = Object.create(null); + this.postSyncOperations = Object.create(null); + this.sideEffectStacks = Object.create(null); // [contractID]: Array + this.sideEffectStack = (contractID) => { + let stack = this.sideEffectStacks[contractID]; + if (!stack) { + this.sideEffectStacks[contractID] = stack = []; + } + return stack; + }; + // setPostSyncOp defines operations to be run after all recent events have + // been processed. This is useful, for example, when responding to + // OP_KEY_REQUEST, as we want to send an OP_KEY_SHARE only to yet-unanswered + // requests, which is information in the future (from the point of view of + // the event handler). 
+ // We could directly enqueue the operations, but by using a map we avoid + // enqueueing more operations than necessary + // The operations defined here will be executed: + // (1) After a call to /sync or /syncContract; or + // (2) After an event has been handled, if it was received on a web socket + this.setPostSyncOp = (contractID, key, op) => { + this.postSyncOperations[contractID] = this.postSyncOperations[contractID] || Object.create(null); + this.postSyncOperations[contractID][key] = op; + }; + const secretKeyGetter = (o, p) => { + if ((0, turtledash_1.has)(o, p)) + return o[p]; + const rootState = (0, sbp_1.default)(this.config.stateSelector); + if (rootState?.secretKeys && (0, turtledash_1.has)(rootState.secretKeys, p)) { + const key = (0, crypto_1.deserializeKey)(rootState.secretKeys[p]); + o[p] = key; + return key; + } + }; + const secretKeyList = (o) => { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + const stateKeys = Object.keys(rootState?.secretKeys || {}); + return Array.from(new Set([...Object.keys(o), ...stateKeys])); + }; + this.transientSecretKeys = new Proxy(Object.create(null), { + get: secretKeyGetter, + ownKeys: secretKeyList + }); + this.ephemeralReferenceCount = Object.create(null); + // subscriptionSet includes all the contracts in state.contracts for which + // we can process events (contracts for which we have called /sync) + // The reason we can't use, e.g., Object.keys(state.contracts), is that + // when resetting the state (calling /reset, e.g., after logging out) we may + // still receive events for old contracts that belong to the old session. + // Those events must be ignored or discarded until the new session is set up + // (i.e., login has finished running) because we don't necessarily have + // all the information needed to process events in those contracts, such as + // secret keys. + // A concrete example is: + // 1. user1 logs in to the group and rotates the group keys, then logs out + // 2. 
user2 logs in to the group. + // 3. If an event came over the web socket for the group, we must not + // process it before we've processed the OP_KEY_SHARE containing the + // new keys, or else we'll build an incorrect state. + // The example above is simplified, but this is even more of an issue + // when there is a third contract (for example, a group chatroom) using + // those rotated keys as foreign keys. + this.subscriptionSet = new Set(); + // pending includes contracts that are scheduled for syncing or in the + // process of syncing for the first time. After sync completes for the + // first time, they are removed from pending and added to subscriptionSet + this.pending = []; + }, + 'chelonia/config': function () { + return { + ...(0, turtledash_1.cloneDeep)(this.config), + fetch: this.config.fetch, + reactiveSet: this.config.reactiveSet, + reactiveDel: this.config.reactiveDel + }; + }, + 'chelonia/configure': async function (config) { + (0, turtledash_1.merge)(this.config, config); + // merge will strip the hooks off of config.hooks when merging from the root of the object + // because they are functions and cloneDeep doesn't clone functions + Object.assign(this.config.hooks, config.hooks || {}); + // using Object.assign here instead of merge to avoid stripping away imported modules + if (config.contracts) { + Object.assign(this.config.contracts.defaults, config.contracts.defaults || {}); + const manifests = this.config.contracts.manifests; + console.debug('[chelonia] preloading manifests:', Object.keys(manifests)); + for (const contractName in manifests) { + await (0, sbp_1.default)('chelonia/private/loadManifest', contractName, manifests[contractName]); + } + } + if ((0, turtledash_1.has)(config, 'skipDecryptionAttempts')) { + if (config.skipDecryptionAttempts) { + this.config.unwrapMaybeEncryptedData = (data) => { + if (!(0, encryptedData_js_1.isEncryptedData)(data)) { + return { + encryptionKeyId: null, data + }; + } + }; + } + else { + 
this.config.unwrapMaybeEncryptedData = encryptedData_js_1.unwrapMaybeEncryptedData; + } + } + }, + 'chelonia/reset': async function (newState, postCleanupFn) { + // Allow optional newState OR postCleanupFn + if (typeof newState === 'function' && typeof postCleanupFn === 'undefined') { + postCleanupFn = newState; + newState = undefined; + } + if (this.pubsub) { + (0, sbp_1.default)('chelonia/private/stopClockSync'); + } + // wait for any pending sync operations to finish before saving + Object.keys(this.postSyncOperations).forEach(cID => { + (0, sbp_1.default)('chelonia/private/enqueuePostSyncOps', cID); + }); + await (0, sbp_1.default)('chelonia/contract/waitPublish'); + await (0, sbp_1.default)('chelonia/contract/wait'); + // do this again to catch operations that are the result of side-effects + // or post sync ops + Object.keys(this.postSyncOperations).forEach(cID => { + (0, sbp_1.default)('chelonia/private/enqueuePostSyncOps', cID); + }); + await (0, sbp_1.default)('chelonia/contract/waitPublish'); + await (0, sbp_1.default)('chelonia/contract/wait'); + const result = await postCleanupFn?.(); + // The following are all synchronous operations + const rootState = (0, sbp_1.default)(this.config.stateSelector); + // Cancel all outgoing messages by replacing this._instance + this._instance = Object.create(null); + this.abortController.abort(); + this.abortController = new AbortController(); + // Remove all contracts, including all contracts from pending + (0, utils_js_1.reactiveClearObject)(rootState, this.config.reactiveDel); + this.config.reactiveSet(rootState, 'contracts', Object.create(null)); + (0, utils_js_1.clearObject)(this.ephemeralReferenceCount); + this.pending.splice(0); + (0, utils_js_1.clearObject)(this.currentSyncs); + (0, utils_js_1.clearObject)(this.postSyncOperations); + (0, utils_js_1.clearObject)(this.sideEffectStacks); + const removedContractIDs = Array.from(this.subscriptionSet); + this.subscriptionSet.clear(); + (0, 
sbp_1.default)('chelonia/clearTransientSecretKeys'); + (0, sbp_1.default)('okTurtles.events/emit', events_js_1.CHELONIA_RESET); + (0, sbp_1.default)('okTurtles.events/emit', events_js_1.CONTRACTS_MODIFIED, Array.from(this.subscriptionSet), { added: [], removed: removedContractIDs }); + if (this.pubsub) { + (0, sbp_1.default)('chelonia/private/startClockSync'); + } + if (newState) { + Object.entries(newState).forEach(([key, value]) => { + this.config.reactiveSet(rootState, key, value); + }); + } + return result; + }, + 'chelonia/storeSecretKeys': function (wkeys) { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + if (!rootState.secretKeys) + this.config.reactiveSet(rootState, 'secretKeys', Object.create(null)); + let keys = wkeys.valueOf(); + if (!keys) + return; + if (!Array.isArray(keys)) + keys = [keys]; + keys.forEach(({ key, transient }) => { + if (!key) + return; + if (typeof key === 'string') { + key = (0, crypto_1.deserializeKey)(key); + } + const id = (0, crypto_1.keyId)(key); + // Store transient keys transientSecretKeys + if (!(0, turtledash_1.has)(this.transientSecretKeys, id)) { + this.transientSecretKeys[id] = key; + } + if (transient) + return; + // If the key is marked as persistent, write it to the state as well + if (!(0, turtledash_1.has)(rootState.secretKeys, id)) { + this.config.reactiveSet(rootState.secretKeys, id, (0, crypto_1.serializeKey)(key, true)); + } + }); + }, + 'chelonia/clearTransientSecretKeys': function (ids) { + if (Array.isArray(ids)) { + ids.forEach((id) => { + delete this.transientSecretKeys[id]; + }); + } + else { + Object.keys(this.transientSecretKeys).forEach((id) => { + delete this.transientSecretKeys[id]; + }); + } + }, + 'chelonia/haveSecretKey': function (keyId, persistent) { + if (!persistent && (0, turtledash_1.has)(this.transientSecretKeys, keyId)) + return true; + const rootState = (0, sbp_1.default)(this.config.stateSelector); + return !!rootState?.secretKeys && (0, 
turtledash_1.has)(rootState.secretKeys, keyId); + }, + 'chelonia/contract/isResyncing': function (contractIDOrState) { + if (typeof contractIDOrState === 'string') { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + contractIDOrState = rootState[contractIDOrState]; + } + return !!contractIDOrState?._volatile?.dirty || !!contractIDOrState?._volatile?.resyncing; + }, + 'chelonia/contract/hasKeyShareBeenRespondedBy': function (contractIDOrState, requestedToContractID, reference) { + if (typeof contractIDOrState === 'string') { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + contractIDOrState = rootState[contractIDOrState]; + } + const result = Object.values(contractIDOrState?._vm.authorizedKeys || {}).some((r) => { + return r?.meta?.keyRequest?.responded && r.meta.keyRequest.contractID === requestedToContractID && (!reference || r.meta.keyRequest.reference === reference); + }); + return result; + }, + 'chelonia/contract/waitingForKeyShareTo': function (contractIDOrState, requestingContractID, reference) { + if (typeof contractIDOrState === 'string') { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + contractIDOrState = rootState[contractIDOrState]; + } + const result = contractIDOrState._volatile?.pendingKeyRequests + ?.filter((r) => { + return r && (!requestingContractID || r.contractID === requestingContractID) && (!reference || r.reference === reference); + }) + ?.map(({ name }) => name); + if (!result?.length) + return null; + return result; + }, + 'chelonia/contract/successfulKeySharesByContractID': function (contractIDOrState, requestingContractID) { + if (typeof contractIDOrState === 'string') { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + contractIDOrState = rootState[contractIDOrState]; + } + const keyShares = Object.values(contractIDOrState._vm.keyshares || {}); + if (!keyShares?.length) + return; + const result = Object.create(null); + keyShares.forEach((kS) => { + if 
(!kS.success) + return; + if (requestingContractID && kS.contractID !== requestingContractID) + return; + if (!result[kS.contractID]) + result[kS.contractID] = []; + result[kS.contractID].push({ height: kS.height, hash: kS.hash }); + }); + Object.keys(result).forEach(cID => { + result[cID].sort((a, b) => { + return b.height - a.height; + }); + }); + return result; + }, + 'chelonia/contract/hasKeysToPerformOperation': function (contractIDOrState, operation) { + if (typeof contractIDOrState === 'string') { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + contractIDOrState = rootState[contractIDOrState]; + } + const op = (operation !== '*') ? [operation] : operation; + return !!(0, utils_js_1.findSuitableSecretKeyId)(contractIDOrState, op, ['sig']); + }, + // Did sourceContractIDOrState receive an OP_KEY_SHARE to perform the given + // operation on contractIDOrState? + 'chelonia/contract/receivedKeysToPerformOperation': function (sourceContractIDOrState, contractIDOrState, operation) { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + if (typeof sourceContractIDOrState === 'string') { + sourceContractIDOrState = rootState[sourceContractIDOrState]; + } + if (typeof contractIDOrState === 'string') { + contractIDOrState = rootState[contractIDOrState]; + } + const op = (operation !== '*') ? 
[operation] : operation; + const keyId = (0, utils_js_1.findSuitableSecretKeyId)(contractIDOrState, op, ['sig']); + return sourceContractIDOrState?._vm?.sharedKeyIds?.some((sK) => sK.id === keyId); + }, + 'chelonia/contract/currentKeyIdByName': function (contractIDOrState, name, requireSecretKey) { + if (typeof contractIDOrState === 'string') { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + contractIDOrState = rootState[contractIDOrState]; + } + const currentKeyId = (0, utils_js_1.findKeyIdByName)(contractIDOrState, name); + if (requireSecretKey && !(0, sbp_1.default)('chelonia/haveSecretKey', currentKeyId)) { + return; + } + return currentKeyId; + }, + 'chelonia/contract/foreignKeysByContractID': function (contractIDOrState, foreignContractID) { + if (typeof contractIDOrState === 'string') { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + contractIDOrState = rootState[contractIDOrState]; + } + return (0, utils_js_1.findForeignKeysByContractID)(contractIDOrState, foreignContractID); + }, + 'chelonia/contract/historicalKeyIdsByName': function (contractIDOrState, name) { + if (typeof contractIDOrState === 'string') { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + contractIDOrState = rootState[contractIDOrState]; + } + const currentKeyId = (0, utils_js_1.findKeyIdByName)(contractIDOrState, name); + const revokedKeyIds = (0, utils_js_1.findRevokedKeyIdsByName)(contractIDOrState, name); + return currentKeyId ? 
[currentKeyId, ...revokedKeyIds] : revokedKeyIds; + }, + 'chelonia/contract/suitableSigningKey': function (contractIDOrState, permissions, purposes, ringLevel, allowedActions) { + if (typeof contractIDOrState === 'string') { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + contractIDOrState = rootState[contractIDOrState]; + } + const keyId = (0, utils_js_1.findSuitableSecretKeyId)(contractIDOrState, permissions, purposes, ringLevel, allowedActions); + return keyId; + }, + 'chelonia/contract/setPendingKeyRevocation': function (contractID, names) { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + const state = rootState[contractID]; + if (!state._volatile) + this.config.reactiveSet(state, '_volatile', Object.create(null)); + if (!state._volatile.pendingKeyRevocations) + this.config.reactiveSet(state._volatile, 'pendingKeyRevocations', Object.create(null)); + for (const name of names) { + const keyId = (0, utils_js_1.findKeyIdByName)(state, name); + if (keyId) { + this.config.reactiveSet(state._volatile.pendingKeyRevocations, keyId, true); + } + else { + console.warn('[setPendingKeyRevocation] Unable to find keyId for name', { contractID, name }); + } + } + }, + 'chelonia/shelterAuthorizationHeader'(contractID) { + return utils_js_1.buildShelterAuthorizationHeader.call(this, contractID); + }, + // The purpose of the 'chelonia/crypto/*' selectors is so that they can be called + // from contracts without including the crypto code (i.e., importing crypto.js) + // This function takes a function as a parameter that returns a string + // It does not a string directly to prevent accidentally logging the value, + // which is a secret + 'chelonia/crypto/keyId': (inKey) => { + return (0, crypto_1.keyId)(inKey.valueOf()); + }, + // TODO: allow connecting to multiple servers at once + 'chelonia/connect': function (options = {}) { + if (!this.config.connectionURL) + throw new Error('config.connectionURL missing'); + if 
(!this.config.connectionOptions) + throw new Error('config.connectionOptions missing'); + if (this.pubsub) { + this.pubsub.destroy(); + } + let pubsubURL = this.config.connectionURL; + if (process.env.NODE_ENV === 'development') { + // This is temporarily used in development mode to help the server improve + // its console output until we have a better solution. Do not use for auth. + pubsubURL += `?debugID=${(0, turtledash_1.randomHexString)(6)}`; + } + if (this.pubsub) { + (0, sbp_1.default)('chelonia/private/stopClockSync'); + } + (0, sbp_1.default)('chelonia/private/startClockSync'); + this.pubsub = (0, index_js_1.createClient)(pubsubURL, { + ...this.config.connectionOptions, + handlers: { + ...options.handlers, + // Every time we get a REQUEST_TYPE.SUB response, which happens for + // 'new' subscriptions as well as every time the connection is reset + 'subscription-succeeded': function (event) { + const { channelID } = event.detail; + // The check below is needed because we could have unsubscribed since + // requesting a subscription from the server. In that case, we don't + // need to call `sync`. + if (this.subscriptionSet.has(channelID)) { + // For new subscriptions, some messages could have been lost + // between the time the subscription was requested and it was + // actually set up. In these cases, force sync contracts to get them + // updated. 
+ (0, sbp_1.default)('chelonia/private/out/sync', channelID, { force: true }).catch((err) => { + console.warn(`[chelonia] Syncing contract ${channelID} failed: ${err.message}`); + }); + } + options.handlers?.['subscription-succeeded']?.call(this, event); + } + }, + // Map message handlers to transparently handle encryption and signatures + messageHandlers: { + ...(Object.fromEntries(Object.entries(options.messageHandlers || {}).map(([k, v]) => { + switch (k) { + case index_js_1.NOTIFICATION_TYPE.PUB: + return [k, (msg) => { + if (!msg.channelID) { + console.info('[chelonia] Discarding pub event without channelID'); + return; + } + if (!this.subscriptionSet.has(msg.channelID)) { + console.info(`[chelonia] Discarding pub event for ${msg.channelID} because it's not in the current subscriptionSet`); + return; + } + (0, sbp_1.default)('chelonia/queueInvocation', msg.channelID, () => { + v.call(this.pubsub, parseEncryptedOrUnencryptedMessage(this, { + contractID: msg.channelID, + serializedData: msg.data + })); + }).catch((e) => { + console.error(`[chelonia] Error processing pub event for ${msg.channelID}`, e); + }); + }]; + case index_js_1.NOTIFICATION_TYPE.KV: + return [k, (msg) => { + if (!msg.channelID || !msg.key) { + console.info('[chelonia] Discarding kv event without channelID or key'); + return; + } + if (!this.subscriptionSet.has(msg.channelID)) { + console.info(`[chelonia] Discarding kv event for ${msg.channelID} because it's not in the current subscriptionSet`); + return; + } + (0, sbp_1.default)('chelonia/queueInvocation', msg.channelID, () => { + v.call(this.pubsub, [msg.key, parseEncryptedOrUnencryptedMessage(this, { + contractID: msg.channelID, + meta: msg.key, + serializedData: JSON.parse(Buffer.from(msg.data).toString()) + })]); + }).catch((e) => { + console.error(`[chelonia] Error processing kv event for ${msg.channelID} and key ${msg.key}`, msg, e); + }); + }]; + case index_js_1.NOTIFICATION_TYPE.DELETION: + return [k, (msg) => v.call(this.pubsub, 
msg.data)]; + default: + return [k, v]; + } + }))), + [index_js_1.NOTIFICATION_TYPE.ENTRY](msg) { + // We MUST use 'chelonia/private/in/enqueueHandleEvent' to ensure handleEvent() + // is called AFTER any currently-running calls to 'chelonia/private/out/sync' + // to prevent gi.db from throwing "bad previousHEAD" errors. + // Calling via SBP also makes it simple to implement 'test/backend.cjs' + const { contractID } = SPMessage_js_1.SPMessage.deserializeHEAD(msg.data); + (0, sbp_1.default)('chelonia/private/in/enqueueHandleEvent', contractID, msg.data); + } + } + }); + if (!this.contractsModifiedListener) { + // Keep pubsub in sync (logged into the right "rooms") with 'state.contracts' + this.contractsModifiedListener = () => (0, sbp_1.default)('chelonia/pubsub/update'); + (0, sbp_1.default)('okTurtles.events/on', events_js_1.CONTRACTS_MODIFIED, this.contractsModifiedListener); + } + return this.pubsub; + }, + // This selector is defined primarily for ingesting web push notifications, + // although it can be used as a general-purpose API to process events received + // from other external sources that are not managed by Chelonia itself (i.e. sources + // other than the Chelonia-managed websocket connection and RESTful API). 
+ 'chelonia/handleEvent': async function (event) { + const { contractID } = SPMessage_js_1.SPMessage.deserializeHEAD(event); + return await (0, sbp_1.default)('chelonia/private/in/enqueueHandleEvent', contractID, event); + }, + 'chelonia/defineContract': function (contract) { + if (!exports.ACTION_REGEX.exec(contract.name)) + throw new Error(`bad contract name: ${contract.name}`); + if (!contract.metadata) + contract.metadata = { validate() { }, create: () => ({}) }; + if (!contract.getters) + contract.getters = {}; + contract.state = (contractID) => (0, sbp_1.default)(this.config.stateSelector)[contractID]; + contract.manifest = this.defContractManifest; + contract.sbp = this.defContractSBP; + this.defContractSelectors = []; + this.defContract = contract; + this.defContractSelectors.push(...(0, sbp_1.default)('sbp/selectors/register', { + // expose getters for Vuex integration and other conveniences + [`${contract.manifest}/${contract.name}/getters`]: () => contract.getters, + // 2 ways to cause sideEffects to happen: by defining a sideEffect function in the + // contract, or by calling /pushSideEffect w/async SBP call. Can also do both. + [`${contract.manifest}/${contract.name}/pushSideEffect`]: (contractID, asyncSbpCall) => { + // if this version of the contract is pushing a sideEffect to a function defined by the + // contract itself, make sure that it calls the same version of the sideEffect + const [sel] = asyncSbpCall; + if (sel.startsWith(contract.name + '/')) { + asyncSbpCall[0] = `${contract.manifest}/${sel}`; + } + this.sideEffectStack(contractID).push(asyncSbpCall); + } + })); + for (const action in contract.actions) { + contractNameFromAction(action); // ensure actions are appropriately named + this.whitelistedActions[action] = true; + // TODO: automatically generate send actions here using `${action}/send` + // allow the specification of: + // - the optype (e.g. 
OP_ACTION_(UN)ENCRYPTED) + // - a localized error message + // - whatever keys should be passed in as well + // base it off of the design of encryptedAction() + this.defContractSelectors.push(...(0, sbp_1.default)('sbp/selectors/register', { + [`${contract.manifest}/${action}/process`]: async (message, state) => { + const { meta, data, contractID } = message; + // TODO: optimize so that you're creating a proxy object only when needed + // TODO: Note: when sandboxing contracts, contracts may not have + // access to the state directly, meaning that modifications would need + // to be re-applied + state = state || contract.state(contractID); + const gProxy = gettersProxy(state, contract.getters); + // These `await` are here to help with sandboxing in the future + // Sandboxing may mean that contracts are executed in another context + // (e.g., a worker), which would require asynchronous communication + // between Chelonia and the contract. + // Even though these are asynchronous calls, contracts should not + // call side effects from these functions + await contract.metadata.validate(meta, { state, ...gProxy, contractID }); + await contract.actions[action].validate(data, { state, ...gProxy, meta, message, contractID }); + // it's possible that the sideEffect stack got filled up by the call to `processMessage` from + // a call to `publishEvent` (when an outgoing message is being sent). 
+ this.sideEffectStacks[contractID] = []; + await contract.actions[action].process(message, { state, ...gProxy }); + }, + // 'mutation' is an object that's similar to 'message', but not identical + [`${contract.manifest}/${action}/sideEffect`]: async (mutation, state) => { + if (contract.actions[action].sideEffect) { + state = state || contract.state(mutation.contractID); + if (!state) { + console.warn(`[${contract.manifest}/${action}/sideEffect]: Skipping side-effect since there is no contract state for contract ${mutation.contractID}`); + return; + } + // TODO: Copy to simulate a sandbox boundary without direct access + // as well as to enforce the rule that side-effects must not mutate + // state + const stateCopy = (0, turtledash_1.cloneDeep)(state); + const gProxy = gettersProxy(stateCopy, contract.getters); + await contract.actions[action].sideEffect(mutation, { state: stateCopy, ...gProxy }); + } + // since both /process and /sideEffect could call /pushSideEffect, we make sure + // to process the side effects on the stack after calling /sideEffect. 
+ const sideEffects = this.sideEffectStack(mutation.contractID); + while (sideEffects.length > 0) { + const sideEffect = sideEffects.shift(); + try { + await contract.sbp(...sideEffect); + } + catch (e_) { + const e = e_; + console.error(`[chelonia] ERROR: '${e.name}' ${e.message}, for pushed sideEffect of ${mutation.description}:`, sideEffect); + this.sideEffectStacks[mutation.contractID] = []; // clear the side effects + throw e; + } + } + } + })); + } + for (const method in contract.methods) { + this.defContractSelectors.push(...(0, sbp_1.default)('sbp/selectors/register', { + [`${contract.manifest}/${method}`]: contract.methods[method] + })); + } + (0, sbp_1.default)('okTurtles.events/emit', events_js_1.CONTRACT_REGISTERED, contract); + }, + 'chelonia/queueInvocation': (contractID, sbpInvocation) => { + // We maintain two queues, contractID, used for internal events (i.e., + // from chelonia) and public:contractID, used for operations that need to + // be done after all the current internal events (if any) have + // finished processing. + // Once all of the current internal events (in the contractID queue) + // have completed, the operation requested is put into the public queue. + // The reason for maintaining two different queues is to provide users + // a way to run operations after internal operations have been processed + // (for example, a side-effect might call queueInvocation to do work + // after the current and future events have been processed), without the + // work in these user-functions blocking Chelonia and prventing it from + // processing events. + // For example, a contract could have an action called + // 'example/setProfilePicture'. 
The side-effect could look like this: + // + // sideEffect ({ data, contractID }, { state }) { + // const profilePictureUrl = data.url + // + // sbp('chelonia/queueInvocation', contractID, () => { + // const rootState = sbp('state/vuex/state') + // if (rootState[contractID].profilePictureUrl !== profilePictureUrl) + // return // The profile picture changed, so we do nothing + // + // // The following could take a long time. We want Chelonia + // // to still work and process events as normal. + // return this.config.fetch(profilePictureUrl).then(doSomeWorkWithTheFile) + // }) + // } + return (0, sbp_1.default)('chelonia/private/queueEvent', contractID, ['chelonia/private/noop']).then(() => (0, sbp_1.default)('chelonia/private/queueEvent', 'public:' + contractID, sbpInvocation)); + }, + 'chelonia/begin': async (...invocations) => { + for (const invocation of invocations) { + await (0, sbp_1.default)(...invocation); + } + }, + // call this manually to resubscribe/unsubscribe from contracts as needed + // if you are using a custom stateSelector and reload the state (e.g. upon login) + 'chelonia/pubsub/update': function () { + const client = this.pubsub; + const subscribedIDs = [...client.subscriptionSet]; + const currentIDs = Array.from(this.subscriptionSet); + const leaveSubscribed = (0, turtledash_1.intersection)(subscribedIDs, currentIDs); + const toUnsubscribe = (0, turtledash_1.difference)(subscribedIDs, leaveSubscribed); + const toSubscribe = (0, turtledash_1.difference)(currentIDs, leaveSubscribed); + // There is currently no need to tell other clients about our sub/unsubscriptions. 
+ try { + for (const contractID of toUnsubscribe) { + client.unsub(contractID); + } + for (const contractID of toSubscribe) { + client.sub(contractID); + } + } + catch (e) { + console.error(`[chelonia] pubsub/update: error ${e.name}: ${e.message}`, { toUnsubscribe, toSubscribe }, e); + this.config.hooks.pubsubError?.(e, client); + } + }, + // resolves when all pending actions for these contractID(s) finish + 'chelonia/contract/wait': function (contractIDs) { + const listOfIds = contractIDs + ? (typeof contractIDs === 'string' ? [contractIDs] : contractIDs) + : Object.keys((0, sbp_1.default)(this.config.stateSelector).contracts); + return Promise.all(listOfIds.flatMap(cID => { + return (0, sbp_1.default)('chelonia/queueInvocation', cID, ['chelonia/private/noop']); + })); + }, + // resolves when all pending *writes* for these contractID(s) finish + 'chelonia/contract/waitPublish': function (contractIDs) { + const listOfIds = contractIDs + ? (typeof contractIDs === 'string' ? [contractIDs] : contractIDs) + : Object.keys((0, sbp_1.default)(this.config.stateSelector).contracts); + return Promise.all(listOfIds.flatMap(cID => { + return (0, sbp_1.default)('chelonia/private/queueEvent', `publish:${cID}`, ['chelonia/private/noop']); + })); + }, + // 'chelonia/contract' - selectors related to injecting remote data and monitoring contracts + // TODO: add an optional parameter to "retain" the contract (see #828) + // eslint-disable-next-line require-await + 'chelonia/contract/sync': async function (contractIDs, params) { + // The exposed `chelonia/contract/sync` selector is meant for users of + // Chelonia and not for internal use within Chelonia. + // It should only be called after `/retain` where needed (for example, when + // starting up Chelonia with a saved state) + const listOfIds = typeof contractIDs === 'string' ? 
[contractIDs] : contractIDs; + // Verify that there's a valid reference count + listOfIds.forEach((id) => { + if (utils_js_1.checkCanBeGarbageCollected.call(this, id)) { + if (process.env.CI) { + Promise.reject(new Error('[chelonia] Missing reference count for contract ' + id)); + } + console.error('[chelonia] Missing reference count for contract ' + id); + throw new Error('Missing reference count for contract'); + } + }); + // Call the internal sync selector. `force` is always true as using `/sync` + // besides internally is only needed to force sync a contract + return (0, sbp_1.default)('chelonia/private/out/sync', listOfIds, { ...params, force: true }); + }, + 'chelonia/contract/isSyncing': function (contractID, { firstSync = false } = {}) { + const isSyncing = !!this.currentSyncs[contractID]; + return firstSync + ? isSyncing && this.currentSyncs[contractID].firstSync + : isSyncing; + }, + 'chelonia/contract/currentSyncs': function () { + return Object.keys(this.currentSyncs); + }, + // Because `/remove` is done asynchronously and a contract might be removed + // much later than when the call to remove was made, an optional callback + // can be passed to verify whether to proceed with removal. This is used as + // part of the `/release` mechanism to prevent removing contracts that have + // acquired new references since the call to `/remove`. + 'chelonia/contract/remove': function (contractIDs, { confirmRemovalCallback, permanent } = {}) { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + const listOfIds = typeof contractIDs === 'string' ? 
[contractIDs] : contractIDs; + return Promise.all(listOfIds.map(contractID => { + if (!rootState?.contracts?.[contractID]) { + return undefined; + } + return (0, sbp_1.default)('chelonia/private/queueEvent', contractID, () => { + // This allows us to double-check that the contract is meant to be + // removed, as circumstances could have changed from the time remove + // was called and this function is executed. For example, `/release` + // makes a synchronous check, but processing of other events since + // require this to be re-checked (in this case, for reference counts). + if (confirmRemovalCallback && !confirmRemovalCallback(contractID)) { + return; + } + const rootState = (0, sbp_1.default)(this.config.stateSelector); + const fkContractIDs = Array.from(new Set(Object.values(rootState[contractID]?._vm?.authorizedKeys ?? {}).filter((k) => { + return !!k.foreignKey; + }).map((k) => { + try { + const fkUrl = new URL(k.foreignKey); + return fkUrl.pathname; + } + catch { + return undefined; + } + }).filter(Boolean))); + (0, sbp_1.default)('chelonia/private/removeImmediately', contractID, { permanent }); + if (fkContractIDs.length) { + // Attempt to release all contracts that are being monitored for + // foreign keys + (0, sbp_1.default)('chelonia/contract/release', fkContractIDs, { try: true }).catch((e) => { + console.error('[chelonia] Error attempting to release foreign key contracts', e); + }); + } + }); + })); + }, + 'chelonia/contract/retain': async function (contractIDs, params) { + const listOfIds = typeof contractIDs === 'string' ? 
[contractIDs] : contractIDs; + const rootState = (0, sbp_1.default)(this.config.stateSelector); + if (listOfIds.length === 0) + return Promise.resolve(); + const checkIfDeleted = (id) => { + // Contract has been permanently deleted + if (rootState.contracts[id] === null) { + console.error('[chelonia/contract/retain] Called /retain on permanently deleted contract.', id); + throw new errors_js_1.ChelErrorResourceGone('Unable to retain permanently deleted contract ' + id); + } + }; + if (!params?.ephemeral) { + listOfIds.forEach((id) => { + checkIfDeleted(id); + if (!(0, turtledash_1.has)(rootState.contracts, id)) { + this.config.reactiveSet(rootState.contracts, id, Object.create(null)); + } + this.config.reactiveSet(rootState.contracts[id], 'references', (rootState.contracts[id].references ?? 0) + 1); + }); + } + else { + listOfIds.forEach((id) => { + checkIfDeleted(id); + if (!(0, turtledash_1.has)(this.ephemeralReferenceCount, id)) { + this.ephemeralReferenceCount[id] = 1; + } + else { + this.ephemeralReferenceCount[id] = this.ephemeralReferenceCount[id] + 1; + } + }); + } + return await (0, sbp_1.default)('chelonia/private/out/sync', listOfIds); + }, + // the `try` parameter does not affect (ephemeral or persistent) reference + // counts, but rather removes a contract if the reference count is zero + // and the contract isn't being monitored for foreign keys. This parameter + // is meant mostly for internal chelonia use, so that removing or releasing + // a contract can also remove other contracts that this first contract + // was monitoring. + 'chelonia/contract/release': async function (contractIDs, params) { + const listOfIds = typeof contractIDs === 'string' ? 
[contractIDs] : contractIDs; + const rootState = (0, sbp_1.default)(this.config.stateSelector); + if (!params?.try) { + if (!params?.ephemeral) { + listOfIds.forEach((id) => { + // Contract has been permanently deleted + if (rootState.contracts[id] === null) { + console.warn('[chelonia/contract/release] Called /release on permanently deleted contract. This has no effect.', id); + return; + } + if ((0, turtledash_1.has)(rootState.contracts, id) && (0, turtledash_1.has)(rootState.contracts[id], 'references')) { + const current = rootState.contracts[id].references; + if (current === 0) { + console.error('[chelonia/contract/release] Invalid negative reference count for', id); + if (process.env.CI) { + // If running in CI, force tests to fail + Promise.reject(new Error('Invalid negative reference count: ' + id)); + } + throw new Error('Invalid negative reference count'); + } + if (current <= 1) { + this.config.reactiveDel(rootState.contracts[id], 'references'); + } + else { + this.config.reactiveSet(rootState.contracts[id], 'references', current - 1); + } + } + else { + console.error('[chelonia/contract/release] Invalid negative reference count for', id); + if (process.env.CI) { + // If running in CI, force tests to fail + Promise.reject(new Error('Invalid negative reference count: ' + id)); + } + throw new Error('Invalid negative reference count'); + } + }); + } + else { + listOfIds.forEach((id) => { + // Contract has been permanently deleted + if (rootState.contracts[id] === null) { + console.warn('[chelonia/contract/release] Called /release on permanently deleted contract. This has no effect.', id); + return; + } + if ((0, turtledash_1.has)(this.ephemeralReferenceCount, id)) { + const current = this.ephemeralReferenceCount[id] ?? 
0; + if (current <= 1) { + delete this.ephemeralReferenceCount[id]; + } + else { + this.ephemeralReferenceCount[id] = current - 1; + } + } + else { + console.error('[chelonia/contract/release] Invalid negative ephemeral reference count for', id); + if (process.env.CI) { + // If running in CI, force tests to fail + Promise.reject(new Error('Invalid negative ephemeral reference count: ' + id)); + } + throw new Error('Invalid negative ephemeral reference count'); + } + }); + } + } + // This function will be called twice. The first time, it provides a list of + // candidate contracts to remove. The second time, it confirms that the + // contract is safe to remove + const boundCheckCanBeGarbageCollected = utils_js_1.checkCanBeGarbageCollected.bind(this); + const idsToRemove = listOfIds.filter(boundCheckCanBeGarbageCollected); + return idsToRemove.length ? await (0, sbp_1.default)('chelonia/contract/remove', idsToRemove, { confirmRemovalCallback: boundCheckCanBeGarbageCollected }) : undefined; + }, + 'chelonia/contract/disconnect': async function (contractID, contractIDToDisconnect) { + const state = (0, sbp_1.default)(this.config.stateSelector); + const contractState = state[contractID]; + const keyIds = Object.values(contractState._vm.authorizedKeys).filter((k) => { + return k._notAfterHeight == null && k.meta?.keyRequest?.contractID === contractIDToDisconnect; + }).map(k => k.id); + if (!keyIds.length) + return; + return await (0, sbp_1.default)('chelonia/out/keyDel', { + contractID, + contractName: contractState._vm.type, + data: keyIds, + signingKeyId: (0, utils_js_1.findSuitableSecretKeyId)(contractState, [SPMessage_js_1.SPMessage.OP_KEY_DEL], ['sig']) + }); + }, + 'chelonia/in/processMessage': function (messageOrRawMessage, state) { + const stateCopy = (0, turtledash_1.cloneDeep)(state); + const message = typeof messageOrRawMessage === 'string' ? 
SPMessage_js_1.SPMessage.deserialize(messageOrRawMessage, this.transientSecretKeys, stateCopy, this.config.unwrapMaybeEncryptedData) : messageOrRawMessage; + return (0, sbp_1.default)('chelonia/private/in/processMessage', message, stateCopy).then(() => stateCopy).catch((e) => { + console.warn(`chelonia/in/processMessage: reverting mutation ${message.description()}: ${message.serialize()}`, e); + return state; + }); + }, + 'chelonia/out/fetchResource': async function (cid, { code } = {}) { + const parsedCID = (0, functions_js_1.parseCID)(cid); + if (code != null) { + if (parsedCID.code !== code) { + throw new Error(`Invalid CID content type. Expected ${code}, got ${parsedCID.code}`); + } + } + // Note that chelonia.db/get (set) is a no-op for lightweight clients + // This was added for consistency (processing an event also adds it to the DB) + const local = await (0, sbp_1.default)('chelonia.db/get', cid); + // We don't verify the CID because it's already been verified when it was set + if (local != null) + return local; + const url = `${this.config.connectionURL}/file/${cid}`; + const data = await this.config.fetch(url, { signal: this.abortController.signal }).then((0, utils_js_1.handleFetchResult)('text')); + const ourHash = (0, functions_js_1.createCID)(data, parsedCID.code); + if (ourHash !== cid) { + throw new Error(`expected hash ${cid}. 
Got: ${ourHash}`); + } + await (0, sbp_1.default)('chelonia.db/set', cid, data); + return data; + }, + 'chelonia/out/latestHEADInfo': function (contractID) { + return this.config.fetch(`${this.config.connectionURL}/latestHEADinfo/${contractID}`, { + cache: 'no-store', + signal: this.abortController.signal + }).then((0, utils_js_1.handleFetchResult)('json')); + }, + 'chelonia/out/eventsAfter': utils_js_1.eventsAfter, + 'chelonia/out/eventsBefore': function (contractID, beforeHeight, limit, options) { + if (limit <= 0) { + console.error('[chelonia] invalid params error: "limit" needs to be positive integer'); + } + const offset = Math.max(0, beforeHeight - limit + 1); + const eventsAfterLimit = Math.min(beforeHeight + 1, limit); + return (0, sbp_1.default)('chelonia/out/eventsAfter', contractID, offset, eventsAfterLimit, undefined, options); + }, + 'chelonia/out/eventsBetween': function (contractID, startHash, endHeight, offset = 0, { stream } = { stream: true }) { + if (offset < 0) { + console.error('[chelonia] invalid params error: "offset" needs to be positive integer or zero'); + return; + } + let reader; + const s = new ReadableStream({ + start: async (controller) => { + const first = await this.config.fetch(`${this.config.connectionURL}/file/${startHash}`, { signal: this.abortController.signal }).then((0, utils_js_1.handleFetchResult)('text')); + const deserializedHEAD = SPMessage_js_1.SPMessage.deserializeHEAD(first); + if (deserializedHEAD.contractID !== contractID) { + controller.error(new Error('chelonia/out/eventsBetween: Mismatched contract ID')); + return; + } + const startOffset = Math.max(0, deserializedHEAD.head.height - offset); + const limit = endHeight - startOffset + 1; + if (limit < 1) { + controller.close(); + return; + } + reader = (0, sbp_1.default)('chelonia/out/eventsAfter', contractID, startOffset, limit).getReader(); + }, + async pull(controller) { + const { done, value } = await reader.read(); + if (done) { + controller.close(); + } + 
else { + controller.enqueue(value); + } + } + }); + if (stream) + return s; + // Workaround for + return (0, utils_js_1.collectEventStream)(s); + }, + 'chelonia/rootState': function () { return (0, sbp_1.default)(this.config.stateSelector); }, + 'chelonia/latestContractState': async function (contractID, options = { forceSync: false }) { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + // return a copy of the state if we already have it, unless the only key that's in it is _volatile, + // in which case it means we should sync the contract to get more info. + if (rootState.contracts[contractID] === null) { + throw new errors_js_1.ChelErrorResourceGone('Permanently deleted contract ' + contractID); + } + if (!options.forceSync && rootState[contractID] && Object.keys(rootState[contractID]).some((x) => x !== '_volatile')) { + return (0, turtledash_1.cloneDeep)(rootState[contractID]); + } + let state = Object.create(null); + let contractName = rootState.contracts[contractID]?.type; + const eventsStream = (0, sbp_1.default)('chelonia/out/eventsAfter', contractID, 0, undefined, contractID); + const eventsStreamReader = eventsStream.getReader(); + if (rootState[contractID]) + state._volatile = rootState[contractID]._volatile; + for (;;) { + const { value: event, done } = await eventsStreamReader.read(); + if (done) + return state; + const stateCopy = (0, turtledash_1.cloneDeep)(state); + try { + await (0, sbp_1.default)('chelonia/private/in/processMessage', SPMessage_js_1.SPMessage.deserialize(event, this.transientSecretKeys, state, this.config.unwrapMaybeEncryptedData), state, undefined, contractName); + if (!contractName && state._vm) { + contractName = state._vm.type; + } + } + catch (e) { + console.warn(`[chelonia] latestContractState: '${e.name}': ${e.message} processing:`, event, e.stack); + if (e instanceof errors_js_1.ChelErrorUnrecoverable) + throw e; + state = stateCopy; + } + } + }, + 'chelonia/contract/state': function (contractID, height) { 
+ const state = (0, sbp_1.default)(this.config.stateSelector)[contractID]; + const stateCopy = state && (0, turtledash_1.cloneDeep)(state); + if (stateCopy?._vm && height != null) { + // Remove keys in the future + Object.keys(stateCopy._vm.authorizedKeys).forEach(keyId => { + if (stateCopy._vm.authorizedKeys[keyId]._notBeforeHeight > height) { + delete stateCopy._vm.authorizedKeys[keyId]; + } + }); + } + return stateCopy; + }, + 'chelonia/contract/fullState': function (contractID) { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + if (Array.isArray(contractID)) { + return Object.fromEntries(contractID.map(contractID => { + return [ + contractID, + { + contractState: rootState[contractID], + cheloniaState: rootState.contracts[contractID] + } + ]; + })); + } + return { + contractState: rootState[contractID], + cheloniaState: rootState.contracts[contractID] + }; + }, + // 'chelonia/out' - selectors that send data out to the server + 'chelonia/out/registerContract': async function (params) { + const { contractName, keys, hooks, publishOptions, signingKeyId, actionSigningKeyId, actionEncryptionKeyId } = params; + const manifestHash = this.config.contracts.manifests[contractName]; + const contractInfo = this.manifestToContract[manifestHash]; + if (!contractInfo) + throw new Error(`contract not defined: ${contractName}`); + const signingKey = this.transientSecretKeys[signingKeyId]; + if (!signingKey) + throw new Error(`Signing key ${signingKeyId} is not defined`); + const payload = { + type: contractName, + keys + }; + const contractMsg = SPMessage_js_1.SPMessage.createV1_0({ + contractID: null, + height: 0, + op: [ + SPMessage_js_1.SPMessage.OP_CONTRACT, + (0, signedData_js_1.signedOutgoingDataWithRawKey)(signingKey, payload) + ], + manifest: manifestHash + }); + const contractID = contractMsg.hash(); + await (0, sbp_1.default)('chelonia/private/out/publishEvent', contractMsg, (params.namespaceRegistration + ? 
{ + ...publishOptions, + headers: { + ...publishOptions?.headers, + 'shelter-namespace-registration': params.namespaceRegistration + } + } + : publishOptions), hooks && { + prepublish: hooks.prepublishContract, + postpublish: hooks.postpublishContract + }); + await (0, sbp_1.default)('chelonia/private/out/sync', contractID); + const msg = await (0, sbp_1.default)(actionEncryptionKeyId + ? 'chelonia/out/actionEncrypted' + : 'chelonia/out/actionUnencrypted', { + action: contractName, + contractID, + data: params.data, + signingKeyId: actionSigningKeyId ?? signingKeyId, + encryptionKeyId: actionEncryptionKeyId, + hooks, + publishOptions + }); + return msg; + }, + 'chelonia/out/ownResources': async function (contractID) { + if (!contractID) { + throw new TypeError('A contract ID must be provided'); + } + const response = await this.config.fetch(`${this.config.connectionURL}/ownResources`, { + method: 'GET', + signal: this.abortController.signal, + headers: new Headers([ + [ + 'authorization', + utils_js_1.buildShelterAuthorizationHeader.call(this, contractID) + ] + ]) + }); + if (!response.ok) { + console.error('Unable to fetch own resources', contractID, response.status); + throw new Error(`Unable to fetch own resources for ${contractID}: ${response.status}`); + } + return response.json(); + }, + 'chelonia/out/deleteContract': async function (contractID, credentials = {}) { + if (!contractID) { + throw new TypeError('A contract ID must be provided'); + } + if (!Array.isArray(contractID)) + contractID = [contractID]; + return await Promise.allSettled(contractID.map(async (cid) => { + const hasCredential = (0, turtledash_1.has)(credentials, cid); + const hasToken = (0, turtledash_1.has)(credentials[cid], 'token') && credentials[cid].token; + const hasBillableContractID = (0, turtledash_1.has)(credentials[cid], 'billableContractID') && credentials[cid].billableContractID; + if (!hasCredential || hasToken === hasBillableContractID) { + throw new TypeError(`Either a token 
or a billable contract ID must be provided for ${cid}`); + } + const response = await this.config.fetch(`${this.config.connectionURL}/deleteContract/${cid}`, { + method: 'POST', + signal: this.abortController.signal, + headers: new Headers([ + ['authorization', + hasToken + ? `bearer ${credentials[cid].token.valueOf()}` + : utils_js_1.buildShelterAuthorizationHeader.call(this, credentials[cid].billableContractID)] + ]) + }); + if (!response.ok) { + if (response.status === 404 || response.status === 410) { + console.warn('Contract appears to have been deleted already', cid, response.status); + return; + } + console.error('Unable to delete contract', cid, response.status); + throw new Error(`Unable to delete contract ${cid}: ${response.status}`); + } + })); + }, + // all of these functions will do both the creation of the SPMessage + // and the sending of it via 'chelonia/private/out/publishEvent' + 'chelonia/out/actionEncrypted': function (params) { + return outEncryptedOrUnencryptedAction.call(this, SPMessage_js_1.SPMessage.OP_ACTION_ENCRYPTED, params); + }, + 'chelonia/out/actionUnencrypted': function (params) { + return outEncryptedOrUnencryptedAction.call(this, SPMessage_js_1.SPMessage.OP_ACTION_UNENCRYPTED, params); + }, + 'chelonia/out/keyShare': async function (params) { + const { atomic, originatingContractName, originatingContractID, contractName, contractID, data, hooks, publishOptions } = params; + const originatingManifestHash = this.config.contracts.manifests[originatingContractName]; + const destinationManifestHash = this.config.contracts.manifests[contractName]; + const originatingContract = originatingContractID ? 
this.manifestToContract[originatingManifestHash]?.contract : undefined; + const destinationContract = this.manifestToContract[destinationManifestHash]?.contract; + if ((originatingContractID && !originatingContract) || !destinationContract) { + throw new Error('Contract name not found'); + } + const payload = data; + if (!params.signingKeyId && !params.signingKey) { + throw new TypeError('Either signingKeyId or signingKey must be specified'); + } + let msg = SPMessage_js_1.SPMessage.createV1_0({ + contractID, + op: [ + SPMessage_js_1.SPMessage.OP_KEY_SHARE, + params.signingKeyId + ? (0, signedData_js_1.signedOutgoingData)(contractID, params.signingKeyId, payload, this.transientSecretKeys) + : (0, signedData_js_1.signedOutgoingDataWithRawKey)(params.signingKey, payload) + ], + manifest: destinationManifestHash + }); + if (!atomic) { + msg = await (0, sbp_1.default)('chelonia/private/out/publishEvent', msg, publishOptions, hooks); + } + return msg; + }, + 'chelonia/out/keyAdd': async function (params) { + // TODO: For foreign keys, recalculate the key id + // TODO: Make this a noop if the key already exists with the given permissions + const { atomic, contractID, contractName, data, hooks, publishOptions } = params; + const manifestHash = this.config.contracts.manifests[contractName]; + const contract = this.manifestToContract[manifestHash]?.contract; + if (!contract) { + throw new Error('Contract name not found'); + } + const state = contract.state(contractID); + const payload = data.filter((wk) => { + const k = ((0, encryptedData_js_1.isEncryptedData)(wk) ? 
wk.valueOf() : wk); + if ((0, turtledash_1.has)(state._vm.authorizedKeys, k.id)) { + if (state._vm.authorizedKeys[k.id]._notAfterHeight == null) { + // Can't add a key that exists + return false; + } + } + return true; + }); + if (payload.length === 0) + return; + let msg = SPMessage_js_1.SPMessage.createV1_0({ + contractID, + op: [ + SPMessage_js_1.SPMessage.OP_KEY_ADD, + (0, signedData_js_1.signedOutgoingData)(contractID, params.signingKeyId, payload, this.transientSecretKeys) + ], + manifest: manifestHash + }); + if (!atomic) { + msg = await (0, sbp_1.default)('chelonia/private/out/publishEvent', msg, publishOptions, hooks); + } + return msg; + }, + 'chelonia/out/keyDel': async function (params) { + const { atomic, contractID, contractName, data, hooks, publishOptions } = params; + const manifestHash = this.config.contracts.manifests[contractName]; + const contract = this.manifestToContract[manifestHash]?.contract; + if (!contract) { + throw new Error('Contract name not found'); + } + const state = contract.state(contractID); + const payload = data.map((keyId) => { + if ((0, encryptedData_js_1.isEncryptedData)(keyId)) + return keyId; + if (!(0, turtledash_1.has)(state._vm.authorizedKeys, keyId) || state._vm.authorizedKeys[keyId]._notAfterHeight != null) + return undefined; + if (state._vm.authorizedKeys[keyId]._private) { + return (0, encryptedData_js_1.encryptedOutgoingData)(contractID, state._vm.authorizedKeys[keyId]._private, keyId); + } + else { + return keyId; + } + }).filter(Boolean); + let msg = SPMessage_js_1.SPMessage.createV1_0({ + contractID, + op: [ + SPMessage_js_1.SPMessage.OP_KEY_DEL, + (0, signedData_js_1.signedOutgoingData)(contractID, params.signingKeyId, payload, this.transientSecretKeys) + ], + manifest: manifestHash + }); + if (!atomic) { + msg = await (0, sbp_1.default)('chelonia/private/out/publishEvent', msg, publishOptions, hooks); + } + return msg; + }, + 'chelonia/out/keyUpdate': async function (params) { + const { atomic, contractID, 
contractName, data, hooks, publishOptions } = params; + const manifestHash = this.config.contracts.manifests[contractName]; + const contract = this.manifestToContract[manifestHash]?.contract; + if (!contract) { + throw new Error('Contract name not found'); + } + const state = contract.state(contractID); + const payload = data.map((key) => { + if ((0, encryptedData_js_1.isEncryptedData)(key)) + return key; + const { oldKeyId } = key; + if (state._vm.authorizedKeys[oldKeyId]._private) { + return (0, encryptedData_js_1.encryptedOutgoingData)(contractID, state._vm.authorizedKeys[oldKeyId]._private, key); + } + else { + return key; + } + }); + let msg = SPMessage_js_1.SPMessage.createV1_0({ + contractID, + op: [ + SPMessage_js_1.SPMessage.OP_KEY_UPDATE, + (0, signedData_js_1.signedOutgoingData)(contractID, params.signingKeyId, payload, this.transientSecretKeys) + ], + manifest: manifestHash + }); + if (!atomic) { + msg = await (0, sbp_1.default)('chelonia/private/out/publishEvent', msg, publishOptions, hooks); + } + return msg; + }, + 'chelonia/out/keyRequest': async function (params) { + const { originatingContractID, originatingContractName, contractID, contractName, hooks, publishOptions, innerSigningKeyId, encryptionKeyId, innerEncryptionKeyId, encryptKeyRequestMetadata, reference } = params; + // `encryptKeyRequestMetadata` is optional because it could be desirable + // sometimes to allow anyone to audit OP_KEY_REQUEST and OP_KEY_SHARE + // operations. If `encryptKeyRequestMetadata` were always true, it would + // be harder in these situations to see interactions between two contracts. 
+ const manifestHash = this.config.contracts.manifests[contractName]; + const originatingManifestHash = this.config.contracts.manifests[originatingContractName]; + const contract = this.manifestToContract[manifestHash]?.contract; + const originatingContract = this.manifestToContract[originatingManifestHash]?.contract; + if (!contract) { + throw new Error('Contract name not found'); + } + const rootState = (0, sbp_1.default)(this.config.stateSelector); + try { + await (0, sbp_1.default)('chelonia/contract/retain', contractID, { ephemeral: true }); + const state = contract.state(contractID); + const originatingState = originatingContract.state(originatingContractID); + const havePendingKeyRequest = Object.values(originatingState._vm.authorizedKeys).findIndex((k) => { + return k._notAfterHeight == null && k.meta?.keyRequest?.contractID === contractID && state?._volatile?.pendingKeyRequests?.some(pkr => pkr.name === k.name); + }) !== -1; + // If there's a pending key request for this contract, return + if (havePendingKeyRequest) { + return; + } + const keyRequestReplyKey = (0, crypto_1.keygen)(crypto_1.EDWARDS25519SHA512BATCH); + const keyRequestReplyKeyId = (0, crypto_1.keyId)(keyRequestReplyKey); + const keyRequestReplyKeyP = (0, crypto_1.serializeKey)(keyRequestReplyKey, false); + const keyRequestReplyKeyS = (0, crypto_1.serializeKey)(keyRequestReplyKey, true); + const signingKeyId = (0, utils_js_1.findSuitableSecretKeyId)(originatingState, [SPMessage_js_1.SPMessage.OP_KEY_ADD], ['sig']); + if (!signingKeyId) { + throw (0, errors_js_1.ChelErrorUnexpected)(`Unable to send key request. Originating contract is missing a key with OP_KEY_ADD permission. 
contractID=${contractID} originatingContractID=${originatingContractID}`); + } + const keyAddOp = () => (0, sbp_1.default)('chelonia/out/keyAdd', { + contractID: originatingContractID, + contractName: originatingContractName, + data: [{ + id: keyRequestReplyKeyId, + name: '#krrk-' + keyRequestReplyKeyId, + purpose: ['sig'], + ringLevel: Number.MAX_SAFE_INTEGER, + permissions: params.permissions === '*' + ? '*' + : Array.isArray(params.permissions) + ? [...params.permissions, SPMessage_js_1.SPMessage.OP_KEY_SHARE] + : [SPMessage_js_1.SPMessage.OP_KEY_SHARE], + allowedActions: params.allowedActions, + meta: { + private: { + content: (0, encryptedData_js_1.encryptedOutgoingData)(originatingContractID, encryptionKeyId, keyRequestReplyKeyS), + shareable: false + }, + keyRequest: { + ...(reference && { reference: encryptKeyRequestMetadata ? (0, encryptedData_js_1.encryptedOutgoingData)(originatingContractID, encryptionKeyId, reference) : reference }), + contractID: encryptKeyRequestMetadata ? (0, encryptedData_js_1.encryptedOutgoingData)(originatingContractID, encryptionKeyId, contractID) : contractID + } + }, + data: keyRequestReplyKeyP + }], + signingKeyId + }).catch((e) => { + console.error(`[chelonia] Error sending OP_KEY_ADD for ${originatingContractID} during key request to ${contractID}`, e); + throw e; + }); + const payload = { + contractID: originatingContractID, + height: rootState.contracts[originatingContractID].height, + replyWith: (0, signedData_js_1.signedOutgoingData)(originatingContractID, innerSigningKeyId, { + encryptionKeyId, + responseKey: (0, encryptedData_js_1.encryptedOutgoingData)(contractID, innerEncryptionKeyId, keyRequestReplyKeyS) + }, this.transientSecretKeys), + request: '*' + }; + let msg = SPMessage_js_1.SPMessage.createV1_0({ + contractID, + op: [ + SPMessage_js_1.SPMessage.OP_KEY_REQUEST, + (0, signedData_js_1.signedOutgoingData)(contractID, params.signingKeyId, encryptKeyRequestMetadata + ? 
(0, encryptedData_js_1.encryptedOutgoingData)(contractID, innerEncryptionKeyId, payload) + : payload, this.transientSecretKeys) + ], + manifest: manifestHash + }); + msg = await (0, sbp_1.default)('chelonia/private/out/publishEvent', msg, publishOptions, { + ...hooks, + // We ensure that both messages are placed into the publish queue + prepublish: (...args) => { + return keyAddOp().then(() => hooks?.prepublish?.(...args)); + } + }); + return msg; + } + finally { + await (0, sbp_1.default)('chelonia/contract/release', contractID, { ephemeral: true }); + } + }, + 'chelonia/out/keyRequestResponse': async function (params) { + const { atomic, contractID, contractName, data, hooks, publishOptions } = params; + const manifestHash = this.config.contracts.manifests[contractName]; + const contract = this.manifestToContract[manifestHash]?.contract; + if (!contract) { + throw new Error('Contract name not found'); + } + const payload = data; + let message = SPMessage_js_1.SPMessage.createV1_0({ + contractID, + op: [ + SPMessage_js_1.SPMessage.OP_KEY_REQUEST_SEEN, + (0, signedData_js_1.signedOutgoingData)(contractID, params.signingKeyId, payload, this.transientSecretKeys) + ], + manifest: manifestHash + }); + if (!atomic) { + message = await (0, sbp_1.default)('chelonia/private/out/publishEvent', message, publishOptions, hooks); + } + return message; + }, + 'chelonia/out/atomic': async function (params) { + const { contractID, contractName, data, hooks, publishOptions } = params; + const manifestHash = this.config.contracts.manifests[contractName]; + const contract = this.manifestToContract[manifestHash]?.contract; + if (!contract) { + throw new Error('Contract name not found'); + } + const payload = (await Promise.all(data.map(([selector, opParams]) => { + if (!['chelonia/out/actionEncrypted', 'chelonia/out/actionUnencrypted', 'chelonia/out/keyAdd', 'chelonia/out/keyDel', 'chelonia/out/keyUpdate', 'chelonia/out/keyRequestResponse', 'chelonia/out/keyShare'].includes(selector)) 
{ + throw new Error('Selector not allowed in OP_ATOMIC: ' + selector); + } + return (0, sbp_1.default)(selector, { ...opParams, ...params, data: opParams.data, atomic: true }); + }))).flat().filter(Boolean).map((msg) => { + return [msg.opType(), msg.opValue()]; + }); + let msg = SPMessage_js_1.SPMessage.createV1_0({ + contractID, + op: [ + SPMessage_js_1.SPMessage.OP_ATOMIC, + (0, signedData_js_1.signedOutgoingData)(contractID, params.signingKeyId, payload, this.transientSecretKeys) + ], + manifest: manifestHash + }); + msg = await (0, sbp_1.default)('chelonia/private/out/publishEvent', msg, publishOptions, hooks); + return msg; + }, + 'chelonia/out/protocolUpgrade': async function () { + }, + 'chelonia/out/propSet': async function () { + }, + 'chelonia/out/propDel': async function () { + }, + 'chelonia/out/encryptedOrUnencryptedPubMessage': function ({ contractID, innerSigningKeyId, encryptionKeyId, signingKeyId, data }) { + const serializedData = outputEncryptedOrUnencryptedMessage.call(this, { + contractID, + innerSigningKeyId, + encryptionKeyId, + signingKeyId, + data + }); + this.pubsub.pub(contractID, serializedData); + }, + // Note: This is a bare-bones function designed for precise control. In many + // situations, the `chelonia/kv/queuedSet` selector (in chelonia-utils.js) + // will be simpler and more appropriate to use. + // In most situations, you want to use some queuing strategy (which this + // selector doesn't provide) alongside writing to the KV store. Therefore, as + // a general rule, you shouldn't be calling this selector directly unless + // you're building a utility library or if you have very specific needs. In + // this case, see if `chelonia/kv/queuedSet` covers your needs. + // `data` is allowed to be falsy, in which case a fetch will occur first and + // the `onconflict` handler will be called. 
+ 'chelonia/kv/set': async function (contractID, key, data, { ifMatch, innerSigningKeyId, encryptionKeyId, signingKeyId, maxAttempts, onconflict }) {
+ maxAttempts = maxAttempts ?? 3;
+ const url = `${this.config.connectionURL}/kv/${encodeURIComponent(contractID)}/${encodeURIComponent(key)}`;
+ const hasOnconflict = typeof onconflict === 'function';
+ let response;
+ // The `resolveData` function is tasked with computing merged data, as in
+ // merging the existing stored values (after a conflict or initial fetch)
+ // and new data. The return value indicates whether there should be a new
+ // attempt at storing updated data (if `true`) or not (if `false`).
+ const resolveData = async () => {
+ let currentValue;
+ // Rationale:
+ // * response.ok could be the result of `GET` (no initial data)
+ // * 409 indicates a conflict because the height used is too old
+ // * 412 indicates a conflict (precondition failed) because the data
+ // on the KV store have been updated / is not what we expected
+ // All of these situations should trigger parsing the response and
+ // conflict resolution
+ if (response.ok || response.status === 409 || response.status === 412) {
+ const serializedDataText = await response.text();
+ // We can get 409 even if there's no data on the server. We still need
+ // to call `onconflict` in this case, but we don't need to attempt to
+ // parse the response.
+ // This prevents this from failing in such cases, which can result in
+ // race conditions and data not being properly initialised.
+ // See the related upstream issue (NOTE(review): the issue link is missing here)
+ currentValue = serializedDataText
+ ? parseEncryptedOrUnencryptedMessage(this, {
+ contractID,
+ serializedData: JSON.parse(serializedDataText),
+ meta: key
+ })
+ : undefined;
+ // Rationale: 404 and 410 both indicate that the store key doesn't exist.
+ // These are not treated as errors since we could still set the value.
+ }
+ else if (response.status !== 404 && response.status !== 410) {
+ throw new errors_js_1.ChelErrorUnexpectedHttpResponseCode('[kv/set] Invalid response code: ' + response.status);
+ }
+ const result = await onconflict({
+ contractID,
+ key,
+ failedData: data,
+ status: response.status,
+ // If no x-cid or etag header was returned, `ifMatch` would likely be
+ // returned as undefined, which will then use the `''` fallback value
+ // when writing. This allows 404 / 410 responses to work even if no
+ // etag is explicitly given.
+ etag: response.headers.get('x-cid') || response.headers.get('etag'),
+ get currentData() {
+ return currentValue?.data;
+ },
+ currentValue
+ });
+ if (!result)
+ return false;
+ data = result[0];
+ ifMatch = result[1];
+ return true;
+ };
+ for (;;) {
+ if (data !== undefined) {
+ const serializedData = outputEncryptedOrUnencryptedMessage.call(this, {
+ contractID,
+ innerSigningKeyId,
+ encryptionKeyId,
+ signingKeyId,
+ data: data,
+ meta: key
+ });
+ response = await this.config.fetch(url, {
+ headers: new Headers([[
+ 'authorization', utils_js_1.buildShelterAuthorizationHeader.call(this, contractID)
+ ], [
+ 'if-match', ifMatch || '""'
+ ]
+ ]),
+ method: 'POST',
+ body: JSON.stringify(serializedData),
+ signal: this.abortController.signal
+ });
+ }
+ else {
+ if (!hasOnconflict) {
+ throw TypeError('onconflict required with empty data');
+ }
+ // If no initial data provided, perform a GET `fetch` to get the current
+ // data and CID. Then, `onconflict` will be used to merge the current
+ // and new data.
+ response = await this.config.fetch(url, {
+ headers: new Headers([[
+ 'authorization', utils_js_1.buildShelterAuthorizationHeader.call(this, contractID)
+ ]]),
+ signal: this.abortController.signal
+ });
+ // This is only for the initial case; the logic is replicated below
+ // for subsequent iterations that require conflict resolution.
+ if (await resolveData()) {
+ continue;
+ }
+ else {
+ break;
+ }
+ }
+ if (!response.ok) {
+ // Rationale: 409 and 412 indicate conflict resolution is needed.
+ if (response.status === 409 || response.status === 412) {
+ if (--maxAttempts <= 0) {
+ throw new Error('kv/set conflict setting KV value');
+ }
+ // Only retry if an onconflict handler exists to potentially resolve it.
+ await (0, turtledash_1.delay)((0, turtledash_1.randomIntFromRange)(0, 1500));
+ if (hasOnconflict) {
+ if (await resolveData()) {
+ continue;
+ }
+ else {
+ break;
+ }
+ }
+ else {
+ // Can't resolve automatically if there's no conflict handler.
+ throw new Error(`kv/set failed with status ${response.status} and no onconflict handler was provided`);
+ }
+ }
+ throw new errors_js_1.ChelErrorUnexpectedHttpResponseCode('kv/set invalid response status: ' + response.status);
+ }
+ break;
+ }
+ },
+ 'chelonia/kv/get': async function (contractID, key) {
+ const response = await this.config.fetch(`${this.config.connectionURL}/kv/${encodeURIComponent(contractID)}/${encodeURIComponent(key)}`, {
+ headers: new Headers([[
+ 'authorization', utils_js_1.buildShelterAuthorizationHeader.call(this, contractID)
+ ]]),
+ signal: this.abortController.signal
+ });
+ if (response.status === 404) {
+ return null;
+ }
+ if (!response.ok) {
+ throw new Error('Invalid response status: ' + response.status);
+ }
+ const data = await response.json();
+ return parseEncryptedOrUnencryptedMessage(this, {
+ contractID,
+ serializedData: data,
+ meta: key
+ });
+ },
+ // To set filters for a contract, call with `filter` set to an array of KV
+ // keys to receive updates for over the WebSocket. An empty array means that
+ // no KV updates will be sent.
+ // Calling with a single argument (the contract ID) will remove filters,
+ // meaning that KV updates will be sent for _any_ KV key.
+ // The last call takes precedence, so, for example, calling with filter + // set to `['foo', 'bar']` and then with `['baz']` means that KV updates will + // be received for `baz` only, not for `foo`, `bar` or any other keys. + 'chelonia/kv/setFilter': function (contractID, filter) { + this.pubsub.setKvFilter(contractID, filter); + }, + 'chelonia/parseEncryptedOrUnencryptedDetachedMessage': function ({ contractID, serializedData, meta }) { + return parseEncryptedOrUnencryptedMessage(this, { + contractID, + serializedData, + meta + }); + } +}); +function contractNameFromAction(action) { + const regexResult = exports.ACTION_REGEX.exec(action); + const contractName = regexResult?.[2]; + if (!contractName) + throw new Error(`Poorly named action '${action}': missing contract name.`); + return contractName; +} +function outputEncryptedOrUnencryptedMessage({ contractID, innerSigningKeyId, encryptionKeyId, signingKeyId, data, meta }) { + const state = (0, sbp_1.default)(this.config.stateSelector)[contractID]; + const signedMessage = innerSigningKeyId + ? (state._vm.authorizedKeys[innerSigningKeyId] && state._vm.authorizedKeys[innerSigningKeyId]?._notAfterHeight == null) + ? (0, signedData_js_1.signedOutgoingData)(contractID, innerSigningKeyId, data, this.transientSecretKeys) + : (0, signedData_js_1.signedOutgoingDataWithRawKey)(this.transientSecretKeys[innerSigningKeyId], data) + : data; + const payload = !encryptionKeyId + ? signedMessage + : (0, encryptedData_js_1.encryptedOutgoingData)(contractID, encryptionKeyId, signedMessage); + const message = (0, signedData_js_1.signedOutgoingData)(contractID, signingKeyId, payload, this.transientSecretKeys); + const rootState = (0, sbp_1.default)(this.config.stateSelector); + const height = String(rootState.contracts[contractID].height); + const serializedData = { ...message.serialize((meta ?? 
'') + height), height }; + return serializedData; +} +function parseEncryptedOrUnencryptedMessage(ctx, { contractID, serializedData, meta }) { + if (!serializedData) { + throw new TypeError('[chelonia] parseEncryptedOrUnencryptedMessage: serializedData is required'); + } + const state = (0, sbp_1.default)(ctx.config.stateSelector)[contractID]; + const numericHeight = parseInt(serializedData.height); + const rootState = (0, sbp_1.default)(ctx.config.stateSelector); + const currentHeight = rootState.contracts[contractID].height; + if (!(numericHeight >= 0) || !(numericHeight <= currentHeight)) { + throw new Error(`[chelonia] parseEncryptedOrUnencryptedMessage: Invalid height ${serializedData.height}; it must be between 0 and ${currentHeight}`); + } + // Additional data used for verification + const aad = (meta ?? '') + serializedData.height; + const v = (0, signedData_js_1.signedIncomingData)(contractID, state, serializedData, numericHeight, aad, (message) => { + return (0, encryptedData_js_1.maybeEncryptedIncomingData)(contractID, state, message, numericHeight, ctx.transientSecretKeys, aad, undefined); + }); + // Cached values + let encryptionKeyId; + let innerSigningKeyId; + // Lazy unwrap function + // We don't use `unwrapMaybeEncryptedData`, which would almost do the same, + // because it swallows decryption errors, which we want to propagate to + // consumers of the KV API. 
+ const unwrap = (() => { + let result; + return () => { + if (!result) { + try { + let unwrapped; + // First, we unwrap the signed data + unwrapped = v.valueOf(); + // If this is encrypted data, attempt decryption + if ((0, encryptedData_js_1.isEncryptedData)(unwrapped)) { + encryptionKeyId = unwrapped.encryptionKeyId; + unwrapped = unwrapped.valueOf(); + // There could be inner signed data (inner signatures), so we unwrap + // that too + if ((0, signedData_js_1.isSignedData)(unwrapped)) { + innerSigningKeyId = unwrapped.signingKeyId; + unwrapped = unwrapped.valueOf(); + } + else { + innerSigningKeyId = null; + } + } + else { + encryptionKeyId = null; + innerSigningKeyId = null; + } + result = [unwrapped]; + } + catch (e) { + result = [undefined, e]; + } + } + if (result.length === 2) { + throw result[1]; + } + return result[0]; + }; + })(); + const result = { + get contractID() { + return contractID; + }, + get innerSigningKeyId() { + if (innerSigningKeyId === undefined) { + try { + unwrap(); + } + catch { + // We're not interested in an error, that'd only be for the 'data' + // accessor. + } + } + return innerSigningKeyId; + }, + get encryptionKeyId() { + if (encryptionKeyId === undefined) { + try { + unwrap(); + } + catch { + // We're not interested in an error, that'd only be for the 'data' + // accessor. 
+ } + } + return encryptionKeyId; + }, + get signingKeyId() { + return v.signingKeyId; + }, + get data() { + return unwrap(); + }, + get signingContractID() { + return (0, utils_js_1.getContractIDfromKeyId)(contractID, result.signingKeyId, state); + }, + get innerSigningContractID() { + return (0, utils_js_1.getContractIDfromKeyId)(contractID, result.innerSigningKeyId, state); + } + }; + return result; +} +async function outEncryptedOrUnencryptedAction(opType, params) { + const { atomic, action, contractID, data, hooks, publishOptions } = params; + const contractName = contractNameFromAction(action); + const manifestHash = this.config.contracts.manifests[contractName]; + const { contract } = this.manifestToContract[manifestHash]; + const state = contract.state(contractID); + const meta = await contract.metadata.create(); + const unencMessage = { action, data, meta }; + const signedMessage = params.innerSigningKeyId + ? (state._vm.authorizedKeys[params.innerSigningKeyId] && state._vm.authorizedKeys[params.innerSigningKeyId]?._notAfterHeight == null) + ? (0, signedData_js_1.signedOutgoingData)(contractID, params.innerSigningKeyId, unencMessage, this.transientSecretKeys) + : (0, signedData_js_1.signedOutgoingDataWithRawKey)(this.transientSecretKeys[params.innerSigningKeyId], unencMessage) + : unencMessage; + if (opType === SPMessage_js_1.SPMessage.OP_ACTION_ENCRYPTED && !params.encryptionKeyId) { + throw new Error('OP_ACTION_ENCRYPTED requires an encryption key ID be given'); + } + if (params.encryptionKey) { + if (params.encryptionKeyId !== (0, crypto_1.keyId)(params.encryptionKey)) { + throw new Error('OP_ACTION_ENCRYPTED raw encryption key does not match encryptionKeyId'); + } + } + const payload = opType === SPMessage_js_1.SPMessage.OP_ACTION_UNENCRYPTED + ? signedMessage + : params.encryptionKey + ? 
(0, encryptedData_js_1.encryptedOutgoingDataWithRawKey)(params.encryptionKey, signedMessage) + : (0, encryptedData_js_1.encryptedOutgoingData)(contractID, params.encryptionKeyId, signedMessage); + let message = SPMessage_js_1.SPMessage.createV1_0({ + contractID, + op: [ + opType, + (0, signedData_js_1.signedOutgoingData)(contractID, params.signingKeyId, payload, this.transientSecretKeys) + ], + manifest: manifestHash + }); + if (!atomic) { + message = await (0, sbp_1.default)('chelonia/private/out/publishEvent', message, publishOptions, hooks); + } + return message; +} +// The gettersProxy is what makes Vue-like getters possible. In other words, +// we want to make sure that the getter functions that we defined in each +// contract get passed the 'state' when a getter is accessed. +// We pass in the state by creating a Proxy object that does it for us. +// This allows us to maintain compatibility with Vue.js and integrate +// the contract getters into the Vue-facing getters. +// For this to work, other getters need to be implemented relative to a +// 'current' getter that returns the state itself. 
For example: +// ``` +// { +// currentMailboxState: (state) => state, // In the contract +// currentMailboxState: (state) => state[state.currentMailboxId], // In the app +// lastMessage: (state, getters) => // Shared getter for both app and contract +// getters.currentMailboxState.messages.slice(-1).pop() +// } +// ``` +function gettersProxy(state, getters) { + const proxyGetters = new Proxy({}, { + get(_target, prop) { + return getters[prop](state, proxyGetters); + } + }); + return { getters: proxyGetters }; +} +(0, sbp_1.default)('sbp/domains/lock', ['chelonia']); diff --git a/dist/cjs/chelonia.d.cts b/dist/cjs/chelonia.d.cts new file mode 100644 index 0000000..0101368 --- /dev/null +++ b/dist/cjs/chelonia.d.cts @@ -0,0 +1,175 @@ +import '@sbp/okturtles.eventqueue'; +import '@sbp/okturtles.events'; +import type { SPKey, SPOpKeyAdd, SPOpKeyDel, SPOpKeyRequestSeen, SPOpKeyShare, SPOpKeyUpdate } from './SPMessage.cjs'; +import type { Key } from '@chelonia/crypto'; +import { SPMessage } from './SPMessage.cjs'; +import './chelonia-utils.cjs'; +import type { EncryptedData } from './encryptedData.cjs'; +import './files.cjs'; +import './internals.cjs'; +import './time-sync.cjs'; +import { ChelContractState } from './types.cjs'; +export type ChelRegParams = { + contractName: string; + server?: string; + data: object; + signingKeyId: string; + actionSigningKeyId: string; + actionEncryptionKeyId?: string | null | undefined; + keys: (SPKey | EncryptedData)[]; + namespaceRegistration?: string | null | undefined; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + postpublishContract?: (msg: SPMessage) => void; + preSendCheck?: (msg: SPMessage, state: ChelContractState) => void; + beforeRequest?: (msg1: SPMessage, msg2: SPMessage) => Promise | void; + prepublish?: (msg: SPMessage) => Promise | void; + postpublish?: (msg: SPMessage) => Promise | void; + onprocessed?: (msg: SPMessage) => Promise | void; + }; + publishOptions?: { + headers?: Record | null | undefined; 
+ billableContractID?: string | null | undefined; + maxAttempts?: number | null | undefined; + }; +}; +export type ChelActionParams = { + action: string; + server?: string; + contractID: string; + data: object; + signingKeyId: string; + innerSigningKeyId: string; + encryptionKeyId?: string | null | undefined; + encryptionKey?: Key | null | undefined; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise | void; + postpublish?: (msg: SPMessage) => Promise | void; + }; + publishOptions?: { + maxAttempts?: number; + }; + atomic: boolean; +}; +export type ChelKeyAddParams = { + contractName: string; + contractID: string; + data: SPOpKeyAdd; + signingKeyId: string; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise | void; + postpublish?: (msg: SPMessage) => Promise | void; + }; + publishOptions?: { + maxAttempts?: number; + }; + atomic: boolean; +}; +export type ChelKeyDelParams = { + contractName: string; + contractID: string; + data: SPOpKeyDel; + signingKeyId: string; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise; + postpublish?: (msg: SPMessage) => Promise; + }; + publishOptions?: { + maxAttempts?: number; + }; + atomic: boolean; +}; +export type ChelKeyUpdateParams = { + contractName: string; + contractID: string; + data: SPOpKeyUpdate; + signingKeyId: string; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise; + postpublish?: (msg: SPMessage) => Promise; + }; + publishOptions?: { + maxAttempts?: number; + }; + atomic: boolean; +}; +export type ChelKeyShareParams = { + originatingContractID?: string; + originatingContractName?: string; + contractID: string; + contractName: string; + data: SPOpKeyShare; + signingKeyId?: string; + signingKey?: Key; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise; + 
postpublish?: (msg: SPMessage) => Promise; + }; + publishOptions?: { + maxAttempts: number; + }; + atomic: boolean; +}; +export type ChelKeyRequestParams = { + originatingContractID: string; + originatingContractName: string; + contractName: string; + contractID: string; + signingKeyId: string; + innerSigningKeyId: string; + encryptionKeyId: string; + innerEncryptionKeyId: string; + encryptKeyRequestMetadata?: boolean; + permissions?: '*' | string[]; + allowedActions?: '*' | string[]; + reference?: string; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise; + postpublish?: (msg: SPMessage) => Promise; + }; + publishOptions?: { + maxAttempts?: number; + }; + atomic: boolean; +}; +export type ChelKeyRequestResponseParams = { + contractName: string; + contractID: string; + data: SPOpKeyRequestSeen; + signingKeyId: string; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise; + postpublish?: (msg: SPMessage) => Promise; + }; + publishOptions?: { + maxAttempts?: number; + }; + atomic: boolean; +}; +export type ChelAtomicParams = { + originatingContractID: string; + originatingContractName: string; + contractName: string; + contractID: string; + signingKeyId: string; + data: [sel: string, data: ChelActionParams | ChelKeyRequestParams | ChelKeyShareParams][]; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise; + postpublish?: (msg: SPMessage) => Promise; + }; + publishOptions?: { + maxAttempts?: number; + }; +}; +export { SPMessage }; +export declare const ACTION_REGEX: RegExp; +declare const _default: string[]; +export default _default; diff --git a/dist/cjs/constants.cjs b/dist/cjs/constants.cjs new file mode 100644 index 0000000..3a2c717 --- /dev/null +++ b/dist/cjs/constants.cjs @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.INVITE_STATUS = void 0; 
+exports.INVITE_STATUS = { + REVOKED: 'revoked', + VALID: 'valid', + USED: 'used' +}; diff --git a/dist/cjs/constants.d.cts b/dist/cjs/constants.d.cts new file mode 100644 index 0000000..7299f7b --- /dev/null +++ b/dist/cjs/constants.d.cts @@ -0,0 +1,5 @@ +export declare const INVITE_STATUS: { + REVOKED: string; + VALID: string; + USED: string; +}; diff --git a/dist/cjs/db.cjs b/dist/cjs/db.cjs new file mode 100644 index 0000000..e62059d --- /dev/null +++ b/dist/cjs/db.cjs @@ -0,0 +1,222 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.prefixHandlers = exports.parsePrefixableKey = exports.checkKey = void 0; +require("@sbp/okturtles.data"); +require("@sbp/okturtles.eventqueue"); +const sbp_1 = __importDefault(require("@sbp/sbp")); +const SPMessage_js_1 = require("./SPMessage.cjs"); +const errors_js_1 = require("./errors.cjs"); +const headPrefix = 'head='; +const getContractIdFromLogHead = (key) => { + if (!key.startsWith(headPrefix)) + return; + return key.slice(headPrefix.length); +}; +const getLogHead = (contractID) => `${headPrefix}${contractID}`; +const checkKey = (key) => { + // Disallow unprintable characters, slashes, and TAB. 
+ // Also disallow characters not allowed by Windows: + // + if (/[\x00-\x1f\x7f\t\\/<>:"|?*]/.test(key)) { // eslint-disable-line no-control-regex + throw new Error(`bad key: ${JSON.stringify(key)}`); + } +}; +exports.checkKey = checkKey; +const parsePrefixableKey = (key) => { + const i = key.indexOf(':'); + if (i === -1) { + return ['', key]; + } + const prefix = key.slice(0, i + 1); + if (prefix in exports.prefixHandlers) { + return [prefix, key.slice(prefix.length)]; + } + throw new errors_js_1.ChelErrorDBConnection(`Unknown prefix in '${key}'.`); +}; +exports.parsePrefixableKey = parsePrefixableKey; +exports.prefixHandlers = { + // Decode buffers, but don't transform other values. + '': (value) => Buffer.isBuffer(value) ? value.toString('utf8') : value, + 'any:': (value) => value + /* + // 2025-03-24: Commented out because it's not used; currently, only `any:` + // is used in the `/file` route. + // Throw if the value if not a buffer. + 'blob:': value => { + if (Buffer.isBuffer(value)) { + return value + } + throw new ChelErrorDBConnection('Unexpected value: expected a buffer.') + } + */ +}; +// NOTE: To enable persistence of log use 'sbp/selectors/overwrite' +// to overwrite the following selectors: +(0, sbp_1.default)('sbp/selectors/unsafe', ['chelonia.db/get', 'chelonia.db/set', 'chelonia.db/delete']); +// NOTE: MAKE SURE TO CALL 'sbp/selectors/lock' after overwriting them! +// When using a lightweight client, the client doesn't keep a copy of messages +// in the DB. Therefore, `chelonia.db/*` selectors are mostly turned into no-ops. +// The `chelonia.db/get` selector is slightly more complex than a no-op, because +// Chelonia relies on being able to find the current contract head. To overcome +// this, if a head is requested, 'chelonia.db/get' returns information from +// the Chelonia contract state. +const dbPrimitiveSelectors = process.env.LIGHTWEIGHT_CLIENT === 'true' + ? 
{ + 'chelonia.db/get': function (key) { + const id = getContractIdFromLogHead(key); + if (!id) + return Promise.resolve(); + const state = (0, sbp_1.default)('chelonia/rootState').contracts[id]; + const value = (state?.HEAD + ? JSON.stringify({ + HEAD: state.HEAD, + height: state.height, + previousKeyOp: state.previousKeyOp + }) + : undefined); + return Promise.resolve(value); + }, + 'chelonia.db/set': function () { + return Promise.resolve(); + }, + 'chelonia.db/delete': function () { + return Promise.resolve(true); + } + } + : { + // eslint-disable-next-line require-await + 'chelonia.db/get': async function (prefixableKey) { + const [prefix, key] = (0, exports.parsePrefixableKey)(prefixableKey); + const value = (0, sbp_1.default)('okTurtles.data/get', key); + if (value === undefined) { + return; + } + return exports.prefixHandlers[prefix](value); + }, + // eslint-disable-next-line require-await + 'chelonia.db/set': async function (key, value) { + (0, exports.checkKey)(key); + return (0, sbp_1.default)('okTurtles.data/set', key, value); + }, + // eslint-disable-next-line require-await + 'chelonia.db/delete': async function (key) { + return (0, sbp_1.default)('okTurtles.data/delete', key); + } + }; +exports.default = (0, sbp_1.default)('sbp/selectors/register', { + ...dbPrimitiveSelectors, + 'chelonia/db/getEntryMeta': async (contractID, height) => { + const entryMetaJson = await (0, sbp_1.default)('chelonia.db/get', `_private_hidx=${contractID}#${height}`); + if (!entryMetaJson) + return; + return JSON.parse(entryMetaJson); + }, + 'chelonia/db/setEntryMeta': async (contractID, height, entryMeta) => { + const entryMetaJson = JSON.stringify(entryMeta); + await (0, sbp_1.default)('chelonia.db/set', `_private_hidx=${contractID}#${height}`, entryMetaJson); + }, + 'chelonia/db/latestHEADinfo': async (contractID) => { + const r = await (0, sbp_1.default)('chelonia.db/get', getLogHead(contractID)); + return r && JSON.parse(r); + }, + 'chelonia/db/deleteLatestHEADinfo': 
(contractID) => { + return (0, sbp_1.default)('chelonia.db/set', getLogHead(contractID), ''); + }, + 'chelonia/db/getEntry': async function (hash) { + try { + const value = await (0, sbp_1.default)('chelonia.db/get', hash); + if (!value) + throw new Error(`no entry for ${hash}!`); + return SPMessage_js_1.SPMessage.deserialize(value, this.transientSecretKeys, undefined, this.config.unwrapMaybeEncryptedData); + } + catch (e) { + throw new errors_js_1.ChelErrorDBConnection(`${e.name} during getEntry: ${e.message}`); + } + }, + 'chelonia/db/addEntry': function (entry) { + // because addEntry contains multiple awaits - we want to make sure it gets executed + // "atomically" to minimize the chance of a contract fork + return (0, sbp_1.default)('okTurtles.eventQueue/queueEvent', `chelonia/db/${entry.contractID()}`, [ + 'chelonia/private/db/addEntry', entry + ]); + }, + // NEVER call this directly yourself! _always_ call 'chelonia/db/addEntry' instead + 'chelonia/private/db/addEntry': async function (entry) { + try { + const { previousHEAD: entryPreviousHEAD, previousKeyOp: entryPreviousKeyOp, height: entryHeight } = entry.head(); + const contractID = entry.contractID(); + if (await (0, sbp_1.default)('chelonia.db/get', entry.hash())) { + console.warn(`[chelonia.db] entry exists: ${entry.hash()}`); + return entry.hash(); + } + const HEADinfo = await (0, sbp_1.default)('chelonia/db/latestHEADinfo', contractID); + if (!entry.isFirstMessage()) { + if (!HEADinfo) { + throw new Error(`No latest HEAD for ${contractID} when attempting to process entry with previous HEAD ${entryPreviousHEAD} at height ${entryHeight}`); + } + const { HEAD: contractHEAD, previousKeyOp: contractPreviousKeyOp, height: contractHeight } = HEADinfo; + if (entryPreviousHEAD !== contractHEAD) { + console.warn(`[chelonia.db] bad previousHEAD: ${entryPreviousHEAD}! 
Expected: ${contractHEAD} for contractID: ${contractID}`); + throw new errors_js_1.ChelErrorDBBadPreviousHEAD(`bad previousHEAD: ${entryPreviousHEAD}. Expected ${contractHEAD} for contractID: ${contractID}`); + } + else if (entryPreviousKeyOp !== contractPreviousKeyOp) { + console.error(`[chelonia.db] bad previousKeyOp: ${entryPreviousKeyOp}! Expected: ${contractPreviousKeyOp} for contractID: ${contractID}`); + throw new errors_js_1.ChelErrorDBBadPreviousHEAD(`bad previousKeyOp: ${entryPreviousKeyOp}. Expected ${contractPreviousKeyOp} for contractID: ${contractID}`); + } + else if (!Number.isSafeInteger(entryHeight) || entryHeight !== (contractHeight + 1)) { + console.error(`[chelonia.db] bad height: ${entryHeight}! Expected: ${contractHeight + 1} for contractID: ${contractID}`); + throw new errors_js_1.ChelErrorDBBadPreviousHEAD(`[chelonia.db] bad height: ${entryHeight}! Expected: ${contractHeight + 1} for contractID: ${contractID}`); + } + } + else { + if (HEADinfo) { + console.error(`[chelonia.db] bad previousHEAD: ${entryPreviousHEAD}! Expected: for contractID: ${contractID}`); + throw new errors_js_1.ChelErrorDBBadPreviousHEAD(`bad previousHEAD: ${entryPreviousHEAD}. Expected for contractID: ${contractID}`); + } + else if (entryHeight !== 0) { + console.error(`[chelonia.db] bad height: ${entryHeight}! Expected: 0 for contractID: ${contractID}`); + throw new errors_js_1.ChelErrorDBBadPreviousHEAD(`[chelonia.db] bad height: ${entryHeight}! Expected: 0 for contractID: ${contractID}`); + } + } + await (0, sbp_1.default)('chelonia.db/set', entry.hash(), entry.serialize()); + await (0, sbp_1.default)('chelonia.db/set', getLogHead(contractID), JSON.stringify({ + HEAD: entry.hash(), + previousKeyOp: entry.isKeyOp() ? 
entry.hash() : entry.previousKeyOp(), + height: entry.height() + })); + console.debug(`[chelonia.db] HEAD for ${contractID} updated to:`, entry.hash()); + await (0, sbp_1.default)('chelonia/db/setEntryMeta', contractID, entryHeight, { + // The hash is used for reverse lookups (height to CID) + hash: entry.hash(), + // The date isn't currently used, but will be used for filtering messages + date: new Date().toISOString(), + // isKeyOp is used for filtering messages (the actual filtering is + // done more efficiently a separate index key, but `isKeyOp` allows + // us to bootstrap this process without having to load the full message) + // The separate index key bears the prefix `_private_keyop_idx_`. + ...(entry.isKeyOp() && { isKeyOp: true }) + }); + return entry.hash(); + } + catch (e) { + if (e.name.includes('ErrorDB')) { + throw e; // throw the specific type of ErrorDB instance + } + throw new errors_js_1.ChelErrorDBConnection(`${e.name} during addEntry: ${e.message}`); + } + }, + 'chelonia/db/lastEntry': async function (contractID) { + try { + const latestHEADinfo = await (0, sbp_1.default)('chelonia/db/latestHEADinfo', contractID); + if (!latestHEADinfo) + throw new Error(`contract ${contractID} has no latest hash!`); + return (0, sbp_1.default)('chelonia/db/getEntry', latestHEADinfo.HEAD); + } + catch (e) { + throw new errors_js_1.ChelErrorDBConnection(`${e.name} during lastEntry: ${e.message}`); + } + } +}); diff --git a/dist/cjs/db.d.cts b/dist/cjs/db.d.cts new file mode 100644 index 0000000..3b03b48 --- /dev/null +++ b/dist/cjs/db.d.cts @@ -0,0 +1,7 @@ +import '@sbp/okturtles.data'; +import '@sbp/okturtles.eventqueue'; +export declare const checkKey: (key: string) => void; +export declare const parsePrefixableKey: (key: string) => [string, string]; +export declare const prefixHandlers: Record unknown>; +declare const _default: string[]; +export default _default; diff --git a/dist/cjs/encryptedData.cjs b/dist/cjs/encryptedData.cjs new file mode 100644 index 
0000000..669a1ef --- /dev/null +++ b/dist/cjs/encryptedData.cjs @@ -0,0 +1,334 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.maybeEncryptedIncomingData = exports.unwrapMaybeEncryptedData = exports.isRawEncryptedData = exports.encryptedDataKeyId = exports.encryptedIncomingDataWithRawKey = exports.encryptedIncomingForeignData = exports.encryptedIncomingData = exports.encryptedOutgoingDataWithRawKey = exports.encryptedOutgoingData = exports.isEncryptedData = void 0; +const crypto_1 = require("@chelonia/crypto"); +const sbp_1 = __importDefault(require("@sbp/sbp")); +const turtledash_1 = require("turtledash"); +const errors_js_1 = require("./errors.cjs"); +const signedData_js_1 = require("./signedData.cjs"); +const rootStateFn = () => (0, sbp_1.default)('chelonia/rootState'); +// `proto` & `wrapper` are utilities for `isEncryptedData` +const proto = Object.create(null, { + _isEncryptedData: { + value: true + } +}); +const wrapper = (o) => { + return Object.setPrototypeOf(o, proto); +}; +// `isEncryptedData` will return true for objects created by the various +// `encrypt*Data` functions. It's meant to implement functionality equivalent +// to `o instanceof EncryptedData` +const isEncryptedData = (o) => { + return !!o && !!Object.getPrototypeOf(o)?._isEncryptedData; +}; +exports.isEncryptedData = isEncryptedData; +// TODO: Check for permissions and allowedActions; this requires passing some +// additional context +const encryptData = function (stateOrContractID, eKeyId, data, additionalData) { + const state = typeof stateOrContractID === 'string' ? rootStateFn()[stateOrContractID] : stateOrContractID; + // Has the key been revoked? 
If so, attempt to find an authorized key by the same name + const designatedKey = state?._vm?.authorizedKeys?.[eKeyId]; + if (!designatedKey?.purpose.includes('enc')) { + throw new Error(`Encryption key ID ${eKeyId} is missing or is missing encryption purpose`); + } + if (designatedKey._notAfterHeight != null) { + const name = state._vm.authorizedKeys[eKeyId].name; + const newKeyId = Object.values(state._vm?.authorizedKeys).find((v) => v._notAfterHeight == null && v.name === name && v.purpose.includes('enc'))?.id; + if (!newKeyId) { + throw new Error(`Encryption key ID ${eKeyId} has been revoked and no new key exists by the same name (${name})`); + } + eKeyId = newKeyId; + } + const key = state._vm?.authorizedKeys?.[eKeyId].data; + if (!key) { + throw new Error(`Missing encryption key ${eKeyId}`); + } + const deserializedKey = typeof key === 'string' ? (0, crypto_1.deserializeKey)(key) : key; + return [ + (0, crypto_1.keyId)(deserializedKey), + (0, crypto_1.encrypt)(deserializedKey, JSON.stringify(data, (_, v) => { + if (v && (0, turtledash_1.has)(v, 'serialize') && typeof v.serialize === 'function') { + if (v.serialize.length === 1) { + return v.serialize(additionalData); + } + else { + return v.serialize(); + } + } + return v; + }), additionalData) + ]; +}; +// TODO: Check for permissions and allowedActions; this requires passing the +// entire SPMessage +const decryptData = function (state, height, data, additionalKeys, additionalData, validatorFn) { + if (!state) { + throw new errors_js_1.ChelErrorDecryptionError('Missing contract state'); + } + // Compatibility with signedData (composed signed + encrypted data) + if (typeof data.valueOf === 'function') + data = data.valueOf(); + if (!(0, exports.isRawEncryptedData)(data)) { + throw new errors_js_1.ChelErrorDecryptionError('Invalid message format'); + } + const [eKeyId, message] = data; + const key = additionalKeys[eKeyId]; + if (!key) { + throw new errors_js_1.ChelErrorDecryptionKeyNotFound(`Key ${eKeyId} not 
found`, { cause: eKeyId }); + } + // height as NaN is used to allow checking for revokedKeys as well as + // authorizedKeys when decrypting data. This is normally inappropriate because + // revoked keys should be considered compromised and not used for encrypting + // new data + // However, OP_KEY_SHARE may include data encrypted with some other contract's + // keys when a key rotation is done. This is done, along with OP_ATOMIC and + // OP_KEY_UPDATE to rotate keys in a contract while allowing member contracts + // to retrieve and use the new key material. + // In such scenarios, since the keys really live in that other contract, it is + // impossible to know if the keys had been revoked in the 'source' contract + // at the time the key rotation was done. This is also different from foreign + // keys because these encryption keys are not necessarily authorized in the + // contract issuing OP_KEY_SHARE, and what is important is to refer to the + // (keys in the) foreign contract explicitly, as an alternative to sending + // an OP_KEY_SHARE to that contract. + // Using revoked keys represents some security risk since, as mentioned, they + // should generlly be considered compromised. However, in the scenario above + // we can trust that the party issuing OP_KEY_SHARE is not maliciously using + // old (revoked) keys, because there is little to be gained from not doing + // this. If that party's intention were to leak or compromise keys, they can + // already do so by other means, since they have access to the raw secrets + // that OP_KEY_SHARE is meant to protect. Hence, this attack does not open up + // any new attack vectors or venues that were not already available using + // different means. 
+ const designatedKey = state._vm?.authorizedKeys?.[eKeyId]; + if (!designatedKey || (height > designatedKey._notAfterHeight) || (height < designatedKey._notBeforeHeight) || !designatedKey.purpose.includes('enc')) { + throw new errors_js_1.ChelErrorUnexpected(`Key ${eKeyId} is unauthorized or expired for the current contract`); + } + const deserializedKey = typeof key === 'string' ? (0, crypto_1.deserializeKey)(key) : key; + try { + const result = JSON.parse((0, crypto_1.decrypt)(deserializedKey, message, additionalData)); + if (typeof validatorFn === 'function') + validatorFn(result, eKeyId); + return result; + } + catch (e) { + throw new errors_js_1.ChelErrorDecryptionError(e?.message || e); + } +}; +const encryptedOutgoingData = (stateOrContractID, eKeyId, data) => { + if (!stateOrContractID || data === undefined || !eKeyId) + throw new TypeError('Invalid invocation'); + const boundStringValueFn = encryptData.bind(null, stateOrContractID, eKeyId, data); + return wrapper({ + get encryptionKeyId() { + return eKeyId; + }, + get serialize() { + return (additionalData) => boundStringValueFn(additionalData || ''); + }, + get toString() { + return (additionalData) => JSON.stringify(this.serialize(additionalData)); + }, + get valueOf() { + return () => data; + } + }); +}; +exports.encryptedOutgoingData = encryptedOutgoingData; +// Used for OP_CONTRACT as a state does not yet exist +const encryptedOutgoingDataWithRawKey = (key, data) => { + if (data === undefined || !key) + throw new TypeError('Invalid invocation'); + const eKeyId = (0, crypto_1.keyId)(key); + const state = { + _vm: { + authorizedKeys: { + [eKeyId]: { + purpose: ['enc'], + data: (0, crypto_1.serializeKey)(key, false), + _notBeforeHeight: 0, + _notAfterHeight: undefined + } + } + } + }; + const boundStringValueFn = encryptData.bind(null, state, eKeyId, data); + return wrapper({ + get encryptionKeyId() { + return eKeyId; + }, + get serialize() { + return (additionalData) => 
boundStringValueFn(additionalData || ''); + }, + get toString() { + return (additionalData) => JSON.stringify(this.serialize(additionalData)); + }, + get valueOf() { + return () => data; + } + }); +}; +exports.encryptedOutgoingDataWithRawKey = encryptedOutgoingDataWithRawKey; +const encryptedIncomingData = (contractID, state, data, height, additionalKeys, additionalData, validatorFn) => { + let decryptedValue; + const decryptedValueFn = () => { + if (decryptedValue) { + return decryptedValue; + } + if (!state || !additionalKeys) { + const rootState = rootStateFn(); + state = state || rootState[contractID]; + additionalKeys = additionalKeys ?? rootState.secretKeys; + } + decryptedValue = decryptData(state, height, data, additionalKeys, additionalData || '', validatorFn); + if ((0, signedData_js_1.isRawSignedData)(decryptedValue)) { + decryptedValue = (0, signedData_js_1.signedIncomingData)(contractID, state, decryptedValue, height, additionalData || ''); + } + return decryptedValue; + }; + return wrapper({ + get encryptionKeyId() { + return (0, exports.encryptedDataKeyId)(data); + }, + get serialize() { + return () => data; + }, + get toString() { + return () => JSON.stringify(this.serialize()); + }, + get valueOf() { + return decryptedValueFn; + }, + get toJSON() { + return this.serialize; + } + }); +}; +exports.encryptedIncomingData = encryptedIncomingData; +const encryptedIncomingForeignData = (contractID, _0, data, _1, additionalKeys, additionalData, validatorFn) => { + let decryptedValue; + const decryptedValueFn = () => { + if (decryptedValue) { + return decryptedValue; + } + const rootState = rootStateFn(); + const state = rootState[contractID]; + decryptedValue = decryptData(state, NaN, data, additionalKeys ?? 
rootState.secretKeys, additionalData || '', validatorFn); + if ((0, signedData_js_1.isRawSignedData)(decryptedValue)) { + // TODO: Specify height + return (0, signedData_js_1.signedIncomingData)(contractID, state, decryptedValue, NaN, additionalData || ''); + } + return decryptedValue; + }; + return wrapper({ + get encryptionKeyId() { + return (0, exports.encryptedDataKeyId)(data); + }, + get serialize() { + return () => data; + }, + get toString() { + return () => JSON.stringify(this.serialize()); + }, + get valueOf() { + return decryptedValueFn; + }, + get toJSON() { + return this.serialize; + } + }); +}; +exports.encryptedIncomingForeignData = encryptedIncomingForeignData; +const encryptedIncomingDataWithRawKey = (key, data, additionalData) => { + if (data === undefined || !key) + throw new TypeError('Invalid invocation'); + let decryptedValue; + const eKeyId = (0, crypto_1.keyId)(key); + const decryptedValueFn = () => { + if (decryptedValue) { + return decryptedValue; + } + const state = { + _vm: { + authorizedKeys: { + [eKeyId]: { + purpose: ['enc'], + data: (0, crypto_1.serializeKey)(key, false), + _notBeforeHeight: 0, + _notAfterHeight: undefined + } + } + } + }; + decryptedValue = decryptData(state, NaN, data, { [eKeyId]: key }, additionalData || ''); + return decryptedValue; + }; + return wrapper({ + get encryptionKeyId() { + return (0, exports.encryptedDataKeyId)(data); + }, + get serialize() { + return () => data; + }, + get toString() { + return () => JSON.stringify(this.serialize()); + }, + get valueOf() { + return decryptedValueFn; + }, + get toJSON() { + return this.serialize; + } + }); +}; +exports.encryptedIncomingDataWithRawKey = encryptedIncomingDataWithRawKey; +const encryptedDataKeyId = (data) => { + if (!(0, exports.isRawEncryptedData)(data)) { + throw new errors_js_1.ChelErrorDecryptionError('Invalid message format'); + } + return data[0]; +}; +exports.encryptedDataKeyId = encryptedDataKeyId; +const isRawEncryptedData = (data) => { + if 
(!Array.isArray(data) || data.length !== 2 || data.map(v => typeof v).filter(v => v !== 'string').length !== 0) { + return false; + } + return true; +}; +exports.isRawEncryptedData = isRawEncryptedData; +const unwrapMaybeEncryptedData = (data) => { + if (data == null) + return; + if ((0, exports.isEncryptedData)(data)) { + try { + return { + encryptionKeyId: data.encryptionKeyId, + data: data.valueOf() + }; + } + catch (e) { + console.warn('unwrapMaybeEncryptedData: Unable to decrypt', e); + } + } + else { + return { + encryptionKeyId: null, + data + }; + } +}; +exports.unwrapMaybeEncryptedData = unwrapMaybeEncryptedData; +const maybeEncryptedIncomingData = (contractID, state, data, height, additionalKeys, additionalData, validatorFn) => { + if ((0, exports.isRawEncryptedData)(data)) { + return (0, exports.encryptedIncomingData)(contractID, state, data, height, additionalKeys, additionalData, validatorFn); + } + else { + validatorFn?.(data, ''); + return data; + } +}; +exports.maybeEncryptedIncomingData = maybeEncryptedIncomingData; diff --git a/dist/cjs/encryptedData.d.cts b/dist/cjs/encryptedData.d.cts new file mode 100644 index 0000000..fb95130 --- /dev/null +++ b/dist/cjs/encryptedData.d.cts @@ -0,0 +1,22 @@ +import type { Key } from '@chelonia/crypto'; +import type { ChelContractState } from './types.cjs'; +export interface EncryptedData { + encryptionKeyId: string; + valueOf: () => T; + serialize: (additionalData?: string) => [string, string]; + toString: (additionalData?: string) => string; + toJSON?: () => [string, string]; +} +export declare const isEncryptedData: (o: unknown) => o is EncryptedData; +export declare const encryptedOutgoingData: (stateOrContractID: string | ChelContractState, eKeyId: string, data: T) => EncryptedData; +export declare const encryptedOutgoingDataWithRawKey: (key: Key, data: T) => EncryptedData; +export declare const encryptedIncomingData: (contractID: string, state: ChelContractState, data: [string, string], height: number, 
additionalKeys?: Record, additionalData?: string, validatorFn?: (v: T, id: string) => void) => EncryptedData; +export declare const encryptedIncomingForeignData: (contractID: string, _0: never, data: [string, string], _1: never, additionalKeys?: Record, additionalData?: string, validatorFn?: (v: T, id: string) => void) => EncryptedData; +export declare const encryptedIncomingDataWithRawKey: (key: Key, data: [string, string], additionalData?: string) => EncryptedData; +export declare const encryptedDataKeyId: (data: unknown) => string; +export declare const isRawEncryptedData: (data: unknown) => data is [string, string]; +export declare const unwrapMaybeEncryptedData: (data: T | EncryptedData) => { + encryptionKeyId: string | null; + data: T; +} | undefined; +export declare const maybeEncryptedIncomingData: (contractID: string, state: ChelContractState, data: T | [string, string], height: number, additionalKeys?: Record, additionalData?: string, validatorFn?: (v: T, id: string) => void) => T | EncryptedData; diff --git a/dist/cjs/errors.cjs b/dist/cjs/errors.cjs new file mode 100644 index 0000000..710dbc9 --- /dev/null +++ b/dist/cjs/errors.cjs @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ChelErrorResourceGone = exports.ChelErrorUnexpectedHttpResponseCode = exports.ChelErrorFetchServerTimeFailed = exports.ChelErrorSignatureKeyNotFound = exports.ChelErrorSignatureKeyUnauthorized = exports.ChelErrorSignatureError = exports.ChelErrorDecryptionKeyNotFound = exports.ChelErrorDecryptionError = exports.ChelErrorForkedChain = exports.ChelErrorUnrecoverable = exports.ChelErrorKeyAlreadyExists = exports.ChelErrorUnexpected = exports.ChelErrorDBConnection = exports.ChelErrorDBBadPreviousHEAD = exports.ChelErrorAlreadyProcessed = exports.ChelErrorWarning = exports.ChelErrorGenerator = void 0; +// ugly boilerplate because JavaScript is stupid +// 
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error#Custom_Error_Types +const ChelErrorGenerator = (name, base = Error) => (class extends base { + constructor(...params) { + super(...params); + this.name = name; // string literal so minifier doesn't overwrite + // Polyfill for cause property + if (params[1]?.cause !== this.cause) { + Object.defineProperty(this, 'cause', { configurable: true, writable: true, value: params[1]?.cause }); + } + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + } +}); +exports.ChelErrorGenerator = ChelErrorGenerator; +exports.ChelErrorWarning = (0, exports.ChelErrorGenerator)('ChelErrorWarning'); +exports.ChelErrorAlreadyProcessed = (0, exports.ChelErrorGenerator)('ChelErrorAlreadyProcessed'); +exports.ChelErrorDBBadPreviousHEAD = (0, exports.ChelErrorGenerator)('ChelErrorDBBadPreviousHEAD'); +exports.ChelErrorDBConnection = (0, exports.ChelErrorGenerator)('ChelErrorDBConnection'); +exports.ChelErrorUnexpected = (0, exports.ChelErrorGenerator)('ChelErrorUnexpected'); +exports.ChelErrorKeyAlreadyExists = (0, exports.ChelErrorGenerator)('ChelErrorKeyAlreadyExists'); +exports.ChelErrorUnrecoverable = (0, exports.ChelErrorGenerator)('ChelErrorUnrecoverable'); +exports.ChelErrorForkedChain = (0, exports.ChelErrorGenerator)('ChelErrorForkedChain'); +exports.ChelErrorDecryptionError = (0, exports.ChelErrorGenerator)('ChelErrorDecryptionError'); +exports.ChelErrorDecryptionKeyNotFound = (0, exports.ChelErrorGenerator)('ChelErrorDecryptionKeyNotFound', exports.ChelErrorDecryptionError); +exports.ChelErrorSignatureError = (0, exports.ChelErrorGenerator)('ChelErrorSignatureError'); +exports.ChelErrorSignatureKeyUnauthorized = (0, exports.ChelErrorGenerator)('ChelErrorSignatureKeyUnauthorized', exports.ChelErrorSignatureError); +exports.ChelErrorSignatureKeyNotFound = (0, exports.ChelErrorGenerator)('ChelErrorSignatureKeyNotFound', exports.ChelErrorSignatureError); 
+exports.ChelErrorFetchServerTimeFailed = (0, exports.ChelErrorGenerator)('ChelErrorFetchServerTimeFailed'); +exports.ChelErrorUnexpectedHttpResponseCode = (0, exports.ChelErrorGenerator)('ChelErrorUnexpectedHttpResponseCode'); +exports.ChelErrorResourceGone = (0, exports.ChelErrorGenerator)('ChelErrorResourceGone', exports.ChelErrorUnexpectedHttpResponseCode); diff --git a/dist/cjs/errors.d.cts b/dist/cjs/errors.d.cts new file mode 100644 index 0000000..554550a --- /dev/null +++ b/dist/cjs/errors.d.cts @@ -0,0 +1,17 @@ +export declare const ChelErrorGenerator: (name: string, base?: ErrorConstructor) => ErrorConstructor; +export declare const ChelErrorWarning: typeof Error; +export declare const ChelErrorAlreadyProcessed: typeof Error; +export declare const ChelErrorDBBadPreviousHEAD: typeof Error; +export declare const ChelErrorDBConnection: typeof Error; +export declare const ChelErrorUnexpected: typeof Error; +export declare const ChelErrorKeyAlreadyExists: typeof Error; +export declare const ChelErrorUnrecoverable: typeof Error; +export declare const ChelErrorForkedChain: typeof Error; +export declare const ChelErrorDecryptionError: typeof Error; +export declare const ChelErrorDecryptionKeyNotFound: typeof Error; +export declare const ChelErrorSignatureError: typeof Error; +export declare const ChelErrorSignatureKeyUnauthorized: typeof Error; +export declare const ChelErrorSignatureKeyNotFound: typeof Error; +export declare const ChelErrorFetchServerTimeFailed: typeof Error; +export declare const ChelErrorUnexpectedHttpResponseCode: typeof Error; +export declare const ChelErrorResourceGone: typeof Error; diff --git a/dist/cjs/events.cjs b/dist/cjs/events.cjs new file mode 100644 index 0000000..d2071b6 --- /dev/null +++ b/dist/cjs/events.cjs @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PERSISTENT_ACTION_TOTAL_FAILURE = exports.PERSISTENT_ACTION_SUCCESS = exports.PERSISTENT_ACTION_FAILURE = 
exports.CONTRACT_HAS_RECEIVED_KEYS = exports.CONTRACT_IS_PENDING_KEY_REQUESTS = exports.CONTRACT_UNREGISTERED = exports.CONTRACT_REGISTERED = exports.EVENT_HANDLED_READY = exports.EVENT_PUBLISHING_ERROR = exports.EVENT_PUBLISHED = exports.EVENT_HANDLED = exports.CONTRACTS_MODIFIED_READY = exports.CONTRACTS_MODIFIED = exports.CONTRACT_IS_SYNCING = exports.CHELONIA_RESET = void 0; +exports.CHELONIA_RESET = 'chelonia-reset'; +exports.CONTRACT_IS_SYNCING = 'contract-is-syncing'; +exports.CONTRACTS_MODIFIED = 'contracts-modified'; +exports.CONTRACTS_MODIFIED_READY = 'contracts-modified-ready'; +exports.EVENT_HANDLED = 'event-handled'; +exports.EVENT_PUBLISHED = 'event-published'; +exports.EVENT_PUBLISHING_ERROR = 'event-publishing-error'; +exports.EVENT_HANDLED_READY = 'event-handled-ready'; +exports.CONTRACT_REGISTERED = 'contract-registered'; +exports.CONTRACT_UNREGISTERED = 'contract-unregistered'; +exports.CONTRACT_IS_PENDING_KEY_REQUESTS = 'contract-is-pending-key-requests'; +exports.CONTRACT_HAS_RECEIVED_KEYS = 'contract-has-received-keys'; +exports.PERSISTENT_ACTION_FAILURE = 'persistent-action-failure'; +exports.PERSISTENT_ACTION_SUCCESS = 'persistent-action-success'; +exports.PERSISTENT_ACTION_TOTAL_FAILURE = 'persistent-action-total_failure'; diff --git a/dist/cjs/events.d.cts b/dist/cjs/events.d.cts new file mode 100644 index 0000000..909146b --- /dev/null +++ b/dist/cjs/events.d.cts @@ -0,0 +1,15 @@ +export declare const CHELONIA_RESET = "chelonia-reset"; +export declare const CONTRACT_IS_SYNCING = "contract-is-syncing"; +export declare const CONTRACTS_MODIFIED = "contracts-modified"; +export declare const CONTRACTS_MODIFIED_READY = "contracts-modified-ready"; +export declare const EVENT_HANDLED = "event-handled"; +export declare const EVENT_PUBLISHED = "event-published"; +export declare const EVENT_PUBLISHING_ERROR = "event-publishing-error"; +export declare const EVENT_HANDLED_READY = "event-handled-ready"; +export declare const CONTRACT_REGISTERED = 
"contract-registered"; +export declare const CONTRACT_UNREGISTERED = "contract-unregistered"; +export declare const CONTRACT_IS_PENDING_KEY_REQUESTS = "contract-is-pending-key-requests"; +export declare const CONTRACT_HAS_RECEIVED_KEYS = "contract-has-received-keys"; +export declare const PERSISTENT_ACTION_FAILURE = "persistent-action-failure"; +export declare const PERSISTENT_ACTION_SUCCESS = "persistent-action-success"; +export declare const PERSISTENT_ACTION_TOTAL_FAILURE = "persistent-action-total_failure"; diff --git a/dist/cjs/files.cjs b/dist/cjs/files.cjs new file mode 100644 index 0000000..30c416f --- /dev/null +++ b/dist/cjs/files.cjs @@ -0,0 +1,393 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.noneHandlers = exports.aes256gcmHandlers = void 0; +const encodeMultipartMessage_1 = __importDefault(require("@apeleghq/multipart-parser/encodeMultipartMessage")); +const decrypt_1 = __importDefault(require("@apeleghq/rfc8188/decrypt")); +const encodings_1 = require("@apeleghq/rfc8188/encodings"); +const encrypt_1 = __importDefault(require("@apeleghq/rfc8188/encrypt")); +const crypto_1 = require("@chelonia/crypto"); +const bytes_1 = require("@chelonia/multiformats/bytes"); +const sbp_1 = __importDefault(require("@sbp/sbp")); +const turtledash_1 = require("turtledash"); +const functions_js_1 = require("./functions.cjs"); +const utils_js_1 = require("./utils.cjs"); +// Snippet from +// Node.js supports request streams, but also this check isn't meant for Node.js +// This part only checks for client-side support. Later, when we try uploading +// a file for the first time, we'll check if requests work, as streams are not +// supported in HTTP/1.1 and lower versions. 
+let supportsRequestStreams = typeof window !== 'object' || (() => { + let duplexAccessed = false; + const hasContentType = new Request('', { + body: new ReadableStream(), + method: 'POST', + get duplex() { + duplexAccessed = true; + return 'half'; + } + }).headers.has('content-type'); + return duplexAccessed && !hasContentType; +})(); +const streamToUint8Array = async (s) => { + const reader = s.getReader(); + const chunks = []; + let length = 0; + for (;;) { + const result = await reader.read(); + if (result.done) + break; + chunks.push((0, bytes_1.coerce)(result.value)); + length += result.value.byteLength; + } + const body = new Uint8Array(length); + chunks.reduce((offset, chunk) => { + body.set(chunk, offset); + return offset + chunk.byteLength; + }, 0); + return body; +}; +// Check for streaming support, as of today (Feb 2024) only Blink- +// based browsers support this (i.e., Firefox and Safari don't). +const ArrayBufferToUint8ArrayStream = async function (connectionURL, s) { + // Even if the browser supports streams, some browsers (e.g., Chrome) also + // require that the server support HTTP/2 + if (supportsRequestStreams === true) { + await this.config.fetch(`${connectionURL}/streams-test`, { + method: 'POST', + body: new ReadableStream({ start(c) { c.enqueue(Buffer.from('ok')); c.close(); } }), + duplex: 'half' + }).then((r) => { + if (!r.ok) + throw new Error('Unexpected response'); + // supportsRequestStreams is tri-state + supportsRequestStreams = 2; + }).catch(() => { + console.info('files: Disabling streams support because the streams test failed'); + supportsRequestStreams = false; + }); + } + if (!supportsRequestStreams) { + return await streamToUint8Array(s); + } + return s.pipeThrough( + // eslint-disable-next-line no-undef + new TransformStream({ + transform(chunk, controller) { + controller.enqueue((0, bytes_1.coerce)(chunk)); + } + })); +}; +const computeChunkDescriptors = (inStream) => { + let length = 0; + const [lengthStream, cidStream] = 
inStream.tee(); + const lengthPromise = new Promise((resolve, reject) => { + lengthStream.pipeTo(new WritableStream({ + write(chunk) { + length += chunk.byteLength; + }, + close() { + resolve(length); + }, + abort(reason) { + reject(reason); + } + })); + }); + const cidPromise = (0, functions_js_1.createCIDfromStream)(cidStream, functions_js_1.multicodes.SHELTER_FILE_CHUNK); + return Promise.all([lengthPromise, cidPromise]); +}; +const fileStream = (chelonia, manifest) => { + const dataGenerator = async function* () { + let readSize = 0; + for (const chunk of manifest.chunks) { + if (!Array.isArray(chunk) || + typeof chunk[0] !== 'number' || + typeof chunk[1] !== 'string') { + throw new Error('Invalid chunk descriptor'); + } + const chunkResponse = await chelonia.config.fetch(`${chelonia.config.connectionURL}/file/${chunk[1]}`, { + method: 'GET', + signal: chelonia.abortController.signal + }); + if (!chunkResponse.ok) { + throw new Error('Unable to retrieve manifest'); + } + // TODO: We're reading the chunks in their entirety instead of using the + // stream interface. In the future, this can be changed to get a stream + // instead. Ensure then that the following checks are replaced with a + // streaming version (length and CID) + const chunkBinary = await chunkResponse.arrayBuffer(); + if (chunkBinary.byteLength !== chunk[0]) + throw new Error('mismatched chunk size'); + readSize += chunkBinary.byteLength; + if (readSize > manifest.size) + throw new Error('read size exceeds declared size'); + if ((0, functions_js_1.createCID)((0, bytes_1.coerce)(chunkBinary), functions_js_1.multicodes.SHELTER_FILE_CHUNK) !== chunk[1]) + throw new Error('mismatched chunk hash'); + yield chunkBinary; + } + // Now that we're done, we check to see if we read the correct size + // If all went well, we should have and this would never throw. However, + // if the payload was tampered with, we could have read a different size + // than expected. 
This will throw at the end, after all chunks are processed + // and after some or all of the data have already been consumed. + // If integrity of the entire payload is important, consumers must buffer + // the stream and wait until the end before any processing. + if (readSize !== manifest.size) + throw new Error('mismatched size'); + }; + const dataIterator = dataGenerator(); + return new ReadableStream({ + async pull(controller) { + try { + const chunk = await dataIterator.next(); + if (chunk.done) { + controller.close(); + return; + } + controller.enqueue(chunk.value); + } + catch (e) { + controller.error(e); + } + } + }); +}; +exports.aes256gcmHandlers = { + upload: (_chelonia, manifestOptions) => { + // IKM stands for Input Keying Material, and is a random value used to + // derive the encryption used in the chunks. See RFC 8188 for how the + // actual encryption key gets derived from the IKM. + const params = manifestOptions['cipher-params']; + let IKM = params?.IKM; + const recordSize = (params?.rs ?? 1 << 16); + if (!IKM) { + IKM = new Uint8Array(33); + self.crypto.getRandomValues(IKM); + } + // The keyId is only used as a sanity check but otherwise it is not needed + // Because the keyId is computed from the IKM, which is a secret, it is + // truncated to just eight characters so that it doesn't disclose too much + // information about the IKM (in theory, since it's a random string 33 bytes + // long, a full hash shouldn't disclose too much information anyhow). + // The reason the keyId is not _needed_ is that the IKM is part of the + // downloadParams, so anyone downloading a file should have the required + // context, and there is exactly one valid IKM for any downloadParams. + // By truncating the keyId, the only way to fully verify whether a given + // IKM decrypts a file is by attempting decryption. 
+ // A side-effect of truncating the keyId is that, if the IKM were shared + // some other way (e.g., using the OP_KEY_SHARE mechanism), because of + // collisions it may not always be possible to look up the correct IKM. + // Therefore, a handler that uses a different strategy than the one used + // here (namely, including the IKM in the downloadParams) may need to use + // longer key IDs, possibly a full hash. + const keyId = (0, functions_js_1.blake32Hash)('aes256gcm-keyId' + (0, functions_js_1.blake32Hash)(IKM)).slice(-8); + const binaryKeyId = Buffer.from(keyId); + return { + cipherParams: { + keyId + }, + streamHandler: async (stream) => { + return await (0, encrypt_1.default)(encodings_1.aes256gcm, stream, recordSize, binaryKeyId, IKM); + }, + downloadParams: { + IKM: Buffer.from(IKM).toString('base64'), + rs: recordSize + } + }; + }, + download: (chelonia, downloadParams, manifest) => { + const IKMb64 = downloadParams.IKM; + if (!IKMb64) { + throw new Error('Missing IKM in downloadParams'); + } + const IKM = Buffer.from(IKMb64, 'base64'); + const keyId = (0, functions_js_1.blake32Hash)('aes256gcm-keyId' + (0, functions_js_1.blake32Hash)(IKM)).slice(-8); + if (!manifest['cipher-params'] || !manifest['cipher-params'].keyId) { + throw new Error('Missing cipher-params'); + } + if (keyId !== manifest['cipher-params'].keyId) { + throw new Error('Key ID mismatch'); + } + const maxRecordSize = downloadParams.rs ?? 
1 << 27; // 128 MiB + return { + payloadHandler: async () => { + const bytes = await streamToUint8Array((0, decrypt_1.default)(encodings_1.aes256gcm, fileStream(chelonia, manifest), (actualKeyId) => { + if (Buffer.from(actualKeyId).toString() !== keyId) { + throw new Error('Invalid key ID'); + } + return IKM; + }, maxRecordSize)); + return new Blob([bytes], { type: manifest.type || 'application/octet-stream' }); + } + }; + } +}; +exports.noneHandlers = { + upload: () => { + return { + cipherParams: undefined, + streamHandler: (stream) => { + return stream; + }, + downloadParams: undefined + }; + }, + download: (chelonia, _downloadParams, manifest) => { + return { + payloadHandler: async () => { + const bytes = await streamToUint8Array(fileStream(chelonia, manifest)); + return new Blob([bytes], { type: manifest.type || 'application/octet-stream' }); + } + }; + } +}; +// TODO: Move into Chelonia config +const cipherHandlers = { + aes256gcm: exports.aes256gcmHandlers, + none: exports.noneHandlers +}; +exports.default = (0, sbp_1.default)('sbp/selectors/register', { + 'chelonia/fileUpload': async function (chunks, manifestOptions, { billableContractID } = {}) { + if (!Array.isArray(chunks)) + chunks = [chunks]; + const chunkDescriptors = []; + const cipherHandler = await cipherHandlers[manifestOptions.cipher]?.upload?.(this, manifestOptions); + if (!cipherHandler) + throw new Error('Unsupported cipher'); + const cipherParams = cipherHandler.cipherParams; + const transferParts = await Promise.all(chunks.map(async (chunk, i) => { + const stream = chunk.stream(); + const encryptedStream = await cipherHandler.streamHandler(stream); + const [body, s] = encryptedStream.tee(); + chunkDescriptors.push(computeChunkDescriptors(s)); + return { + headers: new Headers([ + ['content-disposition', `form-data; name="${i}"; filename="${i}"`], + ['content-type', 'application/octet-stream'] + ]), + body + }; + })); + transferParts.push({ + headers: new Headers([ + ['content-disposition', 
'form-data; name="manifest"; filename="manifest.json"'], + ['content-type', 'application/vnd.shelter.filemanifest'] + ]), + body: new ReadableStream({ + async start(controller) { + const chunks = await Promise.all(chunkDescriptors); + const manifest = { + version: '1.0.0', + // ?? undefined coerces null and undefined to undefined + // This ensures that null or undefined values don't make it to the + // JSON (otherwise, null values _would_ be stringified as 'null') + type: manifestOptions.type ?? undefined, + meta: manifestOptions.meta ?? undefined, + cipher: manifestOptions.cipher, + 'cipher-params': cipherParams, + size: chunks.reduce((acc, [cv]) => acc + cv, 0), + chunks, + 'name-map': manifestOptions['name-map'] ?? undefined, + alternatives: manifestOptions.alternatives ?? undefined + }; + controller.enqueue(Buffer.from(JSON.stringify(manifest))); + controller.close(); + } + }) + }); + // TODO: Using `self.crypto.randomUUID` breaks the tests. Maybe upgrading + // Cypress would fix this. + const boundary = typeof self.crypto?.randomUUID === 'function' + ? self.crypto.randomUUID() + // If randomUUID not available, we instead compute a random boundary + // The indirect call to Math.random (`(0, Math.random)`) is to explicitly + // mark that we intend on using Math.random, even though it's not a + // CSPRNG, so that it's not reported as a bug in by static analysis tools. 
+ : new Array(36).fill('').map(() => 'abcdefghijklmnopqrstuvwxyz'[(0, Math.random)() * 26 | 0]).join(''); + const stream = (0, encodeMultipartMessage_1.default)(boundary, transferParts); + const deletionToken = 'deletionToken' + (0, crypto_1.generateSalt)(); + const deletionTokenHash = (0, functions_js_1.blake32Hash)(deletionToken); + const uploadResponse = await this.config.fetch(`${this.config.connectionURL}/file`, { + method: 'POST', + signal: this.abortController.signal, + body: await ArrayBufferToUint8ArrayStream.call(this, this.config.connectionURL, stream), + headers: new Headers([ + ...(billableContractID ? [['authorization', utils_js_1.buildShelterAuthorizationHeader.call(this, billableContractID)]] : []), + ['content-type', `multipart/form-data; boundary=${boundary}`], + ['shelter-deletion-token-digest', deletionTokenHash] + ]), + duplex: 'half' + }); + if (!uploadResponse.ok) + throw new Error('Error uploading file'); + return { + download: { + manifestCid: await uploadResponse.text(), + downloadParams: cipherHandler.downloadParams + }, + delete: deletionToken + }; + }, + 'chelonia/fileDownload': async function (downloadOptions, manifestChecker) { + // Using a function to prevent accidental logging + const { manifestCid, downloadParams } = downloadOptions.valueOf(); + const manifestResponse = await this.config.fetch(`${this.config.connectionURL}/file/${manifestCid}`, { + method: 'GET', + signal: this.abortController.signal + }); + if (!manifestResponse.ok) { + throw new Error('Unable to retrieve manifest'); + } + const manifestBinary = await manifestResponse.arrayBuffer(); + if ((0, functions_js_1.createCID)((0, bytes_1.coerce)(manifestBinary), functions_js_1.multicodes.SHELTER_FILE_MANIFEST) !== manifestCid) + throw new Error('mismatched manifest hash'); + const manifest = JSON.parse(Buffer.from(manifestBinary).toString()); + if (typeof manifest !== 'object') + throw new Error('manifest format is invalid'); + if (manifest.version !== '1.0.0') + throw 
new Error('unsupported manifest version'); + if (!Array.isArray(manifest.chunks)) + throw new Error('missing required field: chunks'); + if (manifestChecker) { + const proceed = await manifestChecker?.(manifest); + if (!proceed) + return false; + } + const cipherHandler = await cipherHandlers[manifest.cipher]?.download?.(this, downloadParams, manifest); + if (!cipherHandler) + throw new Error('Unsupported cipher'); + return cipherHandler.payloadHandler(); + }, + 'chelonia/fileDelete': async function (manifestCid, credentials = {}) { + if (!manifestCid) { + throw new TypeError('A manifest CID must be provided'); + } + if (!Array.isArray(manifestCid)) + manifestCid = [manifestCid]; + return await Promise.allSettled(manifestCid.map(async (cid) => { + const hasCredential = (0, turtledash_1.has)(credentials, cid); + const hasToken = (0, turtledash_1.has)(credentials[cid], 'token') && credentials[cid].token; + const hasBillableContractID = (0, turtledash_1.has)(credentials[cid], 'billableContractID') && credentials[cid].billableContractID; + if (!hasCredential || hasToken === hasBillableContractID) { + throw new TypeError(`Either a token or a billable contract ID must be provided for ${cid}`); + } + const response = await this.config.fetch(`${this.config.connectionURL}/deleteFile/${cid}`, { + method: 'POST', + signal: this.abortController.signal, + headers: new Headers([ + ['authorization', + hasToken + ? 
`bearer ${credentials[cid].token.valueOf()}` + : utils_js_1.buildShelterAuthorizationHeader.call(this, credentials[cid].billableContractID)] + ]) + }); + if (!response.ok) { + throw new Error(`Unable to delete file ${cid}`); + } + })); + } +}); diff --git a/dist/cjs/files.d.cts b/dist/cjs/files.d.cts new file mode 100644 index 0000000..7c22c52 --- /dev/null +++ b/dist/cjs/files.d.cts @@ -0,0 +1,31 @@ +import { CheloniaContext, ChelFileManifest } from './types.cjs'; +export declare const aes256gcmHandlers: { + upload: (_chelonia: CheloniaContext, manifestOptions: ChelFileManifest) => { + cipherParams: { + keyId: string; + }; + streamHandler: (stream: ReadableStream) => Promise>; + downloadParams: { + IKM: string; + rs: number; + }; + }; + download: (chelonia: CheloniaContext, downloadParams: { + IKM?: string; + rs?: number; + }, manifest: ChelFileManifest) => { + payloadHandler: () => Promise; + }; +}; +export declare const noneHandlers: { + upload: () => { + cipherParams: undefined; + streamHandler: (stream: ReadableStream) => ReadableStream; + downloadParams: undefined; + }; + download: (chelonia: CheloniaContext, _downloadParams: object, manifest: ChelFileManifest) => { + payloadHandler: () => Promise; + }; +}; +declare const _default: string[]; +export default _default; diff --git a/dist/cjs/functions.cjs b/dist/cjs/functions.cjs new file mode 100644 index 0000000..655aa72 --- /dev/null +++ b/dist/cjs/functions.cjs @@ -0,0 +1,129 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getSubscriptionId = exports.bytesToB64 = exports.strToB64 = exports.strToBuf = exports.bufToB64 = exports.b64ToStr = exports.b64ToBuf = exports.maybeParseCID = exports.parseCID = exports.multicodes = void 0; +exports.createCIDfromStream = createCIDfromStream; +exports.createCID = createCID; +exports.blake32Hash = blake32Hash; +const base58_1 = require("@chelonia/multiformats/bases/base58"); +const blake2b_1 = 
require("@chelonia/multiformats/blake2b"); +const blake2bstream_1 = require("@chelonia/multiformats/blake2bstream"); +const cid_1 = require("@chelonia/multiformats/cid"); +// Use 'buffer' instead of 'node:buffer' to polyfill in the browser +const buffer_1 = require("buffer"); +const turtledash_1 = require("turtledash"); +// Values from https://github.com/multiformats/multicodec/blob/master/table.csv +exports.multicodes = { + RAW: 0x00, + JSON: 0x0200, + SHELTER_CONTRACT_MANIFEST: 0x511e00, + SHELTER_CONTRACT_TEXT: 0x511e01, + SHELTER_CONTRACT_DATA: 0x511e02, + SHELTER_FILE_MANIFEST: 0x511e03, + SHELTER_FILE_CHUNK: 0x511e04 +}; +const parseCID = (cid) => { + if (!cid || cid.length < 52 || cid.length > 64) { + throw new RangeError('CID length too short or too long'); + } + const parsed = cid_1.CID.parse(cid, base58_1.base58btc); + if (parsed.version !== 1 || + parsed.multihash.code !== blake2b_1.blake2b256.code || + !Object.values(exports.multicodes).includes(parsed.code)) { + throw new Error('Invalid CID'); + } + return parsed; +}; +exports.parseCID = parseCID; +const maybeParseCID = (cid) => { + try { + return (0, exports.parseCID)(cid); + } + catch { + // Ignore errors if the CID couldn't be parsed + return null; + } +}; +exports.maybeParseCID = maybeParseCID; +// Makes the `Buffer` global available in the browser if needed. +if (typeof globalThis === 'object' && !(0, turtledash_1.has)(globalThis, 'Buffer')) { + globalThis.Buffer = buffer_1.Buffer; +} +async function createCIDfromStream(data, multicode = exports.multicodes.RAW) { + const uint8array = typeof data === 'string' ? new TextEncoder().encode(data) : data; + const digest = await blake2bstream_1.blake2b256stream.digest(uint8array); + return cid_1.CID.create(1, multicode, digest).toString(base58_1.base58btc); +} +// TODO: implement a streaming hashing function for large files. +// Note: in fact this returns a serialized CID, not a CID object. 
+function createCID(data, multicode = exports.multicodes.RAW) { + const uint8array = typeof data === 'string' ? new TextEncoder().encode(data) : data; + const digest = blake2b_1.blake2b256.digest(uint8array); + return cid_1.CID.create(1, multicode, digest).toString(base58_1.base58btc); +} +function blake32Hash(data) { + const uint8array = typeof data === 'string' ? new TextEncoder().encode(data) : data; + const digest = blake2b_1.blake2b256.digest(uint8array); + // While `digest.digest` is only 32 bytes long in this case, + // `digest.bytes` is 36 bytes because it includes a multiformat prefix. + return base58_1.base58btc.encode(digest.bytes); +} +// NOTE: to preserve consistency across browser and node, we use the Buffer +// class. We could use btoa and atob in web browsers (functions that +// are unavailable on Node.js), but they do not support Unicode, +// and you have to jump through some hoops to get it to work: +// https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/btoa#Unicode_strings +// These hoops might result in inconsistencies between Node.js and the frontend. 
+const b64ToBuf = (b64) => buffer_1.Buffer.from(b64, 'base64'); +exports.b64ToBuf = b64ToBuf; +const b64ToStr = (b64) => (0, exports.b64ToBuf)(b64).toString('utf8'); +exports.b64ToStr = b64ToStr; +const bufToB64 = (buf) => buffer_1.Buffer.from(buf).toString('base64'); +exports.bufToB64 = bufToB64; +const strToBuf = (str) => buffer_1.Buffer.from(str, 'utf8'); +exports.strToBuf = strToBuf; +const strToB64 = (str) => (0, exports.strToBuf)(str).toString('base64'); +exports.strToB64 = strToB64; +const bytesToB64 = (ary) => buffer_1.Buffer.from(ary).toString('base64'); +exports.bytesToB64 = bytesToB64; +// Generate an UUID from a `PushSubscription' +const getSubscriptionId = async (subscriptionInfo) => { + const textEncoder = new TextEncoder(); + // + const endpoint = textEncoder.encode(subscriptionInfo.endpoint); + // + const p256dh = textEncoder.encode(subscriptionInfo.keys.p256dh); + const auth = textEncoder.encode(subscriptionInfo.keys.auth); + const canonicalForm = new ArrayBuffer(8 + + (4 + endpoint.byteLength) + (2 + p256dh.byteLength) + + (2 + auth.byteLength)); + const canonicalFormU8 = new Uint8Array(canonicalForm); + const canonicalFormDV = new DataView(canonicalForm); + let offset = 0; + canonicalFormDV.setFloat64(offset, subscriptionInfo.expirationTime == null + ? 
NaN + : subscriptionInfo.expirationTime, false); + offset += 8; + canonicalFormDV.setUint32(offset, endpoint.byteLength, false); + offset += 4; + canonicalFormU8.set(endpoint, offset); + offset += endpoint.byteLength; + canonicalFormDV.setUint16(offset, p256dh.byteLength, false); + offset += 2; + canonicalFormU8.set(p256dh, offset); + offset += p256dh.byteLength; + canonicalFormDV.setUint16(offset, auth.byteLength, false); + offset += 2; + canonicalFormU8.set(auth, offset); + const digest = await crypto.subtle.digest('SHA-384', canonicalForm); + const id = buffer_1.Buffer.from(digest.slice(0, 16)); + id[6] = 0x80 | (id[6] & 0x0F); + id[8] = 0x80 | (id[8] & 0x3F); + return [ + id.slice(0, 4), + id.slice(4, 6), + id.slice(6, 8), + id.slice(8, 10), + id.slice(10, 16) + ].map((p) => p.toString('hex')).join('-'); +}; +exports.getSubscriptionId = getSubscriptionId; diff --git a/dist/cjs/functions.d.cts b/dist/cjs/functions.d.cts new file mode 100644 index 0000000..4933e96 --- /dev/null +++ b/dist/cjs/functions.d.cts @@ -0,0 +1,15 @@ +import { CID } from '@chelonia/multiformats/cid'; +import { Buffer } from 'buffer'; +export declare const multicodes: Record; +export declare const parseCID: (cid: string) => CID; +export declare const maybeParseCID: (cid: string) => CID | null; +export declare function createCIDfromStream(data: string | Uint8Array | ReadableStream, multicode?: number): Promise; +export declare function createCID(data: string | Uint8Array, multicode?: number): string; +export declare function blake32Hash(data: string | Uint8Array): string; +export declare const b64ToBuf: (b64: string) => Buffer; +export declare const b64ToStr: (b64: string) => string; +export declare const bufToB64: (buf: Buffer) => string; +export declare const strToBuf: (str: string) => Buffer; +export declare const strToB64: (str: string) => string; +export declare const bytesToB64: (ary: Uint8Array) => string; +export declare const getSubscriptionId: (subscriptionInfo: ReturnType) => 
Promise; diff --git a/dist/cjs/index.cjs b/dist/cjs/index.cjs new file mode 100644 index 0000000..592f44e --- /dev/null +++ b/dist/cjs/index.cjs @@ -0,0 +1,39 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const chelonia_js_1 = __importDefault(require("./chelonia.cjs")); +const db_js_1 = __importDefault(require("./db.cjs")); +const files_js_1 = __importDefault(require("./files.cjs")); +const persistent_actions_js_1 = __importDefault(require("./persistent-actions.cjs")); +__exportStar(require("./SPMessage.cjs"), exports); +__exportStar(require("./Secret.cjs"), exports); +__exportStar(require("./chelonia.cjs"), exports); +__exportStar(require("./constants.cjs"), exports); +__exportStar(require("./db.cjs"), exports); +__exportStar(require("./encryptedData.cjs"), exports); +__exportStar(require("./errors.cjs"), exports); +__exportStar(require("./events.cjs"), exports); +__exportStar(require("./files.cjs"), exports); +__exportStar(require("./persistent-actions.cjs"), exports); +__exportStar(require("./presets.cjs"), exports); +__exportStar(require("./pubsub/index.cjs"), exports); +__exportStar(require("./signedData.cjs"), exports); 
+__exportStar(require("./types.cjs"), exports); +__exportStar(require("./utils.cjs"), exports); +exports.default = [...chelonia_js_1.default, ...db_js_1.default, ...files_js_1.default, ...persistent_actions_js_1.default]; diff --git a/dist/cjs/index.d.cts b/dist/cjs/index.d.cts new file mode 100644 index 0000000..416920c --- /dev/null +++ b/dist/cjs/index.d.cts @@ -0,0 +1,17 @@ +export * from './SPMessage.cjs'; +export * from './Secret.cjs'; +export * from './chelonia.cjs'; +export * from './constants.cjs'; +export * from './db.cjs'; +export * from './encryptedData.cjs'; +export * from './errors.cjs'; +export * from './events.cjs'; +export * from './files.cjs'; +export * from './persistent-actions.cjs'; +export * from './presets.cjs'; +export * from './pubsub/index.cjs'; +export * from './signedData.cjs'; +export * from './types.cjs'; +export * from './utils.cjs'; +declare const _default: string[]; +export default _default; diff --git a/dist/cjs/internals.cjs b/dist/cjs/internals.cjs new file mode 100644 index 0000000..e70b5ee --- /dev/null +++ b/dist/cjs/internals.cjs @@ -0,0 +1,2241 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +const sbp_1 = __importStar(require("@sbp/sbp")); +const functions_js_1 = require("./functions.cjs"); +const turtledash_1 = require("turtledash"); +const SPMessage_js_1 = require("./SPMessage.cjs"); +const Secret_js_1 = require("./Secret.cjs"); +const constants_js_1 = require("./constants.cjs"); +const crypto_1 = require("@chelonia/crypto"); +require("./db.cjs"); +const encryptedData_js_1 = require("./encryptedData.cjs"); +const errors_js_1 = require("./errors.cjs"); +const events_js_1 = require("./events.cjs"); +const utils_js_1 = require("./utils.cjs"); +const signedData_js_1 = require("./signedData.cjs"); +// import 'ses' +// Used for temporarily storing the missing decryption key IDs in a given +// message +const missingDecryptionKeyIdsMap = new WeakMap(); +const getMsgMeta = function (message, contractID, state, index) { + const signingKeyId = message.signingKeyId(); + let innerSigningKeyId = null; + const config = this.config; + const result = { + signingKeyId, + get signingContractID() { + return (0, utils_js_1.getContractIDfromKeyId)(contractID, signingKeyId, state); + }, + get innerSigningKeyId() { + if (innerSigningKeyId === null) { + const value = message.message(); + const data = 
config.unwrapMaybeEncryptedData(value); + if (data?.data && (0, signedData_js_1.isSignedData)(data.data)) { + innerSigningKeyId = data.data.signingKeyId; + } + else { + innerSigningKeyId = undefined; + } + return innerSigningKeyId; + } + }, + get innerSigningContractID() { + return (0, utils_js_1.getContractIDfromKeyId)(contractID, result.innerSigningKeyId, state); + }, + index + }; + return result; +}; +const keysToMap = function (keys_, height, authorizedKeys) { + // Using cloneDeep to ensure that the returned object is serializable + // Keys in a SPMessage may not be serializable (i.e., supported by the + // structured clone algorithm) when they contain encryptedIncomingData + const keys = keys_.map((key) => { + const data = this.config.unwrapMaybeEncryptedData(key); + if (!data) + return undefined; + if (data.encryptionKeyId) { + data.data._private = data.encryptionKeyId; + } + return data.data; + // eslint-disable-next-line no-use-before-define + }).filter(Boolean); + const keysCopy = (0, turtledash_1.cloneDeep)(keys); + return Object.fromEntries(keysCopy.map((key) => { + key._notBeforeHeight = height; + if (authorizedKeys?.[key.id]) { + if (authorizedKeys[key.id]._notAfterHeight == null) { + throw new errors_js_1.ChelErrorKeyAlreadyExists(`Cannot set existing unrevoked key: ${key.id}`); + } + // If the key was get previously, preserve its _notBeforeHeight + // NOTE: (SECURITY) This may allow keys for periods for which it wasn't + // supposed to be active. This is a trade-off for simplicity, instead of + // considering discrete periods, which is the correct solution + // Discrete ranges *MUST* be implemented because they impact permissions + key._notBeforeHeight = Math.min(height, authorizedKeys[key.id]._notBeforeHeight ?? 
0); + } + else { + key._notBeforeHeight = height; + } + delete key._notAfterHeight; + return [key.id, key]; + })); +}; +const keyRotationHelper = (contractID, state, config, updatedKeysMap, requiredPermissions, outputSelector, outputMapper, internalSideEffectStack) => { + if (!internalSideEffectStack || !Array.isArray(state._volatile?.watch)) + return; + const rootState = (0, sbp_1.default)(config.stateSelector); + const watchMap = Object.create(null); + state._volatile.watch.forEach(([name, cID]) => { + if (!updatedKeysMap[name] || watchMap[cID] === null) { + return; + } + if (!watchMap[cID]) { + if (!rootState.contracts[cID]?.type || !(0, utils_js_1.findSuitableSecretKeyId)(rootState[cID], [SPMessage_js_1.SPMessage.OP_KEY_UPDATE], ['sig'])) { + watchMap[cID] = null; + return; + } + watchMap[cID] = []; + } + watchMap[cID].push(name); + }); + Object.entries(watchMap).forEach(([cID, names]) => { + if (!Array.isArray(names) || !names.length) + return; + const [keyNamesToUpdate, signingKeyId] = names.map((name) => { + const foreignContractKey = rootState[cID]?._vm?.authorizedKeys?.[updatedKeysMap[name].oldKeyId]; + if (!foreignContractKey) + return undefined; + const signingKeyId = (0, utils_js_1.findSuitableSecretKeyId)(rootState[cID], requiredPermissions, ['sig'], foreignContractKey.ringLevel); + if (signingKeyId) { + return [[name, foreignContractKey.name], signingKeyId, rootState[cID]._vm.authorizedKeys[signingKeyId].ringLevel]; + } + return undefined; + // eslint-disable-next-line no-use-before-define + }).filter(Boolean) + .reduce((acc, [name, signingKeyId, ringLevel]) => { + acc[0].push(name); + return ringLevel < acc[2] ? 
[acc[0], signingKeyId, ringLevel] : acc; + }, [[], undefined, Number.POSITIVE_INFINITY]); + if (!signingKeyId) + return; + // Send output based on keyNamesToUpdate, signingKeyId + const contractName = rootState.contracts[cID]?.type; + internalSideEffectStack?.push(() => { + // We can't await because it'll block on a different contract, which + // is possibly waiting on this current contract. + (0, sbp_1.default)(outputSelector, { + contractID: cID, + contractName, + data: keyNamesToUpdate.map(outputMapper).map((v) => { + return v; + }), + signingKeyId + }).catch((e) => { + console.warn(`Error mirroring key operation (${outputSelector}) from ${contractID} to ${cID}: ${e?.message || e}`); + }); + }); + }); +}; +// export const FERAL_FUNCTION = Function +exports.default = (0, sbp_1.default)('sbp/selectors/register', { + // DO NOT CALL ANY OF THESE YOURSELF! + 'chelonia/private/state': function () { + return this.state; + }, + 'chelonia/private/invoke': function (instance, invocation) { + // If this._instance !== instance (i.e., chelonia/reset was called) + if (this._instance !== instance) { + console.info('[\'chelonia/private/invoke] Not proceeding with invocation as Chelonia was restarted', { invocation }); + return; + } + if (Array.isArray(invocation)) { + return (0, sbp_1.default)(...invocation); + } + else if (typeof invocation === 'function') { + return invocation(); + } + else { + throw new TypeError(`[chelonia/private/invoke] Expected invocation to be an array or a function. 
Saw ${typeof invocation} instead.`); + } + }, + 'chelonia/private/queueEvent': function (queueName, invocation) { + return (0, sbp_1.default)('okTurtles.eventQueue/queueEvent', queueName, ['chelonia/private/invoke', this._instance, invocation]); + }, + 'chelonia/private/verifyManifestSignature': function (contractName, manifestHash, manifest) { + // We check that the manifest contains a 'signature' field with the correct + // shape + if (!(0, turtledash_1.has)(manifest, 'signature') || typeof manifest.signature.keyId !== 'string' || typeof manifest.signature.value !== 'string') { + throw new Error(`Invalid or missing signature field for manifest ${manifestHash} (named ${contractName})`); + } + // Now, start the signature verification process + const rootState = (0, sbp_1.default)(this.config.stateSelector); + if (!(0, turtledash_1.has)(rootState, 'contractSigningKeys')) { + this.config.reactiveSet(rootState, 'contractSigningKeys', Object.create(null)); + } + // Because `contractName` comes from potentially unsafe sources (for + // instance, from `processMessage`), the key isn't used directly because + // it could overlap with current or future 'special' key names in JavaScript, + // such as `prototype`, `__proto__`, etc. We also can't guarantee that the + // `contractSigningKeys` always has a null prototype, and, because of the + // way we manage state, neither can we use `Map`. So, we use prefix for the + // lookup key that's unlikely to ever be part of a special JS name. 
+ const contractNameLookupKey = `name:${contractName}`; + // If the contract name has been seen before, validate its signature now + let signatureValidated = false; + if (process.env.UNSAFE_TRUST_ALL_MANIFEST_SIGNING_KEYS !== 'true' && (0, turtledash_1.has)(rootState.contractSigningKeys, contractNameLookupKey)) { + console.info(`[chelonia] verifying signature for ${manifestHash} with an existing key`); + if (!(0, turtledash_1.has)(rootState.contractSigningKeys[contractNameLookupKey], manifest.signature.keyId)) { + console.error(`The manifest with ${manifestHash} (named ${contractName}) claims to be signed with a key with ID ${manifest.signature.keyId}, which is not trusted. The trusted key IDs for this name are:`, Object.keys(rootState.contractSigningKeys[contractNameLookupKey])); + throw new Error(`Invalid or missing signature in manifest ${manifestHash} (named ${contractName}). It claims to be signed with a key with ID ${manifest.signature.keyId}, which has not been authorized for this contract before.`); + } + const signingKey = rootState.contractSigningKeys[contractNameLookupKey][manifest.signature.keyId]; + (0, crypto_1.verifySignature)(signingKey, manifest.body + manifest.head, manifest.signature.value); + console.info(`[chelonia] successful signature verification for ${manifestHash} (named ${contractName}) using the already-trusted key ${manifest.signature.keyId}.`); + signatureValidated = true; + } + // Otherwise, when this is a yet-unseen contract, we parse the body to + // see its allowed signers to trust on first-use (TOFU) + const body = JSON.parse(manifest.body); + // If we don't have a list of authorized signatures yet, verify this + // contract's signature and set the auhorized signing keys + if (!signatureValidated) { + console.info(`[chelonia] verifying signature for ${manifestHash} (named ${contractName}) for the first time`); + if (!(0, turtledash_1.has)(body, 'signingKeys') || !Array.isArray(body.signingKeys)) { + throw new Error(`Invalid 
manifest file ${manifestHash} (named ${contractName}). Its body doesn't contain a 'signingKeys' list'`); + } + let contractSigningKeys; + try { + contractSigningKeys = Object.fromEntries(body.signingKeys.map((serializedKey) => { + return [ + (0, crypto_1.keyId)(serializedKey), + serializedKey + ]; + })); + } + catch (e) { + console.error(`[chelonia] Error parsing the public keys list for ${manifestHash} (named ${contractName})`, e); + throw e; + } + if (!(0, turtledash_1.has)(contractSigningKeys, manifest.signature.keyId)) { + throw new Error(`Invalid or missing signature in manifest ${manifestHash} (named ${contractName}). It claims to be signed with a key with ID ${manifest.signature.keyId}, which is not listed in its 'signingKeys' field.`); + } + (0, crypto_1.verifySignature)(contractSigningKeys[manifest.signature.keyId], manifest.body + manifest.head, manifest.signature.value); + console.info(`[chelonia] successful signature verification for ${manifestHash} (named ${contractName}) using ${manifest.signature.keyId}. 
The following key IDs will now be trusted for this contract name`, Object.keys(contractSigningKeys)); + signatureValidated = true; + rootState.contractSigningKeys[contractNameLookupKey] = contractSigningKeys; + } + // If verification was successful, return the parsed body to make the newly- + // loaded contract available + return body; + }, + 'chelonia/private/loadManifest': async function (contractName, manifestHash) { + if (!contractName || typeof contractName !== 'string') { + throw new Error('Invalid or missing contract name'); + } + if (this.manifestToContract[manifestHash]) { + console.warn('[chelonia]: already loaded manifest', manifestHash); + return; + } + const manifestSource = await (0, sbp_1.default)('chelonia/out/fetchResource', manifestHash, { code: functions_js_1.multicodes.SHELTER_CONTRACT_MANIFEST }); + const manifest = JSON.parse(manifestSource); + const body = (0, sbp_1.default)('chelonia/private/verifyManifestSignature', contractName, manifestHash, manifest); + if (body.name !== contractName) { + throw new Error(`Mismatched contract name. 
Expected ${contractName} but got ${body.name}`); + } + const contractInfo = (this.config.contracts.defaults.preferSlim && body.contractSlim) || body.contract; + console.info(`[chelonia] loading contract '${contractInfo.file}'@'${body.version}' from manifest: ${manifestHash}`); + const source = await (0, sbp_1.default)('chelonia/out/fetchResource', contractInfo.hash, { code: functions_js_1.multicodes.SHELTER_CONTRACT_TEXT }); + const reduceAllow = (acc, v) => { acc[v] = true; return acc; }; + const allowedSels = ['okTurtles.events/on', 'chelonia/defineContract', 'chelonia/out/keyRequest'] + .concat(this.config.contracts.defaults.allowedSelectors) + .reduce(reduceAllow, {}); + const allowedDoms = this.config.contracts.defaults.allowedDomains + .reduce(reduceAllow, {}); + const contractSBP = (selector, ...args) => { + const domain = (0, sbp_1.domainFromSelector)(selector); + if (selector.startsWith(contractName + '/')) { + selector = `${manifestHash}/${selector}`; + } + if (allowedSels[selector] || allowedDoms[domain]) { + return (0, sbp_1.default)(selector, ...args); + } + else { + console.error('[chelonia] selector not on allowlist', { selector, allowedSels, allowedDoms }); + throw new Error(`[chelonia] selector not on allowlist: '${selector}'`); + } + }; + // const saferEval: Function = new FERAL_FUNCTION(` + // eslint-disable-next-line no-new-func + const saferEval = new Function(` + return function (globals) { + // almost a real sandbox + // stops (() => this)().fetch + // needs additional step of locking down Function constructor to stop: + // new (()=>{}).constructor("console.log(typeof this.fetch)")() + globals.self = globals + globals.globalThis = globals + with (new Proxy(globals, { + get (o, p) { return o[p] }, + has (o, p) { /* console.log('has', p); */ return true } + })) { + (function () { + 'use strict' + ${source} + })() + } + } + `)(); + // TODO: lock down Function constructor! could just use SES lockdown() + // or do our own version of it. 
+ // https://github.com/endojs/endo/blob/master/packages/ses/src/tame-function-constructors.js + this.defContractSBP = contractSBP; + this.defContractManifest = manifestHash; + // contracts will also be signed, so even if sandbox breaks we still have protection + saferEval({ + // pass in globals that we want access to by default in the sandbox + // note: you can undefine these by setting them to undefined in exposedGlobals + crypto: { + getRandomValues: (v) => globalThis.crypto.getRandomValues(v) + }, + ...(typeof window === 'object' && window && { + alert: window.alert.bind(window), + confirm: window.confirm.bind(window), + prompt: window.prompt.bind(window) + }), + isNaN, + console, + Object, + Error, + TypeError, + RangeError, + Math, + Symbol, + Date, + Array, + BigInt, + Boolean, + Buffer, + String, + Number, + Int8Array, + Int16Array, + Int32Array, + Uint8Array, + Uint16Array, + Uint32Array, + Float32Array, + Float64Array, + ArrayBuffer, + JSON, + RegExp, + parseFloat, + parseInt, + Promise, + Function, + Map, + WeakMap, + ...this.config.contracts.defaults.exposedGlobals, + require: (dep) => { + return dep === '@sbp/sbp' + ? contractSBP + : this.config.contracts.defaults.modules[dep]; + }, + sbp: contractSBP, + fetchServerTime: async (fallback = true) => { + // If contracts need the current timestamp (for example, for metadata 'createdDate') + // they must call this function so that clients are kept synchronized to the server's + // clock, for consistency, so that if one client's clock is off, it doesn't conflict + // with other client's clocks. 
+ // See: https://github.com/okTurtles/group-income/issues/531 + try { + const response = await this.config.fetch(`${this.config.connectionURL}/time`, { signal: this.abortController.signal }); + return (0, utils_js_1.handleFetchResult)('text')(response); + } + catch (e) { + console.warn('[fetchServerTime] Error', e); + if (fallback) { + return new Date((0, sbp_1.default)('chelonia/time')).toISOString(); + } + throw new errors_js_1.ChelErrorFetchServerTimeFailed('Can not fetch server time. Please check your internet connection.'); + } + } + }); + if (contractName !== this.defContract.name) { + throw new Error(`Invalid contract name for manifest ${manifestHash}. Expected ${contractName} but got ${this.defContract.name}`); + } + this.defContractSelectors.forEach(s => { allowedSels[s] = true; }); + this.manifestToContract[manifestHash] = { + slim: contractInfo === body.contractSlim, + info: contractInfo, + contract: this.defContract + }; + }, + // Warning: avoid using this unless you know what you're doing. Prefer using /remove. 
+ 'chelonia/private/removeImmediately': function (contractID, params) { + const state = (0, sbp_1.default)(this.config.stateSelector); + const contractName = state.contracts[contractID]?.type; + if (!contractName) { + console.error('[chelonia/private/removeImmediately] Missing contract name for contract', { contractID }); + return; + } + const manifestHash = this.config.contracts.manifests[contractName]; + if (manifestHash) { + const destructor = `${manifestHash}/${contractName}/_cleanup`; + // Check if a destructor is defined + if ((0, sbp_1.default)('sbp/selectors/fn', destructor)) { + // And call it + try { + (0, sbp_1.default)(destructor, { contractID, resync: !!params?.resync, state: state[contractID] }); + } + catch (e) { + console.error(`[chelonia/private/removeImmediately] Error at destructor for ${contractID}`, e); + } + } + } + if (params?.resync) { + // If re-syncing, keep the reference count + Object.keys(state.contracts[contractID]) + .filter((k) => k !== 'references') + .forEach((k) => this.config.reactiveDel(state.contracts[contractID], k)); + // If re-syncing, keep state._volatile.watch + Object.keys(state[contractID]) + .filter((k) => k !== '_volatile') + .forEach((k) => this.config.reactiveDel(state[contractID], k)); + if (state[contractID]._volatile) { + Object.keys(state[contractID]._volatile) + .filter((k) => k !== 'watch') + .forEach((k) => this.config.reactiveDel(state[contractID]._volatile, k)); + } + } + else { + delete this.ephemeralReferenceCount[contractID]; + if (params?.permanent) { + // Keep a 'null' state to remember permanently-deleted contracts + // (e.g., when they've been removed from the server) + this.config.reactiveSet(state.contracts, contractID, null); + } + else { + this.config.reactiveDel(state.contracts, contractID); + } + this.config.reactiveDel(state, contractID); + } + this.subscriptionSet.delete(contractID); + // calling this will make pubsub unsubscribe for events on `contractID` + (0, 
sbp_1.default)('okTurtles.events/emit', events_js_1.CONTRACTS_MODIFIED, Array.from(this.subscriptionSet), { + added: [], + removed: [contractID], + permanent: params?.permanent, + resync: params?.resync + }); + }, + // used by, e.g. 'chelonia/contract/wait' + 'chelonia/private/noop': function () { }, + 'chelonia/private/out/sync': function (contractIDs, params) { + const listOfIds = typeof contractIDs === 'string' ? [contractIDs] : contractIDs; + const forcedSync = !!params?.force; + return Promise.all(listOfIds.map(contractID => { + // If this isn't a forced sync and we're already subscribed to the contract, + // only wait on the event queue (as events should come over the subscription) + if (!forcedSync && this.subscriptionSet.has(contractID)) { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + // However, if the contract has been marked as dirty (meaning its state + // could be wrong due to newly received encryption keys), sync it anyhow + // (i.e., disregard the force flag and proceed to sync the contract) + if (!rootState[contractID]?._volatile?.dirty) { + return (0, sbp_1.default)('chelonia/private/queueEvent', contractID, ['chelonia/private/noop']); + } + } + // enqueue this invocation in a serial queue to ensure + // handleEvent does not get called on contractID while it's syncing, + // but after it's finished. This is used in tandem with + // queuing the 'chelonia/private/in/handleEvent' selector, defined below. + // This prevents handleEvent getting called with the wrong previousHEAD for an event. 
+ return (0, sbp_1.default)('chelonia/private/queueEvent', contractID, [ + 'chelonia/private/in/syncContract', contractID, params + ]).catch((err) => { + console.error(`[chelonia] failed to sync ${contractID}:`, err); + throw err; // re-throw the error + }); + })); + }, + 'chelonia/private/out/publishEvent': function (entry, { maxAttempts = 5, headers, billableContractID, bearer } = {}, hooks) { + const contractID = entry.contractID(); + const originalEntry = entry; + return (0, sbp_1.default)('chelonia/private/queueEvent', `publish:${contractID}`, async () => { + let attempt = 1; + let lastAttemptedHeight; + // prepublish is asynchronous to allow for cleanly sending messages to + // different contracts + await hooks?.prepublish?.(entry); + const onreceivedHandler = (_contractID, message) => { + if (entry.hash() === message.hash()) { + (0, sbp_1.default)('okTurtles.events/off', events_js_1.EVENT_HANDLED, onreceivedHandler); + hooks.onprocessed(entry); + } + }; + if (typeof hooks?.onprocessed === 'function') { + (0, sbp_1.default)('okTurtles.events/on', events_js_1.EVENT_HANDLED, onreceivedHandler); + } + // auto resend after short random delay + // https://github.com/okTurtles/group-income/issues/608 + while (true) { + // Queued event to ensure that we send the event with whatever the + // 'latest' state may be for that contract (in case we were receiving + // something over the web socket) + // This also ensures that the state doesn't change while reading it + lastAttemptedHeight = entry.height(); + const newEntry = await (0, sbp_1.default)('chelonia/private/queueEvent', contractID, async () => { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + const state = rootState[contractID]; + const isFirstMessage = entry.isFirstMessage(); + if (!state && !isFirstMessage) { + console.info(`[chelonia] Not sending message as contract state has been removed: ${entry.description()}`); + return; + } + if (hooks?.preSendCheck) { + if (!await 
hooks.preSendCheck(entry, state)) { + console.info(`[chelonia] Not sending message as preSendCheck hook returned non-truish value: ${entry.description()}`); + return; + } + } + // Process message to ensure that it is valid. Should this throw, + // we propagate the error. Calling `processMessage` will perform + // validation by checking signatures, well-formedness and, in the case + // of actions, by also calling both the `validate` method (which + // doesn't mutate the state) and the `process` method (which could + // mutate the state). + // `SPMessage` objects have an implicit `direction` field that's set + // based on how the object was constructed. For messages that will be + // sent to the server (this case), `direction` is set to `outgoing`. + // This `direction` affects how certain errors are reported during + // processing, and is also exposed to contracts (which could then + // alter their behavior based on this) to support some features (such + // as showing users that a certain message is 'pending'). + // Validation ensures that we don't write messages known to be invalid. + // Although those invalid messages will be ignored if sent anyhow, + // sending them is wasteful. + // The only way to know for sure if a message is valid or not is using + // the same logic that would be used if the message was received, + // hence the call to `processMessage`. Validation requires having the + // state and all mutations that would be applied. For example, when + // joining a chatroom, this is usually done by sending an OP_ATOMIC + // that contains OP_KEY_ADD and OP_ACTION_ENCRYPTED. Correctly + // validating this operation requires applying the OP_KEY_ADD to the + // state in order to know whether OP_ACTION_ENCRYPTED has a valid + // signature or not. + // We also rely on this logic to keep different contracts in sync + // when there are side-effects. 
For example, the side-effect in a + // group for someone joining a chatroom can call the `join` action + // on the chatroom unconditionally, since validation will prevent + // the message from being sent. + // Because of this, 'chelonia/private/in/processMessage' SHOULD NOT + // change the global Chelonia state and it MUST NOT call any + // side-effects or change the global state in a way that affects + // the meaning of any future messages or successive invocations. + // Note: mutations to the contract state, if any, are immediately + // discarded (see the temporary object created using `cloneDeep`). + await (0, sbp_1.default)('chelonia/private/in/processMessage', entry, (0, turtledash_1.cloneDeep)(state || {})); + // if this isn't the first event (i.e., OP_CONTRACT), recreate and + // resend message + // This is mainly to set height and previousHEAD. For the first event, + // this doesn't need to be done because previousHEAD is always undefined + // and height is always 0. + // We always call recreateEvent because we may have received new events + // in the web socket + if (!isFirstMessage) { + return (0, utils_js_1.recreateEvent)(entry, state, rootState.contracts[contractID]); + } + return entry; + }); + // If there is no event to send, return + if (!newEntry) + return; + await hooks?.beforeRequest?.(newEntry, entry); + entry = newEntry; + const r = await this.config.fetch(`${this.config.connectionURL}/event`, { + method: 'POST', + body: entry.serialize(), + headers: { + ...headers, + ...bearer && { + Authorization: `Bearer ${bearer}` + }, + ...billableContractID && { + Authorization: utils_js_1.buildShelterAuthorizationHeader.call(this, billableContractID) + }, + 'Content-Type': 'text/plain' + }, + signal: this.abortController.signal + }); + if (r.ok) { + await hooks?.postpublish?.(entry); + return entry; + } + try { + if (r.status === 409) { + if (attempt + 1 > maxAttempts) { + console.error(`[chelonia] failed to publish ${entry.description()} after ${attempt} 
attempts`, entry); + throw new Error(`publishEvent: ${r.status} - ${r.statusText}. attempt ${attempt}`); + } + // create new entry + const randDelay = (0, turtledash_1.randomIntFromRange)(0, 1500); + console.warn(`[chelonia] publish attempt ${attempt} of ${maxAttempts} failed. Waiting ${randDelay} msec before resending ${entry.description()}`); + attempt += 1; + await (0, turtledash_1.delay)(randDelay); // wait randDelay ms before sending it again + // TODO: The [pubsub] code seems to miss events that happened between + // a call to sync and the subscription time. This is a temporary measure + // to handle this until [pubsub] is updated. + if (!entry.isFirstMessage() && entry.height() === lastAttemptedHeight) { + await (0, sbp_1.default)('chelonia/private/out/sync', contractID, { force: true }); + } + } + else { + const message = (await r.json())?.message; + console.error(`[chelonia] ERROR: failed to publish ${entry.description()}: ${r.status} - ${r.statusText}: ${message}`, entry); + throw new Error(`publishEvent: ${r.status} - ${r.statusText}: ${message}`); + } + } + catch (e) { + (0, sbp_1.default)('okTurtles.events/off', events_js_1.EVENT_HANDLED, onreceivedHandler); + throw e; + } + } + }).then((entry) => { + (0, sbp_1.default)('okTurtles.events/emit', events_js_1.EVENT_PUBLISHED, { contractID, message: entry, originalMessage: originalEntry }); + return entry; + }).catch((e) => { + (0, sbp_1.default)('okTurtles.events/emit', events_js_1.EVENT_PUBLISHING_ERROR, { contractID, message: entry, originalMessage: originalEntry, error: e }); + throw e; + }); + }, + 'chelonia/private/out/latestHEADinfo': function (contractID) { + return this.config.fetch(`${this.config.connectionURL}/latestHEADinfo/${contractID}`, { + cache: 'no-store', + signal: this.abortController.signal + }).then((0, utils_js_1.handleFetchResult)('json')); + }, + 'chelonia/private/postKeyShare': function (contractID, previousVolatileState, signingKey) { + const cheloniaState = (0, 
sbp_1.default)(this.config.stateSelector); + const targetState = cheloniaState[contractID]; + if (!targetState) + return; + if (previousVolatileState && (0, turtledash_1.has)(previousVolatileState, 'watch')) { + if (!targetState._volatile) + this.config.reactiveSet(targetState, '_volatile', Object.create(null)); + if (!targetState._volatile.watch) { + this.config.reactiveSet(targetState._volatile, 'watch', previousVolatileState.watch); + } + else if (targetState._volatile.watch !== previousVolatileState.watch) { + previousVolatileState.watch.forEach((pWatch) => { + if (!targetState._volatile.watch.some((tWatch) => { + return (tWatch[0] === pWatch[0]) && (tWatch[1] === pWatch[1]); + })) { + targetState._volatile.watch.push(pWatch); + } + }); + } + } + if (!Array.isArray(targetState._volatile?.pendingKeyRequests)) + return; + this.config.reactiveSet(targetState._volatile, 'pendingKeyRequests', targetState._volatile.pendingKeyRequests.filter((pkr) => pkr?.name !== signingKey.name)); + }, + 'chelonia/private/in/processMessage': async function (message, state, internalSideEffectStack, contractName) { + const [opT, opV] = message.op(); + const hash = message.hash(); + const height = message.height(); + const contractID = message.contractID(); + const manifestHash = message.manifest(); + const signingKeyId = message.signingKeyId(); + const direction = message.direction(); + const config = this.config; + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const opName = Object.entries(SPMessage_js_1.SPMessage).find(([, y]) => y === opT)?.[0]; + console.debug('PROCESSING OPCODE:', opName, 'to', contractID); + if (state?._volatile?.dirty) { + console.debug('IGNORING OPCODE BECAUSE CONTRACT STATE IS MARKED AS DIRTY.', 'OPCODE:', opName, 'CONTRACT:', contractID); + return; + } + if (!state._vm) + state._vm = Object.create(null); + const opFns = { + /* + There are two types of "errors" that we need to consider: + 1. "Ignoring" errors + 2. 
"Failure" errors + Example: OP_KEY_ADD + 1. IGNORING: an error is thrown because we wanted to add a key but the key we wanted to add is already there. This is not a hard error, it's an ignoring error. We don't care that the operation failed in this case because the intent was accomplished. + 2. FAILURE: an error is thrown while attempting to add a key that doesn't exist. + Example: OP_ACTION_ENCRYPTED + 1. IGNORING: An error is thrown because we don't have the key to decrypt the action. We ignore it. + 2. FAILURE: An error is thrown by the process function during processing. + Handling these in OP_ATOMIC + • ALL errors of class "IGNORING" should be ignored. They should not impact our ability to process the rest of the operations in the OP_ATOMIC. No matter how many of these are thrown, it doesn't affect the rest of the operations. + • ANY error of class "FAILURE" will cause the rest of the operations to fail and the state to be reverted to prior to the OP_ATOMIC. No side-effects should be run. Because an intention failed.
+ */ + async [SPMessage_js_1.SPMessage.OP_ATOMIC](v) { + for (let i = 0; i < v.length; i++) { + const u = v[i]; + try { + if (u[0] === SPMessage_js_1.SPMessage.OP_ATOMIC) + throw new Error('Cannot nest OP_ATOMIC'); + if (!(0, utils_js_1.validateKeyPermissions)(message, config, state, signingKeyId, u[0], u[1])) { + throw new Error('Inside OP_ATOMIC: no matching signing key was defined'); + } + await opFns[u[0]](u[1]); + } + catch (e_) { + const e = e_; + if (e && typeof e === 'object') { + if (e.name === 'ChelErrorDecryptionKeyNotFound') { + console.warn(`[chelonia] [OP_ATOMIC] WARN '${e.name}' in processMessage for ${message.description()}: ${e.message}`, e, message.serialize()); + if (e.cause) { + const missingDecryptionKeyIds = missingDecryptionKeyIdsMap.get(message); + if (missingDecryptionKeyIds) { + missingDecryptionKeyIds.add(e.cause); + } + else { + missingDecryptionKeyIdsMap.set(message, new Set([e.cause])); + } + } + continue; + } + else { + (0, utils_js_1.logEvtError)(message, `[chelonia] [OP_ATOMIC] ERROR '${e.name}' in processMessage for ${message.description()}: ${e.message || e}`, e, message.serialize()); + } + console.warn(`[chelonia] [OP_ATOMIC] Error processing ${message.description()}: ${message.serialize()}. Any side effects will be skipped!`); + if (config.strictProcessing) { + throw e; + } + config.hooks.processError?.(e, message, getMsgMeta.call(self, message, contractID, state)); + if (e.name === 'ChelErrorWarning') + continue; + } + else { + (0, utils_js_1.logEvtError)(message, 'Inside OP_ATOMIC: Non-object or null error thrown', contractID, message, i, e); + } + throw e; + } + } + }, + [SPMessage_js_1.SPMessage.OP_CONTRACT](v) { + state._vm.type = v.type; + const keys = keysToMap.call(self, v.keys, height); + state._vm.authorizedKeys = keys; + // Loop through the keys in the contract and try to decrypt all of the private keys + // Example: in the identity contract you have the IEK, IPK, CSK, and CEK. 
+ // When you login you have the IEK which is derived from your password, and you + // will use it to decrypt the rest of the keys which are encrypted with that. + // Specifically, the IEK is used to decrypt the CSKs and the CEKs, which are + // the encrypted versions of the CSK and CEK. + utils_js_1.keyAdditionProcessor.call(self, message, hash, v.keys, state, contractID, signingKey, internalSideEffectStack); + }, + [SPMessage_js_1.SPMessage.OP_ACTION_ENCRYPTED](v) { + if (config.skipActionProcessing) { + if (!config.skipDecryptionAttempts) { + console.log('OP_ACTION_ENCRYPTED: skipped action processing'); + } + return; + } + return opFns[SPMessage_js_1.SPMessage.OP_ACTION_UNENCRYPTED](v.valueOf()); + }, + async [SPMessage_js_1.SPMessage.OP_ACTION_UNENCRYPTED](v) { + if (!config.skipActionProcessing) { + let innerSigningKeyId; + if ((0, signedData_js_1.isSignedData)(v)) { + innerSigningKeyId = v.signingKeyId; + v = v.valueOf(); + } + const { data, meta, action } = v; + if (!config.whitelisted(action)) { + throw new Error(`chelonia: action not whitelisted: '${action}'`); + } + await (0, sbp_1.default)(`${manifestHash}/${action}/process`, { + data, + meta, + hash, + height, + contractID, + direction: message.direction(), + signingKeyId, + get signingContractID() { + return (0, utils_js_1.getContractIDfromKeyId)(contractID, signingKeyId, state); + }, + innerSigningKeyId, + get innerSigningContractID() { + return (0, utils_js_1.getContractIDfromKeyId)(contractID, innerSigningKeyId, state); + } + }, state); + } + }, + [SPMessage_js_1.SPMessage.OP_KEY_SHARE](wv) { + // TODO: Prompt to user if contract not in pending + const data = config.unwrapMaybeEncryptedData(wv); + if (!data) + return; + const v = data.data; + for (const key of v.keys) { + if (key.id && key.meta?.private?.content) { + if (!(0, turtledash_1.has)(state._vm, 'sharedKeyIds')) + state._vm.sharedKeyIds = []; + if (!state._vm.sharedKeyIds.some((sK) => sK.id === key.id)) + state._vm.sharedKeyIds.push({ id: 
key.id, contractID: v.contractID, height, keyRequestHash: v.keyRequestHash, keyRequestHeight: v.keyRequestHeight }); + } + } + // If this is a response to an OP_KEY_REQUEST (marked by the + // presence of the keyRequestHash attribute), then we'll mark the + // key request as completed + // TODO: Verify that the keyRequestHash is what we expect (on the + // other contract's state, we should have a matching structure in + // state._volatile.pendingKeyRequests = [ + // { contractID: "this", name: "name of this signingKeyId", reference: "this reference", hash: "KA" }, ..., but we don't + // have a copy of the keyRequestHash (this would need a new + // message to ourselves in the KR process), so for now we trust + // that if it has keyRequestHash, it's a response to a request + // we sent. + // For similar reasons, we can't check pendingKeyRequests, because + // depending on how and in which order events are processed, it may + // not be available. + // ] + if ((0, turtledash_1.has)(v, 'keyRequestHash') && state._vm.authorizedKeys[signingKeyId].meta?.keyRequest) { + state._vm.authorizedKeys[signingKeyId].meta.keyRequest.responded = hash; + } + internalSideEffectStack?.push(async () => { + delete self.postSyncOperations[contractID]?.['pending-keys-for-' + v.contractID]; + const cheloniaState = (0, sbp_1.default)(self.config.stateSelector); + const targetState = cheloniaState[v.contractID]; + const missingDecryptionKeyIds = cheloniaState.contracts[v.contractID]?.missingDecryptionKeyIds; + let newestEncryptionKeyHeight = Number.POSITIVE_INFINITY; + for (const key of v.keys) { + if (key.id && key.meta?.private?.content) { + // Outgoing messages' keys are always transient + const transient = direction === 'outgoing' || key.meta.private.transient; + if (!(0, sbp_1.default)('chelonia/haveSecretKey', key.id, !transient)) { + try { + const decrypted = key.meta.private.content.valueOf(); + (0, sbp_1.default)('chelonia/storeSecretKeys', new Secret_js_1.Secret([{ + key: (0,
crypto_1.deserializeKey)(decrypted), + transient + }])); + // If we've just received a known missing key (i.e., a key + // that previously resulted in a decryption error), we know + // our state is outdated and we need to re-sync the contract + if (missingDecryptionKeyIds?.includes(key.id)) { + newestEncryptionKeyHeight = Number.NEGATIVE_INFINITY; + } + else if ( + // Otherwise, we make an educated guess on whether a re-sync + // is needed based on the height. + targetState?._vm?.authorizedKeys?.[key.id]?._notBeforeHeight != null && + Array.isArray(targetState._vm.authorizedKeys[key.id].purpose) && + targetState._vm.authorizedKeys[key.id].purpose.includes('enc')) { + newestEncryptionKeyHeight = Math.min(newestEncryptionKeyHeight, targetState._vm.authorizedKeys[key.id]._notBeforeHeight); + } + } + catch (e_) { + const e = e_; + if (e?.name === 'ChelErrorDecryptionKeyNotFound') { + console.warn(`OP_KEY_SHARE (${hash} of ${contractID}) missing secret key: ${e.message}`, e); + } + else { + // Using console.error instead of logEvtError because this + // is a side-effect and not relevant for outgoing messages + console.error(`OP_KEY_SHARE (${hash} of ${contractID}) error '${e.message || e}':`, e); + } + } + } + } + } + // If an encryption key has been shared with _notBefore lower than the + // current height, then the contract must be resynced. 
+ const mustResync = !!(newestEncryptionKeyHeight < cheloniaState.contracts[v.contractID]?.height); + if (mustResync) { + if (!(0, turtledash_1.has)(targetState, '_volatile')) + config.reactiveSet(targetState, '_volatile', Object.create(null)); + config.reactiveSet(targetState._volatile, 'dirty', true); + if (!Object.keys(targetState).some((k) => k !== '_volatile')) { + // If the contract only has _volatile state, we don't force sync it + return; + } + // Mark contracts that have foreign keys that have been received + // as dirty + // First, we group watched keys by key and contracts + const keyDict = Object.create(null); + targetState._volatile?.watch?.forEach(([keyName, contractID]) => { + if (!keyDict[keyName]) { + keyDict[keyName] = [contractID]; + return; + } + keyDict[keyName].push(contractID); + }); + // Then, see which of those contracts need to be updated + const contractIdsToUpdate = Array.from(new Set(Object.entries(keyDict).flatMap(([keyName, contractIDs]) => { + const keyId = (0, utils_js_1.findKeyIdByName)(targetState, keyName); + if ( + // Does the key exist? (i.e., is it a current key) + keyId && + // Is it an encryption key? (signing keys don't build up a + // potentially invalid state because the private key isn't + // required for validation; however, missing encryption keys + // prevent message processing) + targetState._vm.authorizedKeys[keyId].purpose.includes('enc') && + // Is this a newly set key? 
(avoid re-syncing contracts that + // haven't been affected by the `OP_KEY_SHARE`) + targetState._vm.authorizedKeys[keyId]._notBeforeHeight >= newestEncryptionKeyHeight) { + return contractIDs; + } + return []; + }))); + // Mark these contracts as dirty + contractIdsToUpdate.forEach((contractID) => { + const targetState = cheloniaState[contractID]; + if (!targetState) + return; + if (!(0, turtledash_1.has)(targetState, '_volatile')) + config.reactiveSet(targetState, '_volatile', Object.create(null)); + config.reactiveSet(targetState._volatile, 'dirty', true); + }); + // Since we have received new keys, the current contract state might be wrong, so we need to remove the contract and resync + // Note: The following may be problematic when several tabs are open + // sharing the same state. This is more of a general issue in this + // situation, not limited to the following sequence of events + if (self.subscriptionSet.has(v.contractID)) { + const resync = (0, sbp_1.default)('chelonia/private/queueEvent', v.contractID, [ + 'chelonia/private/in/syncContract', v.contractID + ]).then(() => { + // Now, if we're subscribed to any of the contracts that were + // marked as dirty, re-sync them + (0, sbp_1.default)('chelonia/private/out/sync', contractIdsToUpdate.filter((contractID) => { + return self.subscriptionSet.has(contractID); + }), { force: true, resync: true }).catch((e) => { + // Using console.error instead of logEvtError because this + // is a side-effect and not relevant for outgoing messages + console.error('[chelonia] Error resyncing contracts with foreign key references after key rotation', e); + }); + }).catch((e) => { + // Using console.error instead of logEvtError because this + // is a side-effect and not relevant for outgoing messages + console.error(`[chelonia] Error during sync for ${v.contractID} during OP_KEY_SHARE for ${contractID}`); + if (v.contractID === contractID) { + throw e; + } + }); + // If the keys received were for the current contract, we 
can't + // use queueEvent as we're already on that same queue + if (v.contractID !== contractID) { + await resync; + } + } + } + const previousVolatileState = targetState?._volatile; + (0, sbp_1.default)('chelonia/private/queueEvent', v.contractID, ['chelonia/private/postKeyShare', v.contractID, mustResync ? previousVolatileState : null, signingKey]) + .then(() => { + // The CONTRACT_HAS_RECEIVED_KEYS event is placed on the queue for + // the current contract so that calling + // 'chelonia/contract/waitingForKeyShareTo' will give correct results + // (i.e., the event is processed after the state is written) + (0, sbp_1.default)('chelonia/private/queueEvent', contractID, () => { + (0, sbp_1.default)('okTurtles.events/emit', events_js_1.CONTRACT_HAS_RECEIVED_KEYS, { contractID: v.contractID, sharedWithContractID: contractID, signingKeyId, get signingKeyName() { return state._vm?.authorizedKeys?.[signingKeyId]?.name; } }); + }).catch((e) => { + // Using console.error instead of logEvtError because this + // is a side-effect and not relevant for outgoing messages + console.error(`[chelonia] Error while emitting the CONTRACT_HAS_RECEIVED_KEYS event for ${contractID}`, e); + }); + }); + }); + }, + [SPMessage_js_1.SPMessage.OP_KEY_REQUEST](wv) { + const data = config.unwrapMaybeEncryptedData(wv); + // If we're unable to decrypt the OP_KEY_REQUEST, then still + // proceed to do accounting of invites + const v = data?.data || { contractID: '(private)', replyWith: { context: undefined }, request: '*' }; + const originatingContractID = v.contractID; + if (state._vm?.invites?.[signingKeyId]?.quantity != null) { + if (state._vm.invites[signingKeyId].quantity > 0) { + if ((--state._vm.invites[signingKeyId].quantity) <= 0) { + state._vm.invites[signingKeyId].status = constants_js_1.INVITE_STATUS.USED; + } + } + else { + (0, utils_js_1.logEvtError)(message, 'Ignoring OP_KEY_REQUEST because it exceeds allowed quantity: ' + originatingContractID); + return; + } + } + if 
(state._vm?.invites?.[signingKeyId]?.expires != null) { + if (state._vm.invites[signingKeyId].expires < Date.now()) { + (0, utils_js_1.logEvtError)(message, 'Ignoring OP_KEY_REQUEST because it expired at ' + state._vm.invites[signingKeyId].expires + ': ' + originatingContractID); + return; + } + } + // If skipping processing or if the message is outgoing, there isn't + // anything else to do + if (config.skipActionProcessing || direction === 'outgoing') { + return; + } + // Outgoing messages don't have a context attribute + if (!(0, turtledash_1.has)(v.replyWith, 'context')) { + (0, utils_js_1.logEvtError)(message, 'Ignoring OP_KEY_REQUEST because it is missing the context attribute'); + return; + } + const context = v.replyWith.context; + if (data && (!Array.isArray(context) || context[0] !== originatingContractID)) { + (0, utils_js_1.logEvtError)(message, 'Ignoring OP_KEY_REQUEST because it is signed by the wrong contract'); + return; + } + if (v.request !== '*') { + (0, utils_js_1.logEvtError)(message, 'Ignoring OP_KEY_REQUEST because it has an unsupported request attribute', v.request); + return; + } + if (!state._vm.pendingKeyshares) + state._vm.pendingKeyshares = Object.create(null); + state._vm.pendingKeyshares[message.hash()] = context + ?
[ + // Full-encryption (i.e., KRS encryption) requires that this request + // was encrypted and that the invite is marked as private + !!data?.encryptionKeyId, + message.height(), + signingKeyId, + context + ] + : [ + !!data?.encryptionKeyId, + message.height(), + signingKeyId + ]; + // Call 'chelonia/private/respondToAllKeyRequests' after sync + if (data) { + internalSideEffectStack?.push(() => { + self.setPostSyncOp(contractID, 'respondToAllKeyRequests-' + message.contractID(), ['chelonia/private/respondToAllKeyRequests', contractID]); + }); + } + }, + [SPMessage_js_1.SPMessage.OP_KEY_REQUEST_SEEN](wv) { + if (config.skipActionProcessing) { + return; + } + // TODO: Handle boolean (success) value + const data = config.unwrapMaybeEncryptedData(wv); + if (!data) + return; + const v = data.data; + if (state._vm.pendingKeyshares && v.keyRequestHash in state._vm.pendingKeyshares) { + const hash = v.keyRequestHash; + const pending = state._vm.pendingKeyshares[hash]; + delete state._vm.pendingKeyshares[hash]; + if (pending.length !== 4) + return; + // If we were able to respond, clean up responders + const keyId = pending[2]; + const originatingContractID = pending[3][0]; + if (Array.isArray(state._vm?.invites?.[keyId]?.responses)) { + state._vm?.invites?.[keyId]?.responses.push(originatingContractID); + } + if (!(0, turtledash_1.has)(state._vm, 'keyshares')) + state._vm.keyshares = Object.create(null); + const success = v.success; + state._vm.keyshares[hash] = { + contractID: originatingContractID, + height, + success, + ...(success && { + hash: v.keyShareHash + }) + }; + } + }, + [SPMessage_js_1.SPMessage.OP_PROP_DEL]: notImplemented, + [SPMessage_js_1.SPMessage.OP_PROP_SET](v) { + if (!state._vm.props) + state._vm.props = {}; + state._vm.props[v.key] = v.value; + }, + [SPMessage_js_1.SPMessage.OP_KEY_ADD](v) { + const keys = keysToMap.call(self, v, height, state._vm.authorizedKeys); + const keysArray = Object.values(v); + keysArray.forEach((k) => { + if ((0, 
turtledash_1.has)(state._vm.authorizedKeys, k.id) && state._vm.authorizedKeys[k.id]._notAfterHeight == null) { + throw new errors_js_1.ChelErrorWarning('Cannot use OP_KEY_ADD on existing keys. Key ID: ' + k.id); + } + }); + utils_js_1.validateKeyAddPermissions.call(self, contractID, signingKey, state, v); + state._vm.authorizedKeys = { ...state._vm.authorizedKeys, ...keys }; + utils_js_1.keyAdditionProcessor.call(self, message, hash, v, state, contractID, signingKey, internalSideEffectStack); + }, + [SPMessage_js_1.SPMessage.OP_KEY_DEL](v) { + if (!state._vm.authorizedKeys) + state._vm.authorizedKeys = Object.create(null); + if (!state._volatile) + state._volatile = Object.create(null); + if (!state._volatile.pendingKeyRevocations) + state._volatile.pendingKeyRevocations = Object.create(null); + utils_js_1.validateKeyDelPermissions.call(self, contractID, signingKey, state, v); + const keyIds = v.map((k) => { + const data = config.unwrapMaybeEncryptedData(k); + if (!data) + return undefined; + return data.data; + }).filter((keyId) => { + if (!keyId || typeof keyId !== 'string') + return false; + if (!(0, turtledash_1.has)(state._vm.authorizedKeys, keyId) || state._vm.authorizedKeys[keyId]._notAfterHeight != null) { + console.warn('Attempted to delete non-existent key from contract', { contractID, keyId }); + return false; + } + return true; + }); + keyIds.forEach((keyId) => { + const key = state._vm.authorizedKeys[keyId]; + state._vm.authorizedKeys[keyId]._notAfterHeight = height; + if ((0, turtledash_1.has)(state._volatile.pendingKeyRevocations, keyId)) { + delete state._volatile.pendingKeyRevocations[keyId]; + } + // Are we deleting a foreign key? 
If so, we also need to remove + // the operation from (1) _volatile.watch (on the other contract) + // and (2) pendingWatch + if (key.foreignKey) { + const fkUrl = new URL(key.foreignKey); + const foreignContract = fkUrl.pathname; + const foreignKeyName = fkUrl.searchParams.get('keyName'); + if (!foreignContract || !foreignKeyName) + throw new Error('Invalid foreign key: missing contract or key name'); + internalSideEffectStack?.push(() => { + (0, sbp_1.default)('chelonia/private/queueEvent', foreignContract, () => { + const rootState = (0, sbp_1.default)(config.stateSelector); + if (Array.isArray(rootState[foreignContract]?._volatile?.watch)) { + // Stop watching events for this key + const oldWatch = rootState[foreignContract]._volatile.watch; + rootState[foreignContract]._volatile.watch = oldWatch.filter(([name, cID]) => name !== foreignKeyName || cID !== contractID); + if (oldWatch.length !== rootState[foreignContract]._volatile.watch.length) { + // If the number of foreign keys changed, maybe there's no + // reason to remain subscribed to this contract. In this + // case, attempt to release it. 
+ (0, sbp_1.default)('chelonia/contract/release', foreignContract, { try: true }).catch((e) => { + // Using console.error instead of logEvtError because this + // is a side-effect and not relevant for outgoing messages + console.error(`[chelonia] Error at OP_KEY_DEL internalSideEffectStack while attempting to release foreign contract ${foreignContract}`, e); + }); + } + } + }).catch((e) => { + // Using console.error instead of logEvtError because this + // is a side-effect and not relevant for outgoing messages + console.error('Error stopping watching events after removing key', { contractID, foreignContract, foreignKeyName, fkUrl }, e); + }); + }); + const pendingWatch = state._vm.pendingWatch?.[foreignContract]; + if (pendingWatch) { + state._vm.pendingWatch[foreignContract] = pendingWatch.filter(([, kId]) => kId !== keyId); + } + } + // Set the status to revoked for invite keys + if (key.name.startsWith('#inviteKey-') && state._vm.invites[key.id]) { + state._vm.invites[key.id].status = constants_js_1.INVITE_STATUS.REVOKED; + } + }); + // Check state._volatile.watch for contracts that should be + // mirroring this operation + if (Array.isArray(state._volatile?.watch)) { + const updatedKeysMap = Object.create(null); + keyIds.forEach((keyId) => { + updatedKeysMap[state._vm.authorizedKeys[keyId].name] = { + name: state._vm.authorizedKeys[keyId].name, + oldKeyId: keyId + }; + }); + keyRotationHelper(contractID, state, config, updatedKeysMap, [SPMessage_js_1.SPMessage.OP_KEY_DEL], 'chelonia/out/keyDel', (name) => updatedKeysMap[name[0]].oldKeyId, internalSideEffectStack); + } + }, + [SPMessage_js_1.SPMessage.OP_KEY_UPDATE](v) { + if (!state._volatile) + state._volatile = Object.create(null); + if (!state._volatile.pendingKeyRevocations) + state._volatile.pendingKeyRevocations = Object.create(null); + const [updatedKeys, updatedMap] = utils_js_1.validateKeyUpdatePermissions.call(self, contractID, signingKey, state, v); + const keysToDelete = Object.values(updatedMap); 
+ for (const keyId of keysToDelete) { + if ((0, turtledash_1.has)(state._volatile.pendingKeyRevocations, keyId)) { + delete state._volatile.pendingKeyRevocations[keyId]; + } + state._vm.authorizedKeys[keyId]._notAfterHeight = height; + } + for (const key of updatedKeys) { + if (!(0, turtledash_1.has)(state._vm.authorizedKeys, key.id)) { + key._notBeforeHeight = height; + state._vm.authorizedKeys[key.id] = (0, turtledash_1.cloneDeep)(key); + } + } + utils_js_1.keyAdditionProcessor.call(self, message, hash, updatedKeys, state, contractID, signingKey, internalSideEffectStack); + // Check state._volatile.watch for contracts that should be + // mirroring this operation + if (Array.isArray(state._volatile?.watch)) { + const updatedKeysMap = Object.create(null); + updatedKeys.forEach((key) => { + if (key.data) { + updatedKeysMap[key.name] = (0, turtledash_1.cloneDeep)(key); + updatedKeysMap[key.name].oldKeyId = updatedMap[key.id]; + } + }); + keyRotationHelper(contractID, state, config, updatedKeysMap, [SPMessage_js_1.SPMessage.OP_KEY_UPDATE], 'chelonia/out/keyUpdate', (name) => ({ + name: name[1], + oldKeyId: updatedKeysMap[name[0]].oldKeyId, + id: updatedKeysMap[name[0]].id, + data: updatedKeysMap[name[0]].data + }), internalSideEffectStack); + } + }, + [SPMessage_js_1.SPMessage.OP_PROTOCOL_UPGRADE]: notImplemented + }; + if (!this.config.skipActionProcessing && !this.manifestToContract[manifestHash]) { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + // Having rootState.contracts[contractID] is not enough to determine we + // have previously synced this contract, as reference counts are also + // stored there. Hence, we check for the presence of 'type' + if (!contractName) { + contractName = (0, turtledash_1.has)(rootState.contracts, contractID) && rootState.contracts[contractID] && (0, turtledash_1.has)(rootState.contracts[contractID], 'type') + ? rootState.contracts[contractID].type + : opT === SPMessage_js_1.SPMessage.OP_CONTRACT + ? 
opV.type + : ''; + } + if (!contractName) { + throw new Error(`Unable to determine the name for a contract and refusing to load it (contract ID was ${contractID} and its manifest hash was ${manifestHash})`); + } + await (0, sbp_1.default)('chelonia/private/loadManifest', contractName, manifestHash); + } + let processOp = true; + if (config.preOp) { + processOp = config.preOp(message, state) !== false && processOp; + } + let signingKey; + // Signature verification + { + // This sync code has potential issues + // The first issue is that it can deadlock if there are circular references + // The second issue is that it doesn't handle key rotation. If the key used for signing is invalidated / removed from the originating contract, we won't have it in the state + // Both of these issues can be resolved by introducing a parameter with the message ID the state is based on. This requires implementing a separate, ephemeral, state container for operations that refer to a different contract. + // The difficulty of this is how to securely determine the message ID to use. + // The server can assist with this. + const stateForValidation = opT === SPMessage_js_1.SPMessage.OP_CONTRACT && !state?._vm?.authorizedKeys + ? 
{ + _vm: { + authorizedKeys: keysToMap.call(this, opV.keys, height) + } + } + : state; + // Verify that the signing key is found, has the correct purpose and is + // allowed to sign this particular operation + if (!(0, utils_js_1.validateKeyPermissions)(message, config, stateForValidation, signingKeyId, opT, opV)) { + throw new Error('No matching signing key was defined'); + } + signingKey = stateForValidation._vm.authorizedKeys[signingKeyId]; + } + if (config[`preOp_${opT}`]) { + processOp = config[`preOp_${opT}`](message, state) !== false && processOp; + } + if (processOp) { + await opFns[opT](opV); + config.postOp?.(message, state); + config[`postOp_${opT}`]?.(message, state); // hack to fix syntax highlighting ` + } + }, + 'chelonia/private/in/enqueueHandleEvent': function (contractID, event) { + // make sure handleEvent is called AFTER any currently-running invocations + // to 'chelonia/private/out/sync', to prevent gi.db from throwing + // "bad previousHEAD" errors + return (0, sbp_1.default)('chelonia/private/queueEvent', contractID, async () => { + await (0, sbp_1.default)('chelonia/private/in/handleEvent', contractID, event); + // Before the next operation is enqueued, enqueue post sync ops. 
This + // makes calling `/wait` more reliable + (0, sbp_1.default)('chelonia/private/enqueuePostSyncOps', contractID); + }); + }, + 'chelonia/private/in/syncContract': async function (contractID, params) { + const state = (0, sbp_1.default)(this.config.stateSelector); + if (state.contracts[contractID] === null) { + throw new errors_js_1.ChelErrorResourceGone('Cannot sync permanently deleted contract ' + contractID); + } + try { + this.currentSyncs[contractID] = { firstSync: !state.contracts[contractID]?.type }; + (0, sbp_1.default)('okTurtles.events/emit', events_js_1.CONTRACT_IS_SYNCING, contractID, true); + const currentVolatileState = state[contractID]?._volatile || Object.create(null); + // If the dirty flag is set (indicating that new encryption keys were received), + // we remove the current state before syncing (this has the effect of syncing + // from the beginning, recreating the entire state). When this is the case, + // the _volatile state is preserved + if (currentVolatileState?.dirty || params?.resync) { + delete currentVolatileState.dirty; + currentVolatileState.resyncing = true; + (0, sbp_1.default)('chelonia/private/removeImmediately', contractID, { resync: true }); + this.config.reactiveSet(state, contractID, Object.create(null)); + this.config.reactiveSet(state[contractID], '_volatile', currentVolatileState); + } + const { HEAD: latestHEAD } = await (0, sbp_1.default)('chelonia/out/latestHEADInfo', contractID); + console.debug(`[chelonia] syncContract: ${contractID} latestHash is: ${latestHEAD}`); + // there is a chance two users are logged in to the same machine and must check their contracts before syncing + const { HEAD: recentHEAD, height: recentHeight } = state.contracts[contractID] || {}; + const isSubscribed = this.subscriptionSet.has(contractID); + if (!isSubscribed) { + const entry = this.pending.find((entry) => entry?.contractID === contractID); + // we're syncing a contract for the first time, make sure to add to pending + // so that 
handleEvents knows to expect events from this contract + if (!entry) { + this.pending.push({ contractID }); + } + } + this.postSyncOperations[contractID] = this.postSyncOperations[contractID] ?? Object.create(null); + if (latestHEAD !== recentHEAD) { + console.debug(`[chelonia] Synchronizing Contract ${contractID}: our recent was ${recentHEAD || 'undefined'} but the latest is ${latestHEAD}`); + // TODO: fetch events from localStorage instead of server if we have them + const eventsStream = (0, sbp_1.default)('chelonia/out/eventsAfter', contractID, recentHeight ?? 0, undefined, recentHEAD ?? contractID); + // Sanity check: verify event with latest hash exists in list of events + // TODO: using findLastIndex, it will be more clean but it needs Cypress 9.7+ which has bad performance + // https://docs.cypress.io/guides/references/changelog#9-7-0 + // https://github.com/cypress-io/cypress/issues/22868 + let latestHashFound = false; + const eventReader = eventsStream.getReader(); + // remove the first element in cases where we are not getting the contract for the first time + for (let skip = (0, turtledash_1.has)(state.contracts, contractID) && (0, turtledash_1.has)(state.contracts[contractID], 'HEAD');; skip = false) { + const { done, value: event } = await eventReader.read(); + if (done) { + if (!latestHashFound) { + throw new errors_js_1.ChelErrorForkedChain(`expected hash ${latestHEAD} in list of events for contract ${contractID}`); + } + break; + } + if (!latestHashFound) { + latestHashFound = SPMessage_js_1.SPMessage.deserializeHEAD(event).hash === latestHEAD; + } + if (skip) + continue; + // this must be called directly, instead of via enqueueHandleEvent + await (0, sbp_1.default)('chelonia/private/in/handleEvent', contractID, event); + } + } + else if (!isSubscribed) { + this.subscriptionSet.add(contractID); + (0, sbp_1.default)('okTurtles.events/emit', events_js_1.CONTRACTS_MODIFIED, Array.from(this.subscriptionSet), { added: [contractID], 
removed: [] }); + const entryIndex = this.pending.findIndex((entry) => entry?.contractID === contractID); + if (entryIndex !== -1) { + this.pending.splice(entryIndex, 1); + } + console.debug(`[chelonia] added already synchronized ${contractID} to subscription set`); + } + else { + console.debug(`[chelonia] contract ${contractID} was already synchronized`); + } + // Do not await here as the post-sync ops might themselves might be + // waiting on the same queue, causing a deadlock + (0, sbp_1.default)('chelonia/private/enqueuePostSyncOps', contractID); + } + catch (e) { + console.error(`[chelonia] syncContract error: ${e.message || e}`, e); + this.config.hooks.syncContractError?.(e, contractID); + throw e; + } + finally { + if (state[contractID]?._volatile?.resyncing) { + this.config.reactiveDel(state[contractID]._volatile, 'resyncing'); + } + delete this.currentSyncs[contractID]; + (0, sbp_1.default)('okTurtles.events/emit', events_js_1.CONTRACT_IS_SYNCING, contractID, false); + } + }, + 'chelonia/private/enqueuePostSyncOps': function (contractID) { + if (!(0, turtledash_1.has)(this.postSyncOperations, contractID)) + return; + // Iterate over each post-sync operation associated with the given contractID. + Object.entries(this.postSyncOperations[contractID]).forEach(([key, op]) => { + // Remove the operation which is about to be handled so that subsequent + // calls to this selector don't result in repeat calls to the post-sync op + delete this.postSyncOperations[contractID][key]; + // Queue the current operation for execution. + // Note that we do _not_ await because it could be unsafe to do so. + // If the operation fails for some reason, just log the error. 
+ (0, sbp_1.default)('chelonia/private/queueEvent', contractID, op).catch((e) => { + console.error(`Post-sync operation for ${contractID} failed`, { contractID, op, error: e }); + }); + }); + }, + 'chelonia/private/watchForeignKeys': function (externalContractID) { + const state = (0, sbp_1.default)(this.config.stateSelector); + const externalContractState = state[externalContractID]; + const pendingWatch = externalContractState?._vm?.pendingWatch; + if (!pendingWatch || !Object.keys(pendingWatch).length) + return; + const signingKey = (0, utils_js_1.findSuitableSecretKeyId)(externalContractState, [SPMessage_js_1.SPMessage.OP_KEY_DEL], ['sig']); + const canMirrorOperations = !!signingKey; + // Only sync contract if we are actually able to mirror key operations + // This avoids exponentially growing the number of contracts that we need + // to be subscribed to. + // Otherwise, every time there is a foreign key, we would subscribe to that + // contract, plus the contracts referenced by the foreign keys of that + // contract, plus those contracts referenced by the foreign keys of those + // other contracts and so on. 
+ if (!canMirrorOperations) { + console.info('[chelonia/private/watchForeignKeys]: Returning as operations cannot be mirrored', { externalContractID }); + return; + } + // For each pending watch operation, queue a synchronization event in the + // respective contract queue + Object.entries(pendingWatch).forEach(([contractID, keys]) => { + if (!Array.isArray(keys) || + // Check that the keys exist and haven't been revoked + !keys.reduce((acc, [, id]) => { + return acc || (0, turtledash_1.has)(externalContractState._vm.authorizedKeys, id); + }, false)) { + console.info('[chelonia/private/watchForeignKeys]: Skipping as none of the keys to watch exist', { + externalContractID, + contractID + }); + return; + } + (0, sbp_1.default)('chelonia/private/queueEvent', contractID, ['chelonia/private/in/syncContractAndWatchKeys', contractID, externalContractID]).catch((e) => { + console.error(`Error at syncContractAndWatchKeys for contractID ${contractID} and externalContractID ${externalContractID}`, e); + }); + }); + }, + 'chelonia/private/in/syncContractAndWatchKeys': async function (contractID, externalContractID) { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + const externalContractState = rootState[externalContractID]; + const pendingWatch = externalContractState?._vm?.pendingWatch?.[contractID]?.splice(0); + // We duplicate the check in 'chelonia/private/watchForeignKeys' because + // new events may have been received in the meantime. 
This avoids + // unnecessarily subscribing to the contract + if (!Array.isArray(pendingWatch) || + // Check that the keys exist and haven't been revoked + !pendingWatch.reduce((acc, [, id]) => { + return acc || ((0, turtledash_1.has)(externalContractState._vm.authorizedKeys, id) && + (0, utils_js_1.findKeyIdByName)(externalContractState, externalContractState._vm.authorizedKeys[id].name) != null); + }, false)) { + console.info('[chelonia/private/syncContractAndWatchKeys]: Skipping as none of the keys to watch exist', { + externalContractID, + contractID + }); + return; + } + // We check this.subscriptionSet to see if we're already + // subscribed to the contract; if not, we call sync. + if (!this.subscriptionSet.has(contractID)) { + await (0, sbp_1.default)('chelonia/private/in/syncContract', contractID); + } + const contractState = rootState[contractID]; + const keysToDelete = []; + const keysToUpdate = []; + pendingWatch.forEach(([keyName, externalId]) => { + // Does the key exist? If not, it has probably been removed and instead + // of waiting, we need to remove it ourselves + const keyId = (0, utils_js_1.findKeyIdByName)(contractState, keyName); + if (!keyId) { + keysToDelete.push(externalId); + return; + } + else if (keyId !== externalId) { + // Or, the key has been updated and we need to update it in the external + // contract as well + keysToUpdate.push(externalId); + } + // Add keys to watchlist as another contract is waiting on these + // operations + if (!contractState._volatile) { + this.config.reactiveSet(contractState, '_volatile', Object.create(null, { watch: { value: [[keyName, externalContractID]], configurable: true, enumerable: true, writable: true } })); + } + else { + if (!contractState._volatile.watch) + this.config.reactiveSet(contractState._volatile, 'watch', [[keyName, externalContractID]]); + if (Array.isArray(contractState._volatile.watch) && !contractState._volatile.watch.find((v) => v[0] === keyName && v[1] === externalContractID)) + 
contractState._volatile.watch.push([keyName, externalContractID]); + } + }); + // If there are keys that need to be revoked, queue an event to handle the + // deletion + if (keysToDelete.length || keysToUpdate.length) { + if (!externalContractState._volatile) { + this.config.reactiveSet(externalContractState, '_volatile', Object.create(null)); + } + if (!externalContractState._volatile.pendingKeyRevocations) { + this.config.reactiveSet(externalContractState._volatile, 'pendingKeyRevocations', Object.create(null)); + } + keysToDelete.forEach((id) => this.config.reactiveSet(externalContractState._volatile.pendingKeyRevocations, id, 'del')); + keysToUpdate.forEach((id) => this.config.reactiveSet(externalContractState._volatile.pendingKeyRevocations, id, true)); + (0, sbp_1.default)('chelonia/private/queueEvent', externalContractID, ['chelonia/private/deleteOrRotateRevokedKeys', externalContractID]).catch((e) => { + console.error(`Error at deleteOrRotateRevokedKeys for contractID ${contractID} and externalContractID ${externalContractID}`, e); + }); + } + }, + // The following function gets called when we start watching a contract for + // foreign keys for the first time, and it ensures that, at the point the + // watching starts, keys are in sync between the two contracts (later on, + // this will be handled automatically for incoming OP_KEY_DEL and + // OP_KEY_UPDATE). + // For any given foreign key, there are three possible states: + // 1. The key is in sync with the foreign contract. In this case, there's + // nothing left to do. + // 2. The key has been rotated in the foreign contract (replaced by another + // key of the same name). We need to mirror this operation manually + // since watching only affects new messages we receive. + // 3. The key has been removed in the foreign contract. We also need to + // mirror the operation. 
+ 'chelonia/private/deleteOrRotateRevokedKeys': function (contractID) { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + const contractState = rootState[contractID]; + const pendingKeyRevocations = contractState?._volatile?.pendingKeyRevocations; + if (!pendingKeyRevocations || Object.keys(pendingKeyRevocations).length === 0) + return; + // First, we handle keys that have been rotated + const keysToUpdate = Object.entries(pendingKeyRevocations).filter(([, v]) => v === true).map(([id]) => id); + // Aggregate the keys that we can update to send them in a single operation + const [, keyUpdateSigningKeyId, keyUpdateArgs] = keysToUpdate.reduce((acc, keyId) => { + const key = contractState._vm?.authorizedKeys?.[keyId]; + if (!key || !key.foreignKey) + return acc; + const foreignKey = String(key.foreignKey); + const fkUrl = new URL(foreignKey); + const foreignContractID = fkUrl.pathname; + const foreignKeyName = fkUrl.searchParams.get('keyName'); + if (!foreignKeyName) + throw new Error('Missing foreign key name'); + const foreignState = rootState[foreignContractID]; + if (!foreignState) + return acc; + const fKeyId = (0, utils_js_1.findKeyIdByName)(foreignState, foreignKeyName); + if (!fKeyId) { + // Key was deleted; mark it for deletion + if (pendingKeyRevocations[keyId] === true) { + this.config.reactiveSet(pendingKeyRevocations, keyId, 'del'); + } + return acc; + } + const [currentRingLevel, currentSigningKeyId, currentKeyArgs] = acc; + const ringLevel = Math.min(currentRingLevel, key.ringLevel ?? 
Number.POSITIVE_INFINITY); + if (ringLevel >= currentRingLevel) { + currentKeyArgs.push({ + name: key.name, + oldKeyId: keyId, + id: fKeyId, + data: foreignState._vm.authorizedKeys[fKeyId].data + }); + return [currentRingLevel, currentSigningKeyId, currentKeyArgs]; + } + else if (Number.isFinite(ringLevel)) { + const signingKeyId = (0, utils_js_1.findSuitableSecretKeyId)(contractState, [SPMessage_js_1.SPMessage.OP_KEY_UPDATE], ['sig'], ringLevel); + if (signingKeyId) { + currentKeyArgs.push({ + name: key.name, + oldKeyId: keyId, + id: fKeyId, + data: foreignState._vm.authorizedKeys[fKeyId].data + }); + return [ringLevel, signingKeyId, currentKeyArgs]; + } + } + return acc; + }, [Number.POSITIVE_INFINITY, '', []]); + if (keyUpdateArgs.length !== 0) { + const contractName = contractState._vm.type; + // This is safe to do without await because it's sending an operation + // Using await could deadlock when retrying to send the message + (0, sbp_1.default)('chelonia/out/keyUpdate', { contractID, contractName, data: keyUpdateArgs, signingKeyId: keyUpdateSigningKeyId }).catch((e) => { + console.error(`[chelonia/private/deleteOrRotateRevokedKeys] Error sending OP_KEY_UPDATE for ${contractID}`, e.message); + }); + } + // And then, we handle keys that have been deleted + const keysToDelete = Object.entries(pendingKeyRevocations).filter(([, v]) => v === 'del').map(([id]) => id); + // Aggregate the keys that we can delete to send them in a single operation + const [, keyDelSigningKeyId, keyIdsToDelete] = keysToDelete.reduce((acc, keyId) => { + const [currentRingLevel, currentSigningKeyId, currentKeyIds] = acc; + const ringLevel = Math.min(currentRingLevel, contractState._vm?.authorizedKeys?.[keyId]?.ringLevel ?? 
Number.POSITIVE_INFINITY); + if (ringLevel >= currentRingLevel) { + currentKeyIds.push(keyId); + return [currentRingLevel, currentSigningKeyId, currentKeyIds]; + } + else if (Number.isFinite(ringLevel)) { + const signingKeyId = (0, utils_js_1.findSuitableSecretKeyId)(contractState, [SPMessage_js_1.SPMessage.OP_KEY_DEL], ['sig'], ringLevel); + if (signingKeyId) { + currentKeyIds.push(keyId); + return [ringLevel, signingKeyId, currentKeyIds]; + } + } + return acc; + }, [Number.POSITIVE_INFINITY, '', []]); + if (keyIdsToDelete.length !== 0) { + const contractName = contractState._vm.type; + // This is safe to do without await because it's sending an operation + // Using await could deadlock when retrying to send the message + (0, sbp_1.default)('chelonia/out/keyDel', { contractID, contractName, data: keyIdsToDelete, signingKeyId: keyDelSigningKeyId }).catch((e) => { + console.error(`[chelonia/private/deleteRevokedKeys] Error sending OP_KEY_DEL for ${contractID}`, e.message); + }); + } + }, + 'chelonia/private/respondToAllKeyRequests': function (contractID) { + const state = (0, sbp_1.default)(this.config.stateSelector); + const contractState = state[contractID] ?? 
{}; + const pending = contractState?._vm?.pendingKeyshares; + if (!pending) + return; + const signingKeyId = (0, utils_js_1.findSuitableSecretKeyId)(contractState, [SPMessage_js_1.SPMessage.OP_ATOMIC, SPMessage_js_1.SPMessage.OP_KEY_REQUEST_SEEN, SPMessage_js_1.SPMessage.OP_KEY_SHARE], ['sig']); + if (!signingKeyId) { + console.log('Unable to respond to key request because there is no suitable secret key with OP_KEY_REQUEST_SEEN permission'); + return; + } + Object.entries(pending).map(([hash, entry]) => { + if (!Array.isArray(entry) || entry.length !== 4) { + return undefined; + } + const [, , , [originatingContractID]] = entry; + return (0, sbp_1.default)('chelonia/private/queueEvent', originatingContractID, ['chelonia/private/respondToKeyRequest', contractID, signingKeyId, hash]).catch((e) => { + console.error(`respondToAllKeyRequests: Error responding to key request ${hash} from ${originatingContractID} to ${contractID}`, e); + }); + }); + }, + 'chelonia/private/respondToKeyRequest': async function (contractID, signingKeyId, hash) { + const state = (0, sbp_1.default)(this.config.stateSelector); + const contractState = state[contractID]; + const entry = contractState?._vm?.pendingKeyshares?.[hash]; + const instance = this._instance; + if (!Array.isArray(entry) || entry.length !== 4) { + return; + } + const [keyShareEncryption, height, , [originatingContractID, rv, originatingContractHeight, headJSON]] = entry; + entry.pop(); + const krsEncryption = !!contractState._vm.authorizedKeys?.[signingKeyId]?._private; + // 1. 
Sync (originating) identity contract + await (0, sbp_1.default)('chelonia/private/in/syncContract', originatingContractID); + if (instance !== this._instance) + return; + const originatingState = state[originatingContractID]; + const contractName = state.contracts[contractID].type; + const originatingContractName = originatingState._vm.type; + const v = (0, signedData_js_1.signedIncomingData)(originatingContractID, originatingState, rv, originatingContractHeight, headJSON).valueOf(); + // 2. Verify 'data' + const { encryptionKeyId } = v; + const responseKey = (0, encryptedData_js_1.encryptedIncomingData)(contractID, contractState, v.responseKey, height, this.transientSecretKeys, headJSON).valueOf(); + const deserializedResponseKey = (0, crypto_1.deserializeKey)(responseKey); + const responseKeyId = (0, crypto_1.keyId)(deserializedResponseKey); + // This is safe to do without await because it's sending actions + // If we had await it could deadlock when retrying to send the event + Promise.resolve().then(() => { + if (instance !== this._instance) + return; + if (!(0, turtledash_1.has)(originatingState._vm.authorizedKeys, responseKeyId) || originatingState._vm.authorizedKeys[responseKeyId]._notAfterHeight != null) { + throw new Error(`Unable to respond to key request for ${originatingContractID}. Key ${responseKeyId} is not valid.`); + } + // We don't need to worry about persistence (if it was an outgoing + // message) here as this is done from an internal side-effect. 
+ (0, sbp_1.default)('chelonia/storeSecretKeys', new Secret_js_1.Secret([ + { key: deserializedResponseKey } + ])); + const keys = (0, turtledash_1.pick)(state.secretKeys, Object.entries(contractState._vm.authorizedKeys) + .filter(([, key]) => !!key.meta?.private?.shareable) + .map(([kId]) => kId)); + if (!keys || Object.keys(keys).length === 0) { + console.info('respondToAllKeyRequests: no keys to share', { contractID, originatingContractID }); + return; + } + const keySharePayload = { + contractID, + keys: Object.entries(keys).map(([keyId, key]) => ({ + id: keyId, + meta: { + private: { + content: (0, encryptedData_js_1.encryptedOutgoingData)(originatingContractID, encryptionKeyId, key), + shareable: true + } + } + })), + keyRequestHash: hash, + keyRequestHeight: height + }; + // 3. Send OP_KEY_SHARE to identity contract + if (!contractState?._vm?.pendingKeyshares?.[hash]) { + // While we were getting ready, another client may have shared the keys + return; + } + return keySharePayload; + }).then((keySharePayload) => { + if (instance !== this._instance || !keySharePayload) + return; + return (0, sbp_1.default)('chelonia/out/keyShare', { + contractID: originatingContractID, + contractName: originatingContractName, + data: keyShareEncryption + ? (0, encryptedData_js_1.encryptedOutgoingData)(originatingContractID, (0, utils_js_1.findSuitablePublicKeyIds)(originatingState, [SPMessage_js_1.SPMessage.OP_KEY_SHARE], ['enc'])?.[0] || '', keySharePayload) + : keySharePayload, + signingKeyId: responseKeyId + }).then((msg) => { + if (instance !== this._instance) + return; + // 4(i). 
Remove originating contract and update current contract with information + const payload = { keyRequestHash: hash, keyShareHash: msg.hash(), success: true }; + const connectionKeyPayload = { + contractID: originatingContractID, + keys: [ + { + id: responseKeyId, + meta: { + private: { + content: (0, encryptedData_js_1.encryptedOutgoingData)(contractID, (0, utils_js_1.findSuitablePublicKeyIds)(contractState, [SPMessage_js_1.SPMessage.OP_KEY_REQUEST_SEEN], ['enc'])?.[0] || '', responseKey), + shareable: true + } + } + } + ] + }; + // This is safe to do without await because it's sending an action + // If we had await it could deadlock when retrying to send the event + (0, sbp_1.default)('chelonia/out/atomic', { + contractID, + contractName, + signingKeyId, + data: [ + [ + 'chelonia/out/keyRequestResponse', + { + data: krsEncryption + ? (0, encryptedData_js_1.encryptedOutgoingData)(contractID, (0, utils_js_1.findSuitablePublicKeyIds)(contractState, [SPMessage_js_1.SPMessage.OP_KEY_REQUEST_SEEN], ['enc'])?.[0] || '', payload) + : payload + } + ], + [ + // Upon successful key share, we want to share deserializedResponseKey + // with ourselves + 'chelonia/out/keyShare', + { + data: keyShareEncryption + ? (0, encryptedData_js_1.encryptedOutgoingData)(contractID, (0, utils_js_1.findSuitablePublicKeyIds)(contractState, [SPMessage_js_1.SPMessage.OP_KEY_SHARE], ['enc'])?.[0] || '', connectionKeyPayload) + : connectionKeyPayload + } + ] + ] + }).catch((e) => { + console.error('Error at respondToKeyRequest while sending keyRequestResponse', e); + }); + }); + }).catch((e) => { + console.error('Error at respondToKeyRequest', e); + const payload = { keyRequestHash: hash, success: false }; + // 4(ii). 
Remove originating contract and update current contract with information + if (!contractState?._vm?.pendingKeyshares?.[hash]) { + // While we were getting ready, another client may have shared the keys + return; + } + // This is safe to do without await because it's sending an action + // If we had await it could deadlock when retrying to send the event + (0, sbp_1.default)('chelonia/out/keyRequestResponse', { + contractID, + contractName, + signingKeyId, + data: krsEncryption + ? (0, encryptedData_js_1.encryptedOutgoingData)(contractID, (0, utils_js_1.findSuitablePublicKeyIds)(contractState, [SPMessage_js_1.SPMessage.OP_KEY_REQUEST_SEEN], ['enc'])?.[0] || '', payload) + : payload + }).catch((e) => { + console.error('Error at respondToKeyRequest while sending keyRequestResponse in error handler', e); + }); + }); + }, + 'chelonia/private/in/handleEvent': async function (contractID, rawMessage) { + const state = (0, sbp_1.default)(this.config.stateSelector); + const { preHandleEvent, postHandleEvent, handleEventError } = this.config.hooks; + let processingErrored = false; + let message; + // Errors in mutations result in ignored messages + // Errors in side effects result in dropped messages to be reprocessed + try { + // verify we're expecting to hear from this contract + if (!this.config.acceptAllMessages && !this.pending.some((entry) => entry?.contractID === contractID) && !this.subscriptionSet.has(contractID)) { + console.warn(`[chelonia] WARN: ignoring unexpected event for ${contractID}:`, rawMessage); + return; + } + // contractStateCopy has a copy of the current contract state, or an empty + // object if the state doesn't exist. This copy will be used to apply + // any changes from processing the current event as well as when calling + // side-effects and, once everything is processed, it will be applied + // to the global state. 
Important note: because the state change is + // applied to the Vuex state only if process is successful (and after both + // process and the sideEffect finish), any sideEffects that need to the + // access the state should do so only through the state that is passed in + // to the call to the sideEffect, or through a call though queueInvocation + // (so that the side effect runs after the changes are applied) + const contractStateCopy = state[contractID] ? (0, turtledash_1.cloneDeep)(state[contractID]) : Object.create(null); + // Now, deserialize the messsage + // The message is deserialized *here* and not earlier because deserialize + // constructs objects of signedIncomingData and encryptedIncomingData + // which are bound to the state. For some opcodes (such as OP_ATOMIC), the + // state could change in ways that are significant for further processing, + // so those objects need to be bound to the state copy (which is mutated) + // as opposed to the the root state (which is mutated only after + // processing is done). + // For instance, let's say the message contains an OP_ATOMIC comprising + // two operations: OP_KEY_ADD (adding a signing key) and OP_ACTION_ENCRYPTED + // (with an inner signature using this key in OP_KEY_ADD). If the state + // is bound to the copy (as below), then by the time OP_ACTION_ENCRYPTED + // is processed, the result of OP_KEY_ADD has been applied to the state + // copy. If we didn't specify a state or instead grabbed it from the root + // state, then we wouldn't be able to process OP_ACTION_ENCRYPTED correctly, + // as we wouldn't know that the key is valid from that state, and the + // state copy (contractStateCopy) is only written to the root state after + // all processing has completed. + message = SPMessage_js_1.SPMessage.deserialize(rawMessage, this.transientSecretKeys, contractStateCopy, this.config.unwrapMaybeEncryptedData); + if (message.contractID() !== contractID) { + throw new Error(`[chelonia] Wrong contract ID. 
Expected ${contractID} but got ${message.contractID()}`); + } + if (!message.isFirstMessage() && (!(0, turtledash_1.has)(state.contracts, contractID) || !(0, turtledash_1.has)(state, contractID))) { + throw new errors_js_1.ChelErrorUnrecoverable('The event is not for a first message but the contract state is missing'); + } + preHandleEvent?.(message); + // the order the following actions are done is critically important! + // first we make sure we can save this message to the db + // if an exception is thrown here we do not need to revert the state + // because nothing has been processed yet + const proceed = handleEvent.checkMessageOrdering.call(this, message); + if (proceed === false) + return; + // If the contract was marked as dirty, we stop processing + // The 'dirty' flag is set, possibly *by another contract*, indicating + // that a previously unknown encryption key has been received. This means + // that the current state is invalid (because it could changed based on + // this new information) and we must re-sync the contract. When this + // happens, we stop processing because the state will be regenerated. + if (state[contractID]?._volatile?.dirty) { + console.info(`[chelonia] Ignoring message ${message.description()} as the contract is marked as dirty`); + return; + } + const internalSideEffectStack = !this.config.skipSideEffects ? [] : undefined; + // process the mutation on the state + // IMPORTANT: even though we 'await' processMutation, everything in your + // contract's 'process' function must be synchronous! 
The only + // reason we 'await' here is to dynamically load any new contract + // source / definitions specified by the SPMessage + missingDecryptionKeyIdsMap.delete(message); + try { + await handleEvent.processMutation.call(this, message, contractStateCopy, internalSideEffectStack); + } + catch (e_) { + const e = e_; + if (e?.name === 'ChelErrorDecryptionKeyNotFound') { + console.warn(`[chelonia] WARN '${e.name}' in processMutation for ${message.description()}: ${e.message}`, e, message.serialize()); + if (e.cause) { + const missingDecryptionKeyIds = missingDecryptionKeyIdsMap.get(message); + if (missingDecryptionKeyIds) { + missingDecryptionKeyIds.add(e.cause); + } + else { + missingDecryptionKeyIdsMap.set(message, new Set([e.cause])); + } + } + } + else { + console.error(`[chelonia] ERROR '${e.name}' in processMutation for ${message.description()}: ${e.message || e}`, e, message.serialize()); + } + // we revert any changes to the contract state that occurred, ignoring this mutation + console.warn(`[chelonia] Error processing ${message.description()}: ${message.serialize()}. Any side effects will be skipped!`); + if (this.config.strictProcessing) { + throw e; + } + processingErrored = e?.name !== 'ChelErrorWarning'; + this.config.hooks.processError?.(e, message, getMsgMeta.call(this, message, contractID, contractStateCopy)); + // special error that prevents the head from being updated, effectively killing the contract + if (e.name === 'ChelErrorUnrecoverable' || + e.name === 'ChelErrorForkedChain' || + message.isFirstMessage()) { + throw e; + } + } + // process any side-effects (these must never result in any mutation to the contract state!) 
+ if (!processingErrored) { + // Gets run get when skipSideEffects is false + if (Array.isArray(internalSideEffectStack) && internalSideEffectStack.length > 0) { + await Promise.all(internalSideEffectStack.map(fn => Promise.resolve(fn({ state: contractStateCopy, message: message })).catch((e_) => { + const e = e_; + console.error(`[chelonia] ERROR '${e.name}' in internal side effect for ${message.description()}: ${e.message}`, e, { message: message.serialize() }); + }))); + } + if (!this.config.skipActionProcessing && !this.config.skipSideEffects) { + await handleEvent.processSideEffects.call(this, message, contractStateCopy)?.catch((e_) => { + const e = e_; + console.error(`[chelonia] ERROR '${e.name}' in sideEffect for ${message.description()}: ${e.message}`, e, { message: message.serialize() }); + // We used to revert the state and rethrow the error here, but we no longer do that + // see this issue for why: https://github.com/okTurtles/group-income/issues/1544 + this.config.hooks.sideEffectError?.(e, message); + }); + } + } + // We keep changes to the contract state and state.contracts as close as + // possible in the code to reduce the chances of still ending up with + // an inconsistent state if a sudden failure happens while this code + // is executing. In particular, everything in between should be synchronous. + // This block will apply all the changes related to modifying the state + // after an event has been processed: + // 1. Adding the messge to the DB + // 2. Applying changes to the contract state + // 3. 
Applying changes to rootState.contracts + try { + const state = (0, sbp_1.default)(this.config.stateSelector); + await handleEvent.applyProcessResult.call(this, { message, state, contractState: contractStateCopy, processingErrored, postHandleEvent }); + } + catch (e_) { + const e = e_; + console.error(`[chelonia] ERROR '${e.name}' for ${message.description()} marking the event as processed: ${e.message}`, e, { message: message.serialize() }); + } + } + catch (e_) { + const e = e_; + console.error(`[chelonia] ERROR in handleEvent: ${e.message || e}`, e); + try { + handleEventError?.(e, message); + } + catch (e2) { + console.error('[chelonia] Ignoring user error in handleEventError hook:', e2); + } + throw e; + } + finally { + if (message) { + missingDecryptionKeyIdsMap.delete(message); + } + } + } +}); +const eventsToReingest = []; +const reprocessDebounced = (0, turtledash_1.debounce)((contractID) => (0, sbp_1.default)('chelonia/private/out/sync', contractID, { force: true }).catch((e) => { + console.error(`[chelonia] Error at reprocessDebounced for ${contractID}`, e); +}), 1000); +const handleEvent = { + checkMessageOrdering(message) { + const contractID = message.contractID(); + const hash = message.hash(); + const height = message.height(); + const state = (0, sbp_1.default)(this.config.stateSelector); + // The latest height we want to use is the one from `state.contracts` and + // not the one from the DB. The height in the state reflects the latest + // message that's been processed, which is desired here. On the other hand, + // the DB function includes the latest known message for that contract, + // which can be ahead of the latest message processed. 
+ const latestProcessedHeight = state.contracts[contractID]?.height; + if (!Number.isSafeInteger(height)) { + throw new errors_js_1.ChelErrorDBBadPreviousHEAD(`Message ${hash} in contract ${contractID} has an invalid height.`); + } + // Avoid re-processing already processed messages + if (message.isFirstMessage() + // If this is the first message, the height is is expected not to exist + ? latestProcessedHeight != null + // If this isn't the first message, the height must not be lower than the + // current's message height. The check is negated to handle NaN values + : !(latestProcessedHeight < height)) { + // The web client may sometimes get repeated messages. If strict ordering + // isn't enabled, instead of throwing we return false. + // On the other hand, the server must enforce strict ordering. + if (!this.config.strictOrdering) { + return false; + } + throw new errors_js_1.ChelErrorAlreadyProcessed(`Message ${hash} with height ${height} in contract ${contractID} has already been processed. Current height: ${latestProcessedHeight}.`); + } + // If the message is from the future, add it to eventsToReingest + if ((latestProcessedHeight + 1) < height) { + if (this.config.strictOrdering) { + throw new errors_js_1.ChelErrorDBBadPreviousHEAD(`Unexpected message ${hash} with height ${height} in contract ${contractID}: height is too high. Current height: ${latestProcessedHeight}.`); + } + // sometimes we simply miss messages, it's not clear why, but it happens + // in rare cases. 
So we attempt to re-sync this contract once + if (eventsToReingest.length > 100) { + throw new errors_js_1.ChelErrorUnrecoverable('more than 100 different bad previousHEAD errors'); + } + if (!eventsToReingest.includes(hash)) { + console.warn(`[chelonia] WARN bad previousHEAD for ${message.description()}, will attempt to re-sync contract to reingest message`); + eventsToReingest.push(hash); + reprocessDebounced(contractID); + return false; // ignore the error for now + } + else { + console.error(`[chelonia] ERROR already attempted to reingest ${message.description()}, will not attempt again!`); + throw new errors_js_1.ChelErrorDBBadPreviousHEAD(`Already attempted to reingest ${hash}`); + } + } + const reprocessIdx = eventsToReingest.indexOf(hash); + if (reprocessIdx !== -1) { + console.warn(`[chelonia] WARN: successfully reingested ${message.description()}`); + eventsToReingest.splice(reprocessIdx, 1); + } + }, + async processMutation(message, state, internalSideEffectStack) { + const contractID = message.contractID(); + if (message.isFirstMessage()) { + // Allow having _volatile but nothing else if this is the first message, + // as we should be starting off with a clean state + if (Object.keys(state).some(k => k !== '_volatile')) { + throw new errors_js_1.ChelErrorUnrecoverable(`state for ${contractID} is already set`); + } + } + await (0, sbp_1.default)('chelonia/private/in/processMessage', message, state, internalSideEffectStack); + }, + processSideEffects(message, state) { + const opT = message.opType(); + if (![SPMessage_js_1.SPMessage.OP_ATOMIC, SPMessage_js_1.SPMessage.OP_ACTION_ENCRYPTED, SPMessage_js_1.SPMessage.OP_ACTION_UNENCRYPTED].includes(opT)) { + return; + } + const contractID = message.contractID(); + const manifestHash = message.manifest(); + const hash = message.hash(); + const height = message.height(); + const signingKeyId = message.signingKeyId(); + const callSideEffect = async (field) => { + const wv = 
this.config.unwrapMaybeEncryptedData(field); + if (!wv) + return; + let v = wv.data; + let innerSigningKeyId; + if ((0, signedData_js_1.isSignedData)(v)) { + innerSigningKeyId = v.signingKeyId; + v = v.valueOf(); + } + const { action, data, meta } = v; + const mutation = { + data, + meta, + hash, + height, + contractID, + description: message.description(), + direction: message.direction(), + signingKeyId, + get signingContractID() { + return (0, utils_js_1.getContractIDfromKeyId)(contractID, signingKeyId, state); + }, + innerSigningKeyId, + get innerSigningContractID() { + return (0, utils_js_1.getContractIDfromKeyId)(contractID, innerSigningKeyId, state); + } + }; + return await (0, sbp_1.default)(`${manifestHash}/${action}/sideEffect`, mutation, state); + }; + const msg = Object(message.message()); + if (opT !== SPMessage_js_1.SPMessage.OP_ATOMIC) { + return callSideEffect(msg); + } + const reducer = (acc, [opT, opV]) => { + if ([SPMessage_js_1.SPMessage.OP_ACTION_ENCRYPTED, SPMessage_js_1.SPMessage.OP_ACTION_UNENCRYPTED].includes(opT)) { + acc.push(Object(opV)); + } + return acc; + }; + const actionsOpV = msg.reduce(reducer, []); + return Promise.allSettled(actionsOpV.map((action) => callSideEffect(action))).then((results) => { + const errors = results.filter((r) => r.status === 'rejected').map((r) => r.reason); + if (errors.length > 0) { + console.error('Side-effect errors', contractID, errors); + throw new AggregateError(errors, `Error at side effects for ${contractID}`); + } + }); + }, + async applyProcessResult({ message, state, contractState, processingErrored, postHandleEvent }) { + const contractID = message.contractID(); + const hash = message.hash(); + const height = message.height(); + await (0, sbp_1.default)('chelonia/db/addEntry', message); + if (!processingErrored) { + // Once side-effects are called, we apply changes to the state. 
+ // This means, as mentioned above, that retrieving the contract state + // via the global state will yield incorrect results. Doing things in + // this order ensures that incomplete processing of events (i.e., process + // + side-effects), e.g., due to sudden failures (like power outages, + // Internet being disconnected, etc.) aren't persisted. This allows + // us to recover by re-processing the event when these sudden failures + // happen + this.config.reactiveSet(state, contractID, contractState); + try { + postHandleEvent?.(message); + } + catch (e) { + console.error(`[chelonia] ERROR '${e.name}' for ${message.description()} in event post-handling: ${e.message}`, e, { message: message.serialize() }); + } + } + // whether or not there was an exception, we proceed ahead with updating the head + // you can prevent this by throwing an exception in the processError hook + if (message.isFirstMessage()) { + const { type } = message.opValue(); + if (!(0, turtledash_1.has)(state.contracts, contractID)) { + this.config.reactiveSet(state.contracts, contractID, Object.create(null)); + } + this.config.reactiveSet(state.contracts[contractID], 'type', type); + console.debug(`contract ${type} registered for ${contractID}`); + } + if (message.isKeyOp()) { + this.config.reactiveSet(state.contracts[contractID], 'previousKeyOp', hash); + } + this.config.reactiveSet(state.contracts[contractID], 'HEAD', hash); + this.config.reactiveSet(state.contracts[contractID], 'height', height); + // If there were decryption errors due to missing encryption keys, we store + // those key IDs. If those key IDs are later shared with us, we can re-sync + // the contract. Without this information, we can only guess whether a + // re-sync is needed or not. + // We do it here because the property is stored under `.contracts` instead + // of in the contract state itself, and this is where `.contracts` gets + // updated after handling a message. 
+ const missingDecryptionKeyIdsForMessage = missingDecryptionKeyIdsMap.get(message); + if (missingDecryptionKeyIdsForMessage) { + let missingDecryptionKeyIds = state.contracts[contractID].missingDecryptionKeyIds; + if (!missingDecryptionKeyIds) { + missingDecryptionKeyIds = []; + this.config.reactiveSet(state.contracts[contractID], 'missingDecryptionKeyIds', missingDecryptionKeyIds); + } + missingDecryptionKeyIdsForMessage.forEach(keyId => { + if (missingDecryptionKeyIds.includes(keyId)) + return; + missingDecryptionKeyIds.push(keyId); + }); + } + if (!this.subscriptionSet.has(contractID)) { + const entry = this.pending.find((entry) => entry?.contractID === contractID); + // we've successfully received it back, so remove it from expectation pending + if (entry) { + const index = this.pending.indexOf(entry); + if (index !== -1) { + this.pending.splice(index, 1); + } + } + this.subscriptionSet.add(contractID); + (0, sbp_1.default)('okTurtles.events/emit', events_js_1.CONTRACTS_MODIFIED, Array.from(this.subscriptionSet), { added: [contractID], removed: [] }); + } + if (!processingErrored) { + (0, sbp_1.default)('okTurtles.events/emit', hash, contractID, message); + (0, sbp_1.default)('okTurtles.events/emit', events_js_1.EVENT_HANDLED, contractID, message); + } + } +}; +const notImplemented = (v) => { + throw new Error(`chelonia: action not implemented to handle: ${JSON.stringify(v)}.`); +}; +// The code below represents different ways to dynamically load code at runtime, +// and the SES example shows how to sandbox runtime loaded code (although it doesn't +// work, see https://github.com/endojs/endo/issues/1207 for details). It's also not +// super important since we're loaded signed contracts. 
+/* +// https://2ality.com/2019/10/eval-via-import.html +// Example: await import(esm`${source}`) +// const esm = ({ raw }, ...vals) => { +// return URL.createObjectURL(new Blob([String.raw({ raw }, ...vals)], { type: 'text/javascript' })) +// } + +// await loadScript.call(this, contractInfo.file, source, contractInfo.hash) +// .then(x => { +// console.debug(`loaded ${contractInfo.file}`) +// return x +// }) +// eslint-disable-next-line no-unused-vars +function loadScript (file, source, hash) { + return new Promise((resolve, reject) => { + const script = document.createElement('script') + // script.type = 'application/javascript' + script.type = 'module' + // problem with this is that scripts will step on each other's feet + script.text = source + // NOTE: this will work if the file route adds .header('Content-Type', 'application/javascript') + // script.src = `${this.config.connectionURL}/file/${hash}` + // this results in: "SyntaxError: import declarations may only appear at top level of a module" + // script.text = `(function () { + // ${source} + // })()` + script.onload = () => resolve(script) + script.onerror = (err) => reject(new Error(`${err || 'Error'} trying to load: ${file}`)) + document.getElementsByTagName('head')[0].appendChild(script) + }) +} + +// This code is cobbled together based on: +// https://github.com/endojs/endo/blob/master/packages/ses/test/test-import-cjs.js +// https://github.com/endojs/endo/blob/master/packages/ses/test/test-import.js +// const vm = await sesImportVM.call(this, `${this.config.connectionURL}/file/${contractInfo.hash}`) +// eslint-disable-next-line no-unused-vars +function sesImportVM (url): Promise { + // eslint-disable-next-line no-undef + const vm = new Compartment( + { + ...this.config.contracts.defaults.exposedGlobals, + console + }, + {}, // module map + { + resolveHook (spec, referrer) { + console.debug('resolveHook', { spec, referrer }) + return spec + }, + // 
eslint-disable-next-line require-await + async importHook (moduleSpecifier: string, ...args) { + const source = await this.config.fetch(moduleSpecifier).then(handleFetchResult('text')) + console.debug('importHook', { fetch: moduleSpecifier, args, source }) + const execute = (moduleExports, compartment, resolvedImports) => { + console.debug('execute called with:', { moduleExports, resolvedImports }) + const functor = compartment.evaluate( + `(function (require, exports, module, __filename, __dirname) { ${source} })` + // this doesn't seem to help with: https://github.com/endojs/endo/issues/1207 + // { __evadeHtmlCommentTest__: false, __rejectSomeDirectEvalExpressions__: false } + ) + const require_ = (importSpecifier) => { + console.debug('in-source require called with:', importSpecifier, 'keying:', resolvedImports) + const namespace = compartment.importNow(resolvedImports[importSpecifier]) + console.debug('got namespace:', namespace) + return namespace.default === undefined ? 
namespace : namespace.default + } + const module_ = { + get exports () { + return moduleExports + }, + set exports (newModuleExports) { + moduleExports.default = newModuleExports + } + } + functor(require_, moduleExports, module_, moduleSpecifier) + } + if (moduleSpecifier === '@common/common.cjs') { + return { + imports: [], + exports: ['Vue', 'L'], + execute + } + } else { + return { + imports: ['@common/common.cjs'], + exports: [], + execute + } + } + } + } + ) + // vm.evaluate(source) + return vm.import(url) +} +*/ diff --git a/dist/cjs/internals.d.cts b/dist/cjs/internals.d.cts new file mode 100644 index 0000000..7e352b4 --- /dev/null +++ b/dist/cjs/internals.d.cts @@ -0,0 +1,3 @@ +import './db.cjs'; +declare const _default: string[]; +export default _default; diff --git a/dist/cjs/local-selectors/index.cjs b/dist/cjs/local-selectors/index.cjs new file mode 100644 index 0000000..d0f03a2 --- /dev/null +++ b/dist/cjs/local-selectors/index.cjs @@ -0,0 +1,124 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +// This file provides utility functions that are local regardless of whether +// Chelonia is running in a different context and calls are being forwarded +// using `chelonia/*` +const sbp_1 = __importDefault(require("@sbp/sbp")); +const turtledash_1 = require("turtledash"); +const events_js_1 = require("../events.cjs"); +exports.default = (0, sbp_1.default)('sbp/selectors/register', { + // This selector sets up event listeners on EVENT_HANDLED and CONTRACTS_MODIFIED + // to keep Chelonia state in sync with some external state (e.g., Vuex). + // This needs to be called from the context that owns this external state + // (e.g., the tab in which the app is running) and because 'full' Chelonia may + // be available in this context, we cannot use `chelonia/configure`. 
+ // _If there is no external state to be kept in sync with Chelonia, this selector doesn't need to be called_ + // + // For example, **if Chelonia is running on a service worker**, the following + // would be done. + // 1. The service worker calls `chelonia/configure` and forwards EVENT_HANDLED + // and CONTRACTS_MODIFIED events to all clients (tabs) + // Note: `chelonia/configure` is called by the context running Chelonia + // 2. Each tab uses `chelonia/*` to forward calls to Chelonia to the SW. + // Note: Except selectors defined in this file + // 3. Each tab calls this selector once to set up event listeners on EVENT_HANDLED + // and CONTRACTS_MODIFIED, which will keep each tab's state updated every + // time Chelonia handles an event. + 'chelonia/externalStateSetup': function ({ stateSelector, reactiveSet = Reflect.set.bind(Reflect), reactiveDel = Reflect.deleteProperty.bind(Reflect) }) { + this.stateSelector = stateSelector; + (0, sbp_1.default)('okTurtles.events/on', events_js_1.EVENT_HANDLED, (contractID, message) => { + // The purpose of putting things immediately into a queue is to have + // state mutations happen in a well-defined order. This is done for two + // purposes: + // 1. It avoids race conditions + // 2. It allows the app to use the EVENT_HANDLED queue to ensure that + // the SW state has been copied over to the local state. This is + // useful in the same sense that `chelonia/contract/wait` is useful + // (i.e., set up a barrier / sync checkpoint). 
+ (0, sbp_1.default)('okTurtles.eventQueue/queueEvent', events_js_1.EVENT_HANDLED, async () => { + const { contractState, cheloniaState } = await (0, sbp_1.default)('chelonia/contract/fullState', contractID); + const externalState = (0, sbp_1.default)(stateSelector); + if (cheloniaState) { + if (!externalState.contracts) { + reactiveSet(externalState, 'contracts', Object.create(null)); + } + reactiveSet(externalState.contracts, contractID, (0, turtledash_1.cloneDeep)(cheloniaState)); + } + else if (externalState.contracts) { + reactiveDel(externalState.contracts, contractID); + } + if (contractState) { + reactiveSet(externalState, contractID, (0, turtledash_1.cloneDeep)(contractState)); + } + else { + reactiveDel(externalState, contractID); + } + // This EVENT_HANDLED_READY event lets the current context (e.g., tab) + // know that an event has been processed _and_ committed to the state + // (as opposed to EVENT_HANDLED, which means the event was processed by + // _Chelonia_ but state changes may not be reflected in the current tab + // yet). + (0, sbp_1.default)('okTurtles.events/emit', events_js_1.EVENT_HANDLED_READY, contractID, message); + }); + }); + (0, sbp_1.default)('okTurtles.events/on', events_js_1.CONTRACTS_MODIFIED, (subscriptionSet, { added, removed, permanent }) => { + (0, sbp_1.default)('okTurtles.eventQueue/queueEvent', events_js_1.EVENT_HANDLED, async () => { + const states = added.length + ? 
await (0, sbp_1.default)('chelonia/contract/fullState', added) + : {}; + const vuexState = (0, sbp_1.default)('state/vuex/state'); + if (!vuexState.contracts) { + reactiveSet(vuexState, 'contracts', Object.create(null)); + } + removed.forEach((contractID) => { + if (permanent) { + reactiveSet(vuexState.contracts, contractID, null); + } + else { + reactiveDel(vuexState.contracts, contractID); + } + reactiveDel(vuexState, contractID); + }); + for (const contractID of added) { + const { contractState, cheloniaState } = states[contractID]; + if (cheloniaState) { + reactiveSet(vuexState.contracts, contractID, (0, turtledash_1.cloneDeep)(cheloniaState)); + } + if (contractState) { + reactiveSet(vuexState, contractID, (0, turtledash_1.cloneDeep)(contractState)); + } + } + (0, sbp_1.default)('okTurtles.events/emit', events_js_1.CONTRACTS_MODIFIED_READY, subscriptionSet, { added, removed }); + }); + }); + }, + // This function is similar in purpose to `chelonia/contract/wait`, except + // that it's also designed to take into account delays copying Chelonia state + // to an external state (e.g., when using `chelonia/externalStateSetup`). + 'chelonia/externalStateWait': async function (contractID) { + await (0, sbp_1.default)('chelonia/contract/wait', contractID); + const { cheloniaState } = await (0, sbp_1.default)('chelonia/contract/fullState', contractID); + const localState = (0, sbp_1.default)(this.stateSelector); + // If the current 'local' state has a height higher than or equal to the + // Chelonia height, we've processed all events and don't need to wait any + // longer. + if (!cheloniaState || cheloniaState.height <= localState.contracts[contractID]?.height) + return; + // Otherwise, listen for `EVENT_HANDLED_READY` events till we have reached + // the necessary height. 
+ return new Promise((resolve) => { + const removeListener = (0, sbp_1.default)('okTurtles.events/on', events_js_1.EVENT_HANDLED_READY, (cID) => { + if (cID !== contractID) + return; + const localState = (0, sbp_1.default)(this.stateSelector); + if (cheloniaState.height <= localState.contracts[contractID]?.height) { + resolve(); + removeListener(); + } + }); + }); + } +}); diff --git a/dist/cjs/local-selectors/index.d.cts b/dist/cjs/local-selectors/index.d.cts new file mode 100644 index 0000000..d451d2b --- /dev/null +++ b/dist/cjs/local-selectors/index.d.cts @@ -0,0 +1,2 @@ +declare const _default: string[]; +export default _default; diff --git a/dist/cjs/persistent-actions.cjs b/dist/cjs/persistent-actions.cjs new file mode 100644 index 0000000..588beec --- /dev/null +++ b/dist/cjs/persistent-actions.cjs @@ -0,0 +1,219 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PersistentAction = void 0; +require("@sbp/okturtles.events"); +const sbp_1 = __importDefault(require("@sbp/sbp")); +const events_js_1 = require("./events.cjs"); +// Using `Symbol` to prevent enumeration; this avoids JSON serialization. +const timer = Symbol('timer'); +const coerceToError = (arg) => { + if (arg && arg instanceof Error) + return arg; + console.warn(tag, 'Please use Error objects when throwing or rejecting'); + return new Error((typeof arg === 'string' ? arg : JSON.stringify(arg)) ?? 
'undefined'); +}; +const defaultOptions = { + maxAttempts: Number.POSITIVE_INFINITY, + retrySeconds: 30 +}; +const tag = '[chelonia.persistentActions]'; +class PersistentAction { + id; + invocation; + options; + status; + [timer]; + constructor(invocation, options = {}) { + this.id = crypto.randomUUID(); + this.invocation = invocation; + this.options = { ...defaultOptions, ...options }; + this.status = { + attempting: false, + failedAttemptsSoFar: 0, + lastError: '', + nextRetry: '', + resolved: false + }; + } + async attempt() { + // Bail out if the action is already attempting or resolved. + // TODO: should we also check whether the skipCondition call is pending? + if (this.status.attempting || this.status.resolved) + return; + if (await this.trySBP(this.options.skipCondition)) + this.cancel(); + // We need to check this again because cancel() could have been called while awaiting the trySBP call. + if (this.status.resolved) + return; + try { + this.status.attempting = true; + const result = await (0, sbp_1.default)(...this.invocation); + this.status.attempting = false; + this.handleSuccess(result); + } + catch (error) { + this.status.attempting = false; + await this.handleError(coerceToError(error)); + } + } + cancel() { + if (this[timer]) + clearTimeout(this[timer]); + this.status.nextRetry = ''; + this.status.resolved = true; + } + async handleError(error) { + const { id, options, status } = this; + // Update relevant status fields before calling any optional code. + status.failedAttemptsSoFar++; + status.lastError = error.message; + const anyAttemptLeft = options.maxAttempts > status.failedAttemptsSoFar; + if (!anyAttemptLeft) + status.resolved = true; + status.nextRetry = anyAttemptLeft && !status.resolved + ? new Date(Date.now() + options.retrySeconds * 1e3).toISOString() + : ''; + // Perform any optional SBP invocation. + // The event has to be fired first for the action to be immediately removed from the list. 
+ (0, sbp_1.default)('okTurtles.events/emit', events_js_1.PERSISTENT_ACTION_FAILURE, { error, id }); + await this.trySBP(options.errorInvocation); + if (!anyAttemptLeft) { + (0, sbp_1.default)('okTurtles.events/emit', events_js_1.PERSISTENT_ACTION_TOTAL_FAILURE, { error, id }); + await this.trySBP(options.totalFailureInvocation); + } + // Schedule a retry if appropriate. + if (status.nextRetry) { + // Note: there should be no older active timeout to clear. + this[timer] = setTimeout(() => { + this.attempt().catch((e) => { + console.error('Error attempting persistent action', id, e); + }); + }, this.options.retrySeconds * 1e3); + } + } + handleSuccess(result) { + const { id, status } = this; + status.lastError = ''; + status.nextRetry = ''; + status.resolved = true; + (0, sbp_1.default)('okTurtles.events/emit', events_js_1.PERSISTENT_ACTION_SUCCESS, { id, result }); + } + async trySBP(invocation) { + try { + return invocation ? await (0, sbp_1.default)(...invocation) : undefined; + } + catch (error) { + console.error(tag, coerceToError(error).message); + } + } +} +exports.PersistentAction = PersistentAction; +exports.default = (0, sbp_1.default)('sbp/selectors/register', { + 'chelonia.persistentActions/_init'() { + this.actionsByID = Object.create(null); + this.checkDatabaseKey = () => { + if (!this.databaseKey) + throw new TypeError(`${tag} No database key configured`); + }; + (0, sbp_1.default)('okTurtles.events/on', events_js_1.PERSISTENT_ACTION_SUCCESS, ({ id }) => { + (0, sbp_1.default)('chelonia.persistentActions/cancel', id); + }); + (0, sbp_1.default)('okTurtles.events/on', events_js_1.PERSISTENT_ACTION_TOTAL_FAILURE, ({ id }) => { + (0, sbp_1.default)('chelonia.persistentActions/cancel', id); + }); + }, + // Cancels a specific action by its ID. + // The action won't be retried again, but an async action cannot be aborted if its promise is stil attempting. 
+ async 'chelonia.persistentActions/cancel'(id) { + if (id in this.actionsByID) { + this.actionsByID[id].cancel(); + // Note: this renders the `.status` update in `.cancel()` meainingless, as + // the action will be immediately removed. TODO: Implement as periodic + // prune action so that actions are removed some time after completion. + // This way, one could implement action status reporting to clients. + delete this.actionsByID[id]; + return await (0, sbp_1.default)('chelonia.persistentActions/save'); + } + }, + // TODO: validation + 'chelonia.persistentActions/configure'({ databaseKey, options = {} }) { + this.databaseKey = databaseKey; + for (const key in options) { + if (key in defaultOptions) { + defaultOptions[key] = options[key]; + } + else { + throw new TypeError(`${tag} Unknown option: ${key}`); + } + } + }, + 'chelonia.persistentActions/enqueue'(...args) { + const ids = []; + for (const arg of args) { + const action = Array.isArray(arg) + ? new PersistentAction(arg) + : new PersistentAction(arg.invocation, arg); + this.actionsByID[action.id] = action; + ids.push(action.id); + } + (0, sbp_1.default)('chelonia.persistentActions/save').catch((e) => { + console.error('Error saving persistent actions', e); + }); + for (const id of ids) { + this.actionsByID[id].attempt().catch((e) => { + console.error('Error attempting persistent action', id, e); + }); + } + return ids; + }, + // Forces retrying a given persisted action immediately, rather than waiting for the scheduled retry. + // - 'status.failedAttemptsSoFar' will still be increased upon failure. + // - Does nothing if a retry is already running. + // - Does nothing if the action has already been resolved, rejected or cancelled. + 'chelonia.persistentActions/forceRetry'(id) { + if (id in this.actionsByID) { + return this.actionsByID[id].attempt(); + } + }, + // Loads and tries every stored persistent action under the configured database key. 
+ async 'chelonia.persistentActions/load'() { + this.checkDatabaseKey(); + const storedActions = JSON.parse((await (0, sbp_1.default)('chelonia.db/get', this.databaseKey)) ?? '[]'); + for (const { id, invocation, options } of storedActions) { + this.actionsByID[id] = new PersistentAction(invocation, options); + // Use the stored ID instead of the autogenerated one. + // TODO: find a cleaner alternative. + this.actionsByID[id].id = id; + } + return (0, sbp_1.default)('chelonia.persistentActions/retryAll'); + }, + // Retry all existing persisted actions. + // TODO: add some delay between actions so as not to spam the server, + // or have a way to issue them all at once in a single network call. + 'chelonia.persistentActions/retryAll'() { + return Promise.allSettled(Object.keys(this.actionsByID).map(id => (0, sbp_1.default)('chelonia.persistentActions/forceRetry', id))); + }, + // Updates the database version of the attempting action list. + 'chelonia.persistentActions/save'() { + this.checkDatabaseKey(); + return (0, sbp_1.default)('chelonia.db/set', this.databaseKey, JSON.stringify(Object.values(this.actionsByID))); + }, + 'chelonia.persistentActions/status'() { + return Object.values(this.actionsByID) + .map((action) => ({ id: action.id, invocation: action.invocation, ...action.status })); + }, + // Pauses every currently loaded action, and removes them from memory. + // Note: persistent storage is not affected, so that these actions can be later loaded again and retried. + 'chelonia.persistentActions/unload'() { + for (const id in this.actionsByID) { + // Clear the action's timeout, but don't cancel it so that it can later resumed. 
+ if (this.actionsByID[id][timer]) { + clearTimeout(this.actionsByID[id][timer]); + } + delete this.actionsByID[id]; + } + } +}); diff --git a/dist/cjs/persistent-actions.d.cts b/dist/cjs/persistent-actions.d.cts new file mode 100644 index 0000000..cd55d64 --- /dev/null +++ b/dist/cjs/persistent-actions.d.cts @@ -0,0 +1,51 @@ +import '@sbp/okturtles.events'; +import sbp from '@sbp/sbp'; +declare const timer: unique symbol; +type SbpInvocation = Parameters; +export type UUIDV4 = `${string}-${string}-${string}-${string}-${string}`; +type PersistentActionOptions = { + errorInvocation?: SbpInvocation; + maxAttempts: number; + retrySeconds: number; + skipCondition?: SbpInvocation; + totalFailureInvocation?: SbpInvocation; +}; +export type PersistentActionStatus = { + attempting: boolean; + failedAttemptsSoFar: number; + lastError: string; + nextRetry: string; + resolved: boolean; +}; +export type PersistentActionError = { + id: UUIDV4; + error: Error; +}; +export type PersistentActionSuccess = { + id: UUIDV4; + result: unknown; +}; +export type PersistentActionSbpStatus = { + id: UUIDV4; + invocation: SbpInvocation; + attempting: boolean; + failedAttemptsSoFar: number; + lastError: string; + nextRetry: string; + resolved: boolean; +}; +export declare class PersistentAction { + id: UUIDV4; + invocation: SbpInvocation; + options: PersistentActionOptions; + status: PersistentActionStatus; + [timer]?: ReturnType; + constructor(invocation: SbpInvocation, options?: Partial); + attempt(): Promise; + cancel(): void; + handleError(error: Error): Promise; + handleSuccess(result: unknown): void; + trySBP(invocation: SbpInvocation | void): Promise; +} +declare const _default: string[]; +export default _default; diff --git a/dist/cjs/presets.cjs b/dist/cjs/presets.cjs new file mode 100644 index 0000000..4eeefa5 --- /dev/null +++ b/dist/cjs/presets.cjs @@ -0,0 +1,25 @@ +"use strict"; +// Right now, we only have a single preset, for the server. 
If this remains the +// case and only the server is special regarding configuration, consider +// introducing a `server: true` key to `chelonia/configure` instead. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SERVER = void 0; +exports.SERVER = { + // We don't check the subscriptionSet in the server because we accept new + // contract registrations, and are also not subscribed to contracts the same + // way clients are + acceptAllMessages: true, + // The server also doesn't process actions + skipActionProcessing: true, + // The previous setting implies this one, which we set to be on the safe side + skipSideEffects: true, + // Changes the behaviour of unwrapMaybeEncryptedData so that it never decrypts. + // Mostly useful for the server, to avoid filling up the logs and for faster + // execution. + skipDecryptionAttempts: true, + // If an error occurs during processing, the message is rejected rather than + // ignored + strictProcessing: true, + // The server expects events to be received in order (no past or future events) + strictOrdering: true +}; diff --git a/dist/cjs/presets.d.cts b/dist/cjs/presets.d.cts new file mode 100644 index 0000000..3a8b619 --- /dev/null +++ b/dist/cjs/presets.d.cts @@ -0,0 +1,8 @@ +export declare const SERVER: { + acceptAllMessages: boolean; + skipActionProcessing: boolean; + skipSideEffects: boolean; + skipDecryptionAttempts: boolean; + strictProcessing: boolean; + strictOrdering: boolean; +}; diff --git a/dist/cjs/pubsub/index.cjs b/dist/cjs/pubsub/index.cjs new file mode 100644 index 0000000..6a1432f --- /dev/null +++ b/dist/cjs/pubsub/index.cjs @@ -0,0 +1,641 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ?
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.messageParser = exports.PUBSUB_SUBSCRIPTION_SUCCEEDED = exports.PUBSUB_RECONNECTION_SUCCEEDED = exports.PUBSUB_RECONNECTION_SCHEDULED = exports.PUBSUB_RECONNECTION_FAILED = exports.PUBSUB_RECONNECTION_ATTEMPT = exports.PUBSUB_ERROR = exports.PUSH_SERVER_ACTION_TYPE = exports.RESPONSE_TYPE = exports.REQUEST_TYPE = exports.NOTIFICATION_TYPE = void 0; +exports.createClient = createClient; +exports.createMessage = createMessage; +exports.createKvMessage = createKvMessage; +exports.createPubMessage = createPubMessage; +exports.createRequest = createRequest; +/* eslint-disable @typescript-eslint/no-this-alias */ +require("@sbp/okturtles.events"); +const sbp_1 = __importDefault(require("@sbp/sbp")); +// ====== Enums ====== // +exports.NOTIFICATION_TYPE = Object.freeze({ + ENTRY: 'entry', + DELETION: 'deletion', + KV: 'kv', + KV_FILTER: 'kv_filter', + PING: 'ping', + PONG: 'pong', + PUB: 'pub', + SUB: 'sub', + UNSUB: 'unsub', + VERSION_INFO: 'version_info' +}); +exports.REQUEST_TYPE = Object.freeze({ + PUB: 'pub', + SUB: 'sub', + UNSUB: 'unsub', + PUSH_ACTION: 'push_action', + KV_FILTER: 'kv_filter' +}); +exports.RESPONSE_TYPE = Object.freeze({ + ERROR: 'error', + OK: 'ok' +}); +exports.PUSH_SERVER_ACTION_TYPE = Object.freeze({ + SEND_PUBLIC_KEY: 'send-public-key', + STORE_SUBSCRIPTION: 'store-subscription', + DELETE_SUBSCRIPTION: 'delete-subscription', + SEND_PUSH_NOTIFICATION: 'send-push-notification' +}); +// TODO: verify these are good defaults +const defaultOptions = { + logPingMessages: process.env.NODE_ENV === 'development' && !process.env.CI, + pingTimeout: 45000, + maxReconnectionDelay: 60000, + maxRetries: 10, + minReconnectionDelay: 500, + reconnectOnDisconnection: true, + reconnectOnOnline: true, + // Defaults to false to avoid reconnection attempts in case the server doesn't + // respond because of a failed authentication. 
+ reconnectOnTimeout: false, + reconnectionDelayGrowFactor: 2, + timeout: 60000 +}; +// ====== Event name constants ====== // +exports.PUBSUB_ERROR = 'pubsub-error'; +exports.PUBSUB_RECONNECTION_ATTEMPT = 'pubsub-reconnection-attempt'; +exports.PUBSUB_RECONNECTION_FAILED = 'pubsub-reconnection-failed'; +exports.PUBSUB_RECONNECTION_SCHEDULED = 'pubsub-reconnection-scheduled'; +exports.PUBSUB_RECONNECTION_SUCCEEDED = 'pubsub-reconnection-succeeded'; +exports.PUBSUB_SUBSCRIPTION_SUCCEEDED = 'pubsub-subscription-succeeded'; +// ====== API ====== // +/** + * Creates a pubsub client instance. + * + * @param {string} url - A WebSocket URL to connect to. + * @param {Object?} options + * {object?} handlers - Custom handlers for WebSocket events. + * {boolean?} logPingMessages - Whether to log received pings. + * {boolean?} manual - Whether the factory should call 'connect()' automatically. + * Also named 'autoConnect' or 'startClosed' in other libraries. + * {object?} messageHandlers - Custom handlers for different message types. + * {number?} pingTimeout=45_000 - How long to wait for the server to send a ping, in milliseconds. + * {boolean?} reconnectOnDisconnection=true - Whether to reconnect after a server-side disconnection. + * {boolean?} reconnectOnOnline=true - Whether to reconnect after coming back online. + * {boolean?} reconnectOnTimeout=false - Whether to reconnect after a connection timeout. + * {number?} timeout=60_000 - Connection timeout duration in milliseconds. + * @returns {PubSubClient} + */ +function createClient(url, options = {}) { + const client = { + customEventHandlers: options.handlers || {}, + // The current number of connection attempts that failed. + // Reset to 0 upon successful connection. + // Used to compute how long to wait before the next reconnection attempt. + failedConnectionAttempts: 0, + isLocal: /\/\/(localhost|127\.0\.0\.1)([:?/]|$)/.test(url), + // True if this client has never been connected yet.
+ isNew: true, + listeners: Object.create(null), + messageHandlers: { ...defaultMessageHandlers, ...options.messageHandlers }, + nextConnectionAttemptDelayID: undefined, + options: { ...defaultOptions, ...options }, + // Requested subscriptions for which we didn't receive a response yet. + pendingSubscriptionSet: new Set(), + pendingUnsubscriptionSet: new Set(), + pingTimeoutID: undefined, + shouldReconnect: true, + // The underlying WebSocket object. + // A new one is necessary for every connection or reconnection attempt. + socket: null, + subscriptionSet: new Set(), + kvFilter: new Map(), + connectionTimeoutID: undefined, + url: url.replace(/^http/, 'ws'), + ...publicMethods + }; + // Create and save references to reusable event listeners. + // Every time a new underlying WebSocket object will be created for this + // client instance, these event listeners will be detached from the older + // socket then attached to the new one, hereby avoiding both unnecessary + // allocations and garbage collections of a bunch of functions every time. + // Another benefit is the ability to patch the client protocol at runtime by + // updating the client's custom event handler map. + for (const name of Object.keys(defaultClientEventHandlers)) { + client.listeners[name] = (event) => { + try { + // Use `.call()` to pass the client via the 'this' binding. + ; + defaultClientEventHandlers[name].call(client, event); + client.customEventHandlers[name]?.call(client, event); + } + catch (error) { + // Do not throw any error but emit an `error` event instead. + (0, sbp_1.default)('okTurtles.events/emit', exports.PUBSUB_ERROR, client, error?.message); + } + }; + } + // Add global event listeners before the first connection. 
+ if (typeof self === 'object' && self instanceof EventTarget) { + for (const name of globalEventNames) { + globalEventMap.set(name, client.listeners[name]); + } + } + if (!client.options.manual) { + client.connect(); + } + return client; +} +function createMessage(type, data, meta) { + const message = { ...meta, type, data }; + let string; + const stringify = function () { + if (!string) + string = JSON.stringify(this); + return string; + }; + Object.defineProperties(message, { + [Symbol.toPrimitive]: { + value: stringify + } + }); + return message; +} +function createKvMessage(channelID, key, data) { + return JSON.stringify({ type: exports.NOTIFICATION_TYPE.KV, channelID, key, data }); +} +function createPubMessage(channelID, data) { + return JSON.stringify({ type: exports.NOTIFICATION_TYPE.PUB, channelID, data }); +} +function createRequest(type, data) { + // Had to use Object.assign() instead of object spreading to make Flow happy. + return JSON.stringify(Object.assign({ type }, data)); +} +// These handlers receive the PubSubClient instance through the `this` binding. +const defaultClientEventHandlers = { + // Emitted when the connection is closed. + close(event) { + const client = this; + console.debug('[pubsub] Event: close', event.code, event.reason); + client.failedConnectionAttempts++; + if (client.socket) { + // Remove event listeners to avoid memory leaks. + for (const name of socketEventNames) { + client.socket.removeEventListener(name, client.listeners[name]); + } + } + client.socket = null; + client.clearAllTimers(); + // This has been commented out to make the client always try to reconnect. + // See https://github.com/okTurtles/group-income/issues/1246 + /* + // See "Status Codes" https://tools.ietf.org/html/rfc6455#section-7.4 + switch (event.code) { + // TODO: verify that this list of codes is correct. 
+ case 1000: case 1002: case 1003: case 1007: case 1008: { + client.shouldReconnect = false + break + } + default: break + } + */ + // If we should reconnect then consider our current subscriptions as pending again, + // waiting to be restored upon reconnection. + if (client.shouldReconnect) { + client.subscriptionSet.forEach((channelID) => { + // Skip contracts from which we had to unsubscribe anyway. + if (!client.pendingUnsubscriptionSet.has(channelID)) { + client.pendingSubscriptionSet.add(channelID); + } + }); + } + // We are no longer subscribed to any contracts since we are now disconnected. + client.subscriptionSet.clear(); + client.pendingUnsubscriptionSet.clear(); + if (client.shouldReconnect && client.options.reconnectOnDisconnection) { + if (client.failedConnectionAttempts > client.options.maxRetries) { + (0, sbp_1.default)('okTurtles.events/emit', exports.PUBSUB_RECONNECTION_FAILED, client); + } + else { + // If we are definetely offline then do not try to reconnect now, + // unless the server is local. + if (!isDefinetelyOffline() || client.isLocal) { + client.scheduleConnectionAttempt(); + } + } + } + }, + // Emitted when an error has occured. + // The socket will be closed automatically by the engine if necessary. + error(event) { + const client = this; + // Not all error events should be logged with console.error, for example every + // failed connection attempt generates one such event. + console.warn('[pubsub] Event: error', event); + clearTimeout(client.pingTimeoutID); + }, + // Emitted when a message is received. + // The connection will be terminated if the message is malformed or has an + // unexpected data type (e.g. binary instead of text). 
+ message(event) { + const client = this; + const { data } = event; + if (typeof data !== 'string') { + (0, sbp_1.default)('okTurtles.events/emit', exports.PUBSUB_ERROR, client, { + message: `Wrong data type: ${typeof data}` + }); + return client.destroy(); + } + let msg = { type: '' }; + try { + msg = (0, exports.messageParser)(data); + } + catch (error) { + (0, sbp_1.default)('okTurtles.events/emit', exports.PUBSUB_ERROR, client, { + message: `Malformed message: ${error?.message}` + }); + return client.destroy(); + } + const handler = client.messageHandlers[msg.type]; + if (handler) { + handler.call(client, msg); + } + else { + throw new Error(`Unhandled message type: ${msg.type}`); + } + }, + offline() { + console.info('[pubsub] Event: offline'); + const client = this; + client.clearAllTimers(); + // Reset the connection attempt counter so that we'll start a new + // reconnection loop when we are back online. + client.failedConnectionAttempts = 0; + client.socket?.close(); + }, + online() { + console.info('[pubsub] Event: online'); + const client = this; + if (client.options.reconnectOnOnline && client.shouldReconnect) { + if (!client.socket) { + client.failedConnectionAttempts = 0; + client.scheduleConnectionAttempt(); + } + } + }, + // Emitted when the connection is established. + open() { + console.debug('[pubsub] Event: open'); + const client = this; + const { options } = this; + client.connectionTimeUsed = undefined; + client.clearAllTimers(); + (0, sbp_1.default)('okTurtles.events/emit', exports.PUBSUB_RECONNECTION_SUCCEEDED, client); + // Set it to -1 so that it becomes 0 on the next `close` event. + client.failedConnectionAttempts = -1; + client.isNew = false; + // Setup a ping timeout if required. + // It will close the connection if we don't get any message from the server. 
+ if (options.pingTimeout > 0 && options.pingTimeout < Infinity) { + client.pingTimeoutID = setTimeout(() => { + client.socket?.close(); + }, options.pingTimeout); + } + // Send any pending subscription request. + client.pendingSubscriptionSet.forEach((channelID) => { + const kvFilter = this.kvFilter.get(channelID); + client.socket?.send(createRequest(exports.REQUEST_TYPE.SUB, kvFilter ? { channelID, kvFilter } : { channelID })); + }); + // There should be no pending unsubscription since we just got connected. + }, + 'reconnection-attempt'() { + console.info('[pubsub] Trying to reconnect...'); + }, + 'reconnection-succeeded'() { + console.info('[pubsub] Connection re-established'); + }, + 'reconnection-failed'() { + console.warn('[pubsub] Reconnection failed'); + const client = this; + client.destroy(); + }, + 'reconnection-scheduled'(event) { + const { delay, nth } = event.detail; + console.info(`[pubsub] Scheduled connection attempt ${nth} in ~${delay} ms`); + }, + 'subscription-succeeded'(event) { + const { channelID } = event.detail; + console.debug(`[pubsub] Subscribed to channel ${channelID}`); + } +}; +// These handlers receive the PubSubClient instance through the `this` binding. +const defaultMessageHandlers = { + [exports.NOTIFICATION_TYPE.ENTRY](msg) { + console.debug('[pubsub] Received ENTRY:', msg); + }, + [exports.NOTIFICATION_TYPE.PING]({ data }) { + const client = this; + if (client.options.logPingMessages) { + console.debug(`[pubsub] Ping received in ${Date.now() - Number(data)} ms`); + } + // Reply with a pong message using the same data. + // TODO: Type coercion to string because we actually support passing this + // object type, but the correct TypeScript type hasn't been written. + client.socket?.send(createMessage(exports.NOTIFICATION_TYPE.PONG, data)); + // Refresh the ping timer, waiting for the next ping. 
+ clearTimeout(client.pingTimeoutID); + client.pingTimeoutID = setTimeout(() => { + client.socket?.close(); + }, client.options.pingTimeout); + }, + [exports.NOTIFICATION_TYPE.PUB]({ channelID, data }) { + console.log(`[pubsub] Received data from channel ${channelID}:`, data); + // No need to reply. + }, + [exports.NOTIFICATION_TYPE.KV]({ channelID, key, data }) { + console.log(`[pubsub] Received KV update from channel ${channelID} ${key}:`, data); + // No need to reply. + }, + [exports.NOTIFICATION_TYPE.SUB](msg) { + console.debug(`[pubsub] Ignoring ${msg.type} message:`, msg.data); + }, + [exports.NOTIFICATION_TYPE.UNSUB](msg) { + console.debug(`[pubsub] Ignoring ${msg.type} message:`, msg.data); + }, + [exports.RESPONSE_TYPE.ERROR]({ data }) { + const { type, channelID, reason } = data; + console.warn(`[pubsub] Received ERROR response for ${type} request to ${channelID}`); + const client = this; + switch (type) { + case exports.REQUEST_TYPE.SUB: { + console.warn(`[pubsub] Could not subscribe to ${channelID}: ${reason}`); + client.pendingSubscriptionSet.delete(channelID); + break; + } + case exports.REQUEST_TYPE.UNSUB: { + console.warn(`[pubsub] Could not unsubscribe from ${channelID}: ${reason}`); + client.pendingUnsubscriptionSet.delete(channelID); + break; + } + case exports.REQUEST_TYPE.PUSH_ACTION: { + const { actionType, message } = data; + console.warn(`[pubsub] Received ERROR for PUSH_ACTION request with the action type '${actionType}' and the following message: ${message}`); + break; + } + default: { + console.error(`[pubsub] Malformed response: invalid request type ${type}`); + } + } + }, + [exports.RESPONSE_TYPE.OK]({ data: { type, channelID } }) { + const client = this; + switch (type) { + case exports.REQUEST_TYPE.SUB: { + client.pendingSubscriptionSet.delete(channelID); + client.subscriptionSet.add(channelID); + (0, sbp_1.default)('okTurtles.events/emit', exports.PUBSUB_SUBSCRIPTION_SUCCEEDED, client, { channelID }); + break; + } + case 
exports.REQUEST_TYPE.UNSUB: { + console.debug(`[pubsub] Unsubscribed from ${channelID}`); + client.pendingUnsubscriptionSet.delete(channelID); + client.subscriptionSet.delete(channelID); + client.kvFilter.delete(channelID); + break; + } + case exports.REQUEST_TYPE.KV_FILTER: { + console.debug(`[pubsub] Set KV filter for ${channelID}`); + break; + } + default: { + console.error(`[pubsub] Malformed response: invalid request type ${type}`); + } + } + } +}; +const globalEventNames = ['offline', 'online']; +const socketEventNames = ['close', 'error', 'message', 'open']; +// eslint-disable-next-line func-call-spacing +const globalEventMap = new Map(); +if (typeof self === 'object' && self instanceof EventTarget) { + // We need to do things in this roundabout way because Chrome doesn't like + // these events handlers not being top-level. + // `Event handler of 'online' event must be added on the initial evaluation of worker script.` + for (const name of globalEventNames) { + const handler = (ev) => { + const h = globalEventMap.get(name); + return h?.(ev); + }; + self.addEventListener(name, handler, false); + } +} +// `navigator.onLine` can give confusing false positives when `true`, +// so we'll define `isDefinetelyOffline()` rather than `isOnline()` or `isOffline()`. +// See https://developer.mozilla.org/en-US/docs/Web/API/Navigator/onLine +const isDefinetelyOffline = () => typeof navigator === 'object' && navigator.onLine === false; +// Parses and validates a received message. 
+const messageParser = (data) => { + const msg = JSON.parse(data); + if (typeof msg !== 'object' || msg === null) { + throw new TypeError('Message is null or not an object'); + } + const { type } = msg; + if (typeof type !== 'string' || type === '') { + throw new TypeError('Message type must be a non-empty string'); + } + return msg; +}; +exports.messageParser = messageParser; +const publicMethods = { + clearAllTimers() { + const client = this; + clearTimeout(client.connectionTimeoutID); + clearTimeout(client.nextConnectionAttemptDelayID); + clearTimeout(client.pingTimeoutID); + client.connectionTimeoutID = undefined; + client.nextConnectionAttemptDelayID = undefined; + client.pingTimeoutID = undefined; + }, + // Performs a connection or reconnection attempt. + connect() { + const client = this; + if (client.socket !== null) { + throw new Error('connect() can only be called if there is no current socket.'); + } + if (client.nextConnectionAttemptDelayID) { + throw new Error('connect() must not be called during a reconnection delay.'); + } + if (!client.shouldReconnect) { + throw new Error('connect() should no longer be called on this instance.'); + } + client.socket = new WebSocket(client.url); + // Sometimes (like when using `createMessage`), we want to send objects that + // are serialized as strings. Native web sockets don't support objects, so + // we use this workaround. 
+ client.socket.send = function (data) { + const send = WebSocket.prototype.send.bind(this); + if (typeof data === 'object' && + typeof data[Symbol.toPrimitive] === 'function') { + return send(data[Symbol.toPrimitive]()); + } + return send(data); + }; + if (client.options.timeout) { + const start = performance.now(); + client.connectionTimeoutID = setTimeout(() => { + client.connectionTimeoutID = undefined; + if (client.options.reconnectOnTimeout) { + client.connectionTimeUsed = performance.now() - start; + } + client.socket?.close(4000, 'timeout'); + }, client.options.timeout); + } + // Attach WebSocket event listeners. + for (const name of socketEventNames) { + client.socket.addEventListener(name, client.listeners[name]); + } + }, + /** + * Immediately close the socket, stop listening for events and clear any cache. + * + * This method is used in unit tests. + * - In particular, no 'close' event handler will be called. + * - Any incoming or outgoing buffered data will be discarded. + * - Any pending messages will be discarded. + */ + destroy() { + const client = this; + client.clearAllTimers(); + // Update property values. + // Note: do not clear 'client.options'. + client.pendingSubscriptionSet.clear(); + client.pendingUnsubscriptionSet.clear(); + client.subscriptionSet.clear(); + // Remove global event listeners. + if (typeof self === 'object' && self instanceof EventTarget) { + for (const name of globalEventNames) { + globalEventMap.delete(name); + } + } + // Remove WebSocket event listeners. 
+ if (client.socket) { + for (const name of socketEventNames) { + client.socket.removeEventListener(name, client.listeners[name]); + } + client.socket.close(); + } + client.listeners = Object.create(null); + client.socket = null; + client.shouldReconnect = false; + }, + getNextRandomDelay() { + const client = this; + const { maxReconnectionDelay, minReconnectionDelay, reconnectionDelayGrowFactor } = client.options; + const minDelay = minReconnectionDelay * reconnectionDelayGrowFactor ** client.failedConnectionAttempts; + const maxDelay = minDelay * reconnectionDelayGrowFactor; + const connectionTimeUsed = client.connectionTimeUsed; + client.connectionTimeUsed = undefined; + return Math.min( + // See issue #1943: Have the connection time used 'eat into' the + // reconnection time used + Math.max(minReconnectionDelay, connectionTimeUsed ? maxReconnectionDelay - connectionTimeUsed : maxReconnectionDelay), Math.round(minDelay + (0, Math.random)() * (maxDelay - minDelay))); + }, + // Schedules a connection attempt to happen after a delay computed according to + // a randomized exponential backoff algorithm variant. + scheduleConnectionAttempt() { + const client = this; + if (!client.shouldReconnect) { + throw new Error('Cannot call `scheduleConnectionAttempt()` when `shouldReconnect` is false.'); + } + if (client.nextConnectionAttemptDelayID) { + return console.warn('[pubsub] A reconnection attempt is already scheduled.'); + } + const delay = client.getNextRandomDelay(); + const nth = client.failedConnectionAttempts + 1; + client.nextConnectionAttemptDelayID = setTimeout(() => { + (0, sbp_1.default)('okTurtles.events/emit', exports.PUBSUB_RECONNECTION_ATTEMPT, client); + client.nextConnectionAttemptDelayID = undefined; + client.connect(); + }, delay); + (0, sbp_1.default)('okTurtles.events/emit', exports.PUBSUB_RECONNECTION_SCHEDULED, client, { delay, nth }); + }, + // Can be used to send ephemeral messages outside of any contract log. 
+ // Does nothing if the socket is not in the OPEN state. + pub(channelID, data) { + if (this.socket?.readyState === WebSocket.OPEN) { + this.socket.send(createPubMessage(channelID, data)); + } + }, + /** + * Sends a SUB request to the server as soon as possible. + * + * - The given channel ID will be cached until we get a relevant server + * response, allowing us to resend the same request if necessary. + * - Any identical UNSUB request that has not been sent yet will be cancelled. + * - Calling this method again before the server has responded has no effect. + * @param channelID - The ID of the channel whose updates we want to subscribe to. + */ + sub(channelID) { + const client = this; + const { socket } = this; + if (!client.pendingSubscriptionSet.has(channelID)) { + client.pendingSubscriptionSet.add(channelID); + client.pendingUnsubscriptionSet.delete(channelID); + if (socket?.readyState === WebSocket.OPEN) { + const kvFilter = client.kvFilter.get(channelID); + socket.send(createRequest(exports.REQUEST_TYPE.SUB, kvFilter ? { channelID, kvFilter } : { channelID })); + } + } + }, + /** + * Sends a KV_FILTER request to the server as soon as possible. + */ + setKvFilter(channelID, kvFilter) { + const client = this; + const { socket } = this; + if (kvFilter) { + client.kvFilter.set(channelID, kvFilter); + } + else { + client.kvFilter.delete(channelID); + } + if (client.subscriptionSet.has(channelID)) { + if (socket?.readyState === WebSocket.OPEN) { + socket.send(createRequest(exports.REQUEST_TYPE.KV_FILTER, kvFilter ? { channelID, kvFilter } : { channelID })); + } + } + }, + /** + * Sends an UNSUB request to the server as soon as possible. + * + * - The given channel ID will be cached until we get a relevant server + * response, allowing us to resend the same request if necessary. + * - Any identical SUB request that has not been sent yet will be cancelled. + * - Calling this method again before the server has responded has no effect. 
+ * @param channelID - The ID of the channel whose updates we want to unsubscribe from. + */ + unsub(channelID) { + const client = this; + const { socket } = this; + if (!client.pendingUnsubscriptionSet.has(channelID)) { + client.pendingSubscriptionSet.delete(channelID); + client.pendingUnsubscriptionSet.add(channelID); + if (socket?.readyState === WebSocket.OPEN) { + socket.send(createRequest(exports.REQUEST_TYPE.UNSUB, { channelID })); + } + } + } +}; +// Register custom SBP event listeners before the first connection. +for (const name of Object.keys(defaultClientEventHandlers)) { + if (name === 'error' || !socketEventNames.includes(name)) { + (0, sbp_1.default)('okTurtles.events/on', `pubsub-${name}`, (target, detail) => { + const ev = new CustomEvent(name, { detail }); + target.listeners[name].call(target, ev); + }); + } +} +exports.default = { + NOTIFICATION_TYPE: exports.NOTIFICATION_TYPE, + REQUEST_TYPE: exports.REQUEST_TYPE, + RESPONSE_TYPE: exports.RESPONSE_TYPE, + createClient, + createMessage, + createRequest +}; diff --git a/dist/cjs/pubsub/index.d.cts b/dist/cjs/pubsub/index.d.cts new file mode 100644 index 0000000..ad0c615 --- /dev/null +++ b/dist/cjs/pubsub/index.d.cts @@ -0,0 +1,220 @@ +import '@sbp/okturtles.events'; +import type { JSONObject, JSONType } from '../types.cjs'; +export declare const NOTIFICATION_TYPE: Readonly<{ + ENTRY: "entry"; + DELETION: "deletion"; + KV: "kv"; + KV_FILTER: "kv_filter"; + PING: "ping"; + PONG: "pong"; + PUB: "pub"; + SUB: "sub"; + UNSUB: "unsub"; + VERSION_INFO: "version_info"; +}>; +export declare const REQUEST_TYPE: Readonly<{ + PUB: "pub"; + SUB: "sub"; + UNSUB: "unsub"; + PUSH_ACTION: "push_action"; + KV_FILTER: "kv_filter"; +}>; +export declare const RESPONSE_TYPE: Readonly<{ + ERROR: "error"; + OK: "ok"; +}>; +export declare const PUSH_SERVER_ACTION_TYPE: Readonly<{ + SEND_PUBLIC_KEY: "send-public-key"; + STORE_SUBSCRIPTION: "store-subscription"; + DELETE_SUBSCRIPTION: "delete-subscription"; + 
SEND_PUSH_NOTIFICATION: "send-push-notification"; +}>; +export type NotificationTypeEnum = typeof NOTIFICATION_TYPE[keyof typeof NOTIFICATION_TYPE]; +export type RequestTypeEnum = typeof REQUEST_TYPE[keyof typeof REQUEST_TYPE]; +export type ResponseTypeEnum = typeof RESPONSE_TYPE[keyof typeof RESPONSE_TYPE]; +type TimeoutID = ReturnType; +export type Options = { + logPingMessages: boolean; + pingTimeout: number; + maxReconnectionDelay: number; + maxRetries: number; + minReconnectionDelay: number; + reconnectOnDisconnection: boolean; + reconnectOnOnline: boolean; + reconnectOnTimeout: boolean; + reconnectionDelayGrowFactor: number; + timeout: number; + manual?: boolean; + handlers?: Partial; + messageHandlers?: Partial; +}; +export type Message = { + [key: string]: JSONType; + type: string; +}; +export type PubSubClient = { + connectionTimeoutID: TimeoutID | undefined; + connectionTimeUsed?: number; + customEventHandlers: Partial; + failedConnectionAttempts: number; + isLocal: boolean; + isNew: boolean; + listeners: ClientEventHandlers; + messageHandlers: MessageHandlers; + nextConnectionAttemptDelayID: TimeoutID | undefined; + options: Options; + pendingSubscriptionSet: Set; + pendingUnsubscriptionSet: Set; + pingTimeoutID: TimeoutID | undefined; + shouldReconnect: boolean; + socket: WebSocket | null; + subscriptionSet: Set; + kvFilter: Map; + url: string; + clearAllTimers(this: PubSubClient): void; + connect(this: PubSubClient): void; + destroy(this: PubSubClient): void; + pub(this: PubSubClient, channelID: string, data: JSONType): void; + scheduleConnectionAttempt(this: PubSubClient): void; + sub(this: PubSubClient, channelID: string): void; + unsub(this: PubSubClient, channelID: string): void; + getNextRandomDelay(this: PubSubClient): number; + setKvFilter(this: PubSubClient, channelID: string, kvFilter?: string[]): void; +}; +type ClientEventHandlers = { + close(this: PubSubClient, event: CloseEvent): void; + error(this: PubSubClient, event: Event): void; + 
message(this: PubSubClient, event: MessageEvent): void; + offline(this: PubSubClient, event: Event): void; + online(this: PubSubClient, event: Event): void; + open(this: PubSubClient, event: Event): void; + 'reconnection-attempt'(this: PubSubClient, event: CustomEvent): void; + 'reconnection-succeeded'(this: PubSubClient, event: CustomEvent): void; + 'reconnection-failed'(this: PubSubClient, event: CustomEvent): void; + 'reconnection-scheduled'(this: PubSubClient, event: CustomEvent): void; + 'subscription-succeeded'(this: PubSubClient, event: CustomEvent): void; +}; +type MessageHandlers = { + [NOTIFICATION_TYPE.ENTRY](this: PubSubClient, msg: { + data: JSONType; + type: string; + [x: string]: unknown; + }): void; + [NOTIFICATION_TYPE.PING](this: PubSubClient, msg: { + data: JSONType; + }): void; + [NOTIFICATION_TYPE.PUB](this: PubSubClient, msg: { + channelID: string; + data: JSONType; + }): void; + [NOTIFICATION_TYPE.KV](this: PubSubClient, msg: { + channelID: string; + key: string; + data: JSONType; + }): void; + [NOTIFICATION_TYPE.SUB](this: PubSubClient, msg: { + channelID: string; + type: string; + data: JSONType; + }): void; + [NOTIFICATION_TYPE.UNSUB](this: PubSubClient, msg: { + channelID: string; + type: string; + data: JSONType; + }): void; + [RESPONSE_TYPE.ERROR](this: PubSubClient, msg: { + data: { + type: string; + channelID: string; + data: JSONType; + reason: string; + actionType?: string; + message?: string; + }; + }): void; + [RESPONSE_TYPE.OK](this: PubSubClient, msg: { + data: { + type: string; + channelID: string; + }; + }): void; +}; +export type PubMessage = { + type: 'pub'; + channelID: string; + data: JSONType; +}; +export type SubMessage = { + [key: string]: JSONType; + type: 'sub'; + channelID: string; +} & { + kvFilter?: Array; +}; +export type UnsubMessage = { + [key: string]: JSONType; + type: 'unsub'; + channelID: string; +}; +export declare const PUBSUB_ERROR = "pubsub-error"; +export declare const PUBSUB_RECONNECTION_ATTEMPT = 
"pubsub-reconnection-attempt"; +export declare const PUBSUB_RECONNECTION_FAILED = "pubsub-reconnection-failed"; +export declare const PUBSUB_RECONNECTION_SCHEDULED = "pubsub-reconnection-scheduled"; +export declare const PUBSUB_RECONNECTION_SUCCEEDED = "pubsub-reconnection-succeeded"; +export declare const PUBSUB_SUBSCRIPTION_SUCCEEDED = "pubsub-subscription-succeeded"; +/** + * Creates a pubsub client instance. + * + * @param {string} url - A WebSocket URL to connect to. + * @param {Object?} options + * {object?} handlers - Custom handlers for WebSocket events. + * {boolean?} logPingMessages - Whether to log received pings. + * {boolean?} manual - Whether the factory should call 'connect()' automatically. + * Also named 'autoConnect' or 'startClosed' in other libraries. + * {object?} messageHandlers - Custom handlers for different message types. + * {number?} pingTimeout=45_000 - How long to wait for the server to send a ping, in milliseconds. + * {boolean?} reconnectOnDisconnection=true - Whether to reconnect after a server-side disconnection. + * {boolean?} reconnectOnOnline=true - Whether to reconnect after coming back online. + * {boolean?} reconnectOnTimeout=false - Whether to reconnect after a connection timeout. + * {number?} timeout=60_000 - Connection timeout duration in milliseconds.
+ * @returns {PubSubClient} + */ +export declare function createClient(url: string, options?: Partial): PubSubClient; +export declare function createMessage(type: string, data: JSONType, meta?: object | null | undefined): { + type: string; + data: JSONType; + [x: string]: unknown; +}; +export declare function createKvMessage(channelID: string, key: string, data: JSONType): string; +export declare function createPubMessage(channelID: string, data: JSONType): string; +export declare function createRequest(type: RequestTypeEnum, data: JSONObject): string; +export declare const messageParser: (data: string) => Message; +declare const _default: { + NOTIFICATION_TYPE: Readonly<{ + ENTRY: "entry"; + DELETION: "deletion"; + KV: "kv"; + KV_FILTER: "kv_filter"; + PING: "ping"; + PONG: "pong"; + PUB: "pub"; + SUB: "sub"; + UNSUB: "unsub"; + VERSION_INFO: "version_info"; + }>; + REQUEST_TYPE: Readonly<{ + PUB: "pub"; + SUB: "sub"; + UNSUB: "unsub"; + PUSH_ACTION: "push_action"; + KV_FILTER: "kv_filter"; + }>; + RESPONSE_TYPE: Readonly<{ + ERROR: "error"; + OK: "ok"; + }>; + createClient: typeof createClient; + createMessage: typeof createMessage; + createRequest: typeof createRequest; +}; +export default _default; diff --git a/dist/cjs/signedData.cjs b/dist/cjs/signedData.cjs new file mode 100644 index 0000000..63140ff --- /dev/null +++ b/dist/cjs/signedData.cjs @@ -0,0 +1,283 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.rawSignedIncomingData = exports.isRawSignedData = exports.signedDataKeyId = exports.signedIncomingData = exports.signedOutgoingDataWithRawKey = exports.signedOutgoingData = exports.isSignedData = void 0; +const crypto_1 = require("@chelonia/crypto"); +const sbp_1 = __importDefault(require("@sbp/sbp")); +const turtledash_1 = require("turtledash"); +const errors_js_1 = require("./errors.cjs"); +const functions_js_1 = require("./functions.cjs"); +const rootStateFn = () => (0, sbp_1.default)('chelonia/rootState'); +// `proto` & `wrapper` are utilities for `isSignedData` +const proto = Object.create(null, { + _isSignedData: { + value: true + } +}); +const wrapper = (o) => { + return Object.setPrototypeOf(o, proto); +}; +// `isSignedData` will return true for objects created by the various +// `signed*Data` functions. It's meant to implement functionality equivalent +// to `o instanceof SignedData` +const isSignedData = (o) => { + return !!o && !!Object.getPrototypeOf(o)?._isSignedData; +}; +exports.isSignedData = isSignedData; +// TODO: Check for permissions and allowedActions; this requires passing some +// additional context +const signData = function (stateOrContractID, sKeyId, data, extraFields, additionalKeys, additionalData) { + const state = typeof stateOrContractID === 'string' ? rootStateFn()[stateOrContractID] : stateOrContractID; + if (!additionalData) { + throw new errors_js_1.ChelErrorSignatureError('Signature additional data must be provided'); + } + // Has the key been revoked? 
If so, attempt to find an authorized key by the same name + const designatedKey = state?._vm?.authorizedKeys?.[sKeyId]; + if (!designatedKey?.purpose.includes('sig')) { + throw new errors_js_1.ChelErrorSignatureKeyNotFound(`Signing key ID ${sKeyId} is missing or is missing signing purpose`); + } + if (designatedKey._notAfterHeight != null) { + const name = state._vm.authorizedKeys[sKeyId].name; + const newKeyId = Object.values(state._vm?.authorizedKeys).find((v) => v._notAfterHeight == null && v.name === name && v.purpose.includes('sig'))?.id; + if (!newKeyId) { + throw new errors_js_1.ChelErrorSignatureKeyNotFound(`Signing key ID ${sKeyId} has been revoked and no new key exists by the same name (${name})`); + } + sKeyId = newKeyId; + } + const key = additionalKeys[sKeyId]; + if (!key) { + throw new errors_js_1.ChelErrorSignatureKeyNotFound(`Missing signing key ${sKeyId}`); + } + const deserializedKey = typeof key === 'string' ? (0, crypto_1.deserializeKey)(key) : key; + const serializedData = JSON.stringify(data, (_, v) => { + if (v && (0, turtledash_1.has)(v, 'serialize') && typeof v.serialize === 'function') { + if (v.serialize.length === 1) { + return v.serialize(additionalData); + } + else { + return v.serialize(); + } + } + return v; + }); + const payloadToSign = (0, functions_js_1.blake32Hash)(`${(0, functions_js_1.blake32Hash)(additionalData)}${(0, functions_js_1.blake32Hash)(serializedData)}`); + return { + ...extraFields, + _signedData: [ + serializedData, + (0, crypto_1.keyId)(deserializedKey), + (0, crypto_1.sign)(deserializedKey, payloadToSign) + ] + }; +}; +// TODO: Check for permissions and allowedActions; this requires passing the +// entire SPMessage +const verifySignatureData = function (state, height, data, additionalData) { + if (!state) { + throw new errors_js_1.ChelErrorSignatureError('Missing contract state'); + } + if (!(0, exports.isRawSignedData)(data)) { + throw new errors_js_1.ChelErrorSignatureError('Invalid message format'); + } + if 
(!Number.isSafeInteger(height) || height < 0) { + throw new errors_js_1.ChelErrorSignatureError(`Height ${height} is invalid or out of range`); + } + const [serializedMessage, sKeyId, signature] = data._signedData; + const designatedKey = state._vm?.authorizedKeys?.[sKeyId]; + if (!designatedKey || (height > designatedKey._notAfterHeight) || (height < designatedKey._notBeforeHeight) || !designatedKey.purpose.includes('sig')) { + // These errors (ChelErrorSignatureKeyUnauthorized) are serious and + // indicate a bug. Make them fatal when running integration tests + // (otherwise, they get swallowed and shown as a notification) + if (process.env.CI) { + console.error(`Key ${sKeyId} is unauthorized or expired for the current contract`, { designatedKey, height, state: JSON.parse(JSON.stringify((0, sbp_1.default)('state/vuex/state'))) }); + // An unhandled promise rejection will cause Cypress to fail + Promise.reject(new errors_js_1.ChelErrorSignatureKeyUnauthorized(`Key ${sKeyId} is unauthorized or expired for the current contract`)); + } + throw new errors_js_1.ChelErrorSignatureKeyUnauthorized(`Key ${sKeyId} is unauthorized or expired for the current contract`); + } + // TODO + const deserializedKey = designatedKey.data; + const payloadToSign = (0, functions_js_1.blake32Hash)(`${(0, functions_js_1.blake32Hash)(additionalData)}${(0, functions_js_1.blake32Hash)(serializedMessage)}`); + try { + (0, crypto_1.verifySignature)(deserializedKey, payloadToSign, signature); + const message = JSON.parse(serializedMessage); + return [sKeyId, message]; + } + catch (e) { + throw new errors_js_1.ChelErrorSignatureError(e?.message || e); + } +}; +const signedOutgoingData = (stateOrContractID, sKeyId, data, additionalKeys) => { + if (!stateOrContractID || data === undefined || !sKeyId) + throw new TypeError('Invalid invocation'); + if (!additionalKeys) { + additionalKeys = rootStateFn().secretKeys; + } + const extraFields = Object.create(null); + const boundStringValueFn = 
signData.bind(null, stateOrContractID, sKeyId, data, extraFields, additionalKeys); + const serializefn = (additionalData) => boundStringValueFn(additionalData || ''); + return wrapper({ + get signingKeyId() { + return sKeyId; + }, + get serialize() { + return serializefn; + }, + get toString() { + return (additionalData) => JSON.stringify(this.serialize(additionalData)); + }, + get valueOf() { + return () => data; + }, + get recreate() { + return (data) => (0, exports.signedOutgoingData)(stateOrContractID, sKeyId, data, additionalKeys); + }, + get get() { + return (k) => extraFields[k]; + }, + get set() { + return (k, v) => { + extraFields[k] = v; + }; + } + }); +}; +exports.signedOutgoingData = signedOutgoingData; +// Used for OP_CONTRACT as a state does not yet exist +const signedOutgoingDataWithRawKey = (key, data) => { + const sKeyId = (0, crypto_1.keyId)(key); + const state = { + _vm: { + authorizedKeys: { + [sKeyId]: { + purpose: ['sig'], + data: (0, crypto_1.serializeKey)(key, false), + _notBeforeHeight: 0, + _notAfterHeight: undefined + } + } + } + }; + const extraFields = Object.create(null); + const boundStringValueFn = signData.bind(null, state, sKeyId, data, extraFields, { [sKeyId]: key }); + const serializefn = (additionalData) => boundStringValueFn(additionalData || ''); + return wrapper({ + get signingKeyId() { + return sKeyId; + }, + get serialize() { + return serializefn; + }, + get toString() { + return (additionalData) => JSON.stringify(this.serialize(additionalData)); + }, + get valueOf() { + return () => data; + }, + get recreate() { + return (data) => (0, exports.signedOutgoingDataWithRawKey)(key, data); + }, + get get() { + return (k) => extraFields[k]; + }, + get set() { + return (k, v) => { + extraFields[k] = v; + }; + } + }); +}; +exports.signedOutgoingDataWithRawKey = signedOutgoingDataWithRawKey; +const signedIncomingData = (contractID, state, data, height, additionalData, mapperFn) => { + const stringValueFn = () => data; + let 
verifySignedValue; + const verifySignedValueFn = () => { + if (verifySignedValue) { + return verifySignedValue[1]; + } + verifySignedValue = verifySignatureData(state || rootStateFn()[contractID], height, data, additionalData); + if (mapperFn) + verifySignedValue[1] = mapperFn(verifySignedValue[1]); + return verifySignedValue[1]; + }; + return wrapper({ + get signingKeyId() { + if (verifySignedValue) + return verifySignedValue[0]; + return (0, exports.signedDataKeyId)(data); + }, + get serialize() { + return stringValueFn; + }, + get context() { + return [contractID, data, height, additionalData]; + }, + get toString() { + return () => JSON.stringify(this.serialize()); + }, + get valueOf() { + return verifySignedValueFn; + }, + get toJSON() { + return this.serialize; + }, + get get() { + return (k) => k !== '_signedData' ? data[k] : undefined; + } + }); +}; +exports.signedIncomingData = signedIncomingData; +const signedDataKeyId = (data) => { + if (!(0, exports.isRawSignedData)(data)) { + throw new errors_js_1.ChelErrorSignatureError('Invalid message format'); + } + return data._signedData[1]; +}; +exports.signedDataKeyId = signedDataKeyId; +const isRawSignedData = (data) => { + if (!data || typeof data !== 'object' || !(0, turtledash_1.has)(data, '_signedData') || !Array.isArray(data._signedData) || data._signedData.length !== 3 || data._signedData.map(v => typeof v).filter(v => v !== 'string').length !== 0) { + return false; + } + return true; +}; +exports.isRawSignedData = isRawSignedData; +// WARNING: The following function (rawSignedIncomingData) will not check signatures +const rawSignedIncomingData = (data) => { + if (!(0, exports.isRawSignedData)(data)) { + throw new errors_js_1.ChelErrorSignatureError('Invalid message format'); + } + const stringValueFn = () => data; + let verifySignedValue; + const verifySignedValueFn = () => { + if (verifySignedValue) { + return verifySignedValue[1]; + } + verifySignedValue = [data._signedData[1], 
JSON.parse(data._signedData[0])]; + return verifySignedValue[1]; + }; + return wrapper({ + get signingKeyId() { + if (verifySignedValue) + return verifySignedValue[0]; + return (0, exports.signedDataKeyId)(data); + }, + get serialize() { + return stringValueFn; + }, + get toString() { + return () => JSON.stringify(this.serialize()); + }, + get valueOf() { + return verifySignedValueFn; + }, + get toJSON() { + return this.serialize; + }, + get get() { + return (k) => k !== '_signedData' ? data[k] : undefined; + } + }); +}; +exports.rawSignedIncomingData = rawSignedIncomingData; diff --git a/dist/cjs/signedData.d.cts b/dist/cjs/signedData.d.cts new file mode 100644 index 0000000..5c95eef --- /dev/null +++ b/dist/cjs/signedData.d.cts @@ -0,0 +1,32 @@ +import type { Key } from '@chelonia/crypto'; +import type { ChelContractState } from './types.cjs'; +export interface SignedData { + signingKeyId: string; + valueOf: () => T; + serialize: (additionalData?: string) => U & { + _signedData: [string, string, string]; + }; + context?: [string, U & { + _signedData: [string, string, string]; + }, number, string]; + toString: (additionalData?: string) => string; + recreate?: (data: T) => SignedData; + toJSON?: () => U & { + _signedData: [string, string, string]; + }; + get: (k: keyof U) => U[typeof k] | undefined; + set?: (k: keyof U, v: U[typeof k]) => void; +} +export declare const isSignedData: (o: unknown) => o is SignedData; +export declare const signedOutgoingData: (stateOrContractID: string | ChelContractState, sKeyId: string, data: T, additionalKeys?: Record) => SignedData; +export declare const signedOutgoingDataWithRawKey: (key: Key, data: T) => SignedData; +export declare const signedIncomingData: (contractID: string, state: object | null | undefined, data: U & { + _signedData: [string, string, string]; +}, height: number, additionalData: string, mapperFn?: (value: V) => T) => SignedData; +export declare const signedDataKeyId: (data: unknown) => string; +export declare 
const isRawSignedData: (data: unknown) => data is { + _signedData: [string, string, string]; +}; +export declare const rawSignedIncomingData: (data: U & { + _signedData: [string, string, string]; +}) => SignedData; diff --git a/dist/cjs/time-sync.cjs b/dist/cjs/time-sync.cjs new file mode 100644 index 0000000..d6d4575 --- /dev/null +++ b/dist/cjs/time-sync.cjs @@ -0,0 +1,128 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const sbp_1 = __importDefault(require("@sbp/sbp")); +// `wallBase` is the base used to calculate wall time (i.e., time elapsed as one +// would get from, e.g., looking a clock hanging from a wall). +// Although optimistically +// it has a default value to local time, it'll be updated to the server's time +// once `chelonia/private/startClockSync` is called +// From Wikipedia: 'walltime is the actual time taken from the start of a +// computer program to the end. In other words, it is the difference between +// the time at which a task finishes and the time at which the task started.' +let wallBase = Date.now(); +// `monotonicBase` is the base used to calculate an offset to apply to `wallBase` +// to estimate the server's current wall time. 
+let monotonicBase = performance.now(); +// `undefined` means the sync process has been stopped, `null` that the current +// request has finished +let resyncTimeout; +let watchdog; +const syncServerTime = async function () { + // Get our current monotonic time + const startTime = performance.now(); + // Now, ask the server for the time + const time = await this.config.fetch(`${this.config.connectionURL}/time`, { signal: this.abortController.signal }); + const requestTimeElapsed = performance.now(); + if (requestTimeElapsed - startTime > 8000) { + throw new Error('Error fetching server time: request took too long'); + } + // If the request didn't succeed, report it + if (!time.ok) + throw new Error('Error fetching server time'); + const serverTime = (new Date(await time.text())).valueOf(); + // If the value could not be parsed, report that as well + if (Number.isNaN(serverTime)) + throw new Error('Unable to parse server time'); + // Adjust `wallBase` based on the elapsed request time. We can't know + // how long it took for the server to respond, but we can estimate that it's + // about half the time from the moment we made the request. 
+ const newMonotonicBase = performance.now(); + wallBase = + serverTime + + (requestTimeElapsed - startTime) / 2 + + // Also take into account the time elapsed between `requestTimeElapsed` + // and this line (which should be very little) + (newMonotonicBase - requestTimeElapsed); + monotonicBase = newMonotonicBase; +}; +exports.default = (0, sbp_1.default)('sbp/selectors/register', { + 'chelonia/private/startClockSync': function () { + if (resyncTimeout !== undefined) { + throw new Error('chelonia/private/startClockSync has already been called'); + } + // Default re-sync every 5 minutes + const resync = (delay = 300000) => { + // If there's another time sync process in progress, don't do anything + if (resyncTimeout !== null) + return; + const timeout = setTimeout(() => { + // Get the server time + syncServerTime.call(this).then(() => { + // Mark the process as finished + if (resyncTimeout === timeout) + resyncTimeout = null; + // And then restart the listener + resync(); + }).catch(e => { + // If there was an error, log it and possibly attempt again + if (resyncTimeout === timeout) { + // In this case, it was the current task that failed + resyncTimeout = null; + console.error('Error re-syncing server time; will re-attempt in 5s', e); + // Call resync again, with a shorter delay + setTimeout(() => resync(0), 5000); + } + else { + // If there is already another attempt, just log it + console.error('Error re-syncing server time; another attempt is in progress', e); + } + }); + }, delay); + resyncTimeout = timeout; + }; + let wallLast = Date.now(); + let monotonicLast = performance.now(); + // Watchdog to ensure our time doesn't drift. 
Periodically check for + // differences between the elapsed wall time and the elapsed monotonic + // time + watchdog = setInterval(() => { + const wallNow = Date.now(); + const monotonicNow = performance.now(); + const difference = Math.abs(Math.abs((wallNow - wallLast)) - Math.abs((monotonicNow - monotonicLast))); + // Tolerate up to a 10ms difference + if (difference > 10) { + if (resyncTimeout != null) + clearTimeout(resyncTimeout); + resyncTimeout = null; + resync(0); + } + wallLast = wallNow; + monotonicLast = monotonicNow; + }, 10000); + // Start the sync process + resyncTimeout = null; + resync(0); + }, + 'chelonia/private/stopClockSync': () => { + if (resyncTimeout !== undefined) { + if (watchdog != null) + clearInterval(watchdog); + if (resyncTimeout != null) + clearTimeout(resyncTimeout); + watchdog = undefined; + resyncTimeout = undefined; + } + }, + // Get an estimate of the server's current time based on the time elapsed as + // measured locally (using a monotonic clock), which is used as an offset, and + // a previously retrieved server time. 
The time value is returned as a UNIX + // _millisecond_ timestamp (milliseconds since 1 Jan 1970 00:00:00 UTC) + 'chelonia/time': function () { + const monotonicNow = performance.now(); + const wallNow = wallBase - monotonicBase + monotonicNow; + return Math.round(wallNow); + } +}); diff --git a/dist/cjs/time-sync.d.cts b/dist/cjs/time-sync.d.cts new file mode 100644 index 0000000..d451d2b --- /dev/null +++ b/dist/cjs/time-sync.d.cts @@ -0,0 +1,2 @@ +declare const _default: string[]; +export default _default; diff --git a/dist/cjs/types.cjs b/dist/cjs/types.cjs new file mode 100644 index 0000000..241fe60 --- /dev/null +++ b/dist/cjs/types.cjs @@ -0,0 +1,3 @@ +"use strict"; +/* eslint-disable no-use-before-define */ +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/dist/cjs/types.d.cts b/dist/cjs/types.d.cts new file mode 100644 index 0000000..4146e39 --- /dev/null +++ b/dist/cjs/types.d.cts @@ -0,0 +1,324 @@ +import type { Key } from '@chelonia/crypto'; +import type sbp from '@sbp/sbp'; +import type { SPMessage, SPMsgDirection, SPOpType } from './SPMessage.cjs'; +import type { EncryptedData } from './encryptedData.cjs'; +import type { PubSubClient } from './pubsub/index.cjs'; +export type JSONType = null | string | number | boolean | JSONObject | JSONArray; +export interface JSONObject { + [x: string]: JSONType; +} +export type JSONArray = Array; +export type ResType = ResTypeErr | ResTypeOK | ResTypeAlready | ResTypeSub | ResTypeUnsub | ResTypeEntry | ResTypePub; +export type ResTypeErr = 'error'; +export type ResTypeOK = 'success'; +export type ResTypeAlready = 'already'; +export type ResTypeSub = 'sub'; +export type ResTypeUnsub = 'unsub'; +export type ResTypePub = 'pub'; +export type ResTypeEntry = 'entry'; +export type CheloniaConfig = { + [_ in `preOp_${SPOpType}`]?: (message: SPMessage, state: ChelContractState) => boolean; +} & { + [_ in `postOp_${SPOpType}`]?: (message: SPMessage, state: ChelContractState) => boolean; +} & { + 
connectionURL: string; + stateSelector: string; + contracts: { + defaults: { + modules: Record; + exposedGlobals: object; + allowedDomains: string[]; + allowedSelectors: string[]; + preferSlim: boolean; + }; + overrides: object; + manifests: Record; + }; + whitelisted: (action: string) => boolean; + reactiveSet: (obj: T, key: keyof T, value: T[typeof key]) => void; + fetch: typeof fetch; + reactiveDel: (obj: T, key: keyof T) => void; + acceptAllMessages: boolean; + skipActionProcessing: boolean; + skipSideEffects: boolean; + strictProcessing: boolean; + strictOrdering: boolean; + connectionOptions: { + maxRetries: number; + reconnectOnTimeout: boolean; + }; + preOp?: (message: SPMessage, state: ChelContractState) => boolean; + postOp?: (message: SPMessage, state: ChelContractState) => boolean; + hooks: Partial<{ + preHandleEvent: { + (message: SPMessage): Promise; + } | null; + postHandleEvent: { + (message: SPMessage): Promise; + } | null; + processError: { + (e: unknown, message: SPMessage | null | undefined, meta: object | null | undefined): void; + } | null; + sideEffectError: { + (e: unknown, message?: SPMessage): void; + } | null; + handleEventError: { + (e: unknown, message?: SPMessage): void; + } | null; + syncContractError: { + (e: unknown, contractID: string): void; + } | null; + pubsubError: { + (e: unknown, socket: PubSubClient): void; + } | null; + }>; + skipDecryptionAttempts: boolean; + unwrapMaybeEncryptedData: (data: T | EncryptedData) => { + encryptionKeyId: string | null; + data: T; + } | undefined; +}; +export type SendMessageHooks = Partial<{ + prepublish: (entry: SPMessage) => void | Promise; + onprocessed: (entry: SPMessage) => void; + preSendCheck: (entry: SPMessage, state: ChelContractState) => boolean | Promise; + beforeRequest: (newEntry: SPMessage, oldEntry: SPMessage) => void | Promise; + postpublish: (entry: SPMessage) => void | Promise; +}>; +export type ChelContractProcessMessageObject = Readonly<{ + data: object; + meta: object; + 
hash: string; + height: number; + contractID: string; + direction: SPMsgDirection; + signingKeyId: string; + signingContractID: string; + innerSigningKeyId?: string | null | undefined; + innerSigningContractID?: string | null | undefined; +}>; +export type ChelContractSideeffectMutationObject = Readonly<{ + data: object; + meta: object; + hash: string; + height: number; + contractID: string; + description: string; + direction: SPMsgDirection; + signingKeyId: string; + signingContractID: string; + innerSigningKeyId?: string | null | undefined; + innerSigningContractID?: string | null | undefined; +}>; +export type CheloniaContractCtx = { + getters: Record(state: ChelContractState, obj: T) => T[K]>; + name: string; + manifest: string; + metadata: { + create: () => object | Promise; + validate: (meta: object, { state, contractID, ...gProxy }: { + state: ChelContractState; + contractID: string; + }) => void | Promise; + }; + sbp: typeof sbp; + state: (contractID: string) => ChelContractState; + actions: Record void | Promise; + process: (message: ChelContractProcessMessageObject, { state, ...gProxy }: { + state: ChelContractState; + }) => void | Promise; + sideEffect?: (mutation: ChelContractSideeffectMutationObject, { state, ...gProxy }: { + state: ChelContractState; + }) => void | Promise; + }>; + methods: Record; +}; +export type CheloniaContext = { + config: CheloniaConfig; + _instance: object; + abortController: AbortController; + state: { + contracts: Record; + pending: string[]; + [x: string]: unknown; + }; + manifestToContract: Record; + whitelistedActions: Record; + currentSyncs: Record; + postSyncOperations: Record>>; + sideEffectStacks: Record[]>; + sideEffectStack: (contractID: string) => Array>; + setPostSyncOp: (contractID: string, key: string, op: Parameters) => void; + transientSecretKeys: Record; + ephemeralReferenceCount: Record; + subscriptionSet: Set; + pending: { + contractID: string; + }[]; + pubsub: import('./pubsub/index.cjs').PubSubClient; + 
contractsModifiedListener: (contracts: Set, { added, removed }: { + added: string[]; + removed: string[]; + }) => void; + defContractSelectors: string[]; + defContractManifest: string; + defContractSBP: typeof sbp; + defContract: CheloniaContractCtx; +}; +export type ChelContractManifestBody = { + name: string; + version: string; + contract: { + hash: string; + file: string; + }; + contractSlim: { + hash: string; + file: string; + }; + signingKeys: string[]; +}; +export type ChelContractManifest = { + head: string; + body: string; + signature: { + keyId: string; + value: string; + }; +}; +export type ChelFileManifest = { + version: '1.0.0'; + type?: string; + meta?: unknown; + cipher: string; + 'cipher-params'?: unknown; + size: number; + chunks: [number, string][]; + 'name-map'?: Record; + alternatives?: Record; +}; +export type ChelContractKey = { + id: string; + name: string; + purpose: string[]; + ringLevel: number; + permissions: '*' | string[]; + allowedActions?: '*' | string[]; + _notBeforeHeight: number; + _notAfterHeight?: number | undefined; + _private?: string; + foreignKey?: string; + meta?: { + quantity?: number; + expires?: number; + private?: { + transient?: boolean; + content?: string; + shareable?: boolean; + oldKeys?: string; + }; + keyRequest?: { + contractID: string; + reference: string; + responded: string; + }; + }; + data: string; +}; +export type ChelContractState = { + _vm: { + authorizedKeys: Record; + invites?: Record; + type: string; + pendingWatch?: Record; + keyshares?: Record; + sharedKeyIds?: { + id: string; + contractID: string; + height: number; + keyRequestHash?: string; + keyRequestHeight?: number; + }[]; + pendingKeyshares?: Record; + props?: Record; + }; + _volatile?: { + pendingKeyRequests?: { + contractID: string; + hash: string; + name: string; + reference?: string; + }[]; + pendingKeyRevocations?: Record; + watch?: [fkName: string, fkId: string][]; + dirty?: boolean; + resyncing?: boolean; + }; +}; +export type 
ChelRootState = { + [x: string]: ChelContractState; +} & { + contracts: Record; +}; +export type Response = { + type: ResType; + err?: string; + data?: JSONType; +}; +export type ParsedEncryptedOrUnencryptedMessage = Readonly<{ + contractID: string; + innerSigningKeyId?: string | null | undefined; + encryptionKeyId?: string | null | undefined; + signingKeyId: string; + data: T; + signingContractID?: string | null | undefined; + innerSigningContractID?: string | null | undefined; +}>; +export type ChelKvOnConflictCallback = (args: { + contractID: string; + key: string; + failedData?: JSONType; + status: number; + etag: string | null | undefined; + currentData: JSONType | undefined; + currentValue: ParsedEncryptedOrUnencryptedMessage | undefined; +}) => Promise<[JSONType, string]>; diff --git a/dist/cjs/utils.cjs b/dist/cjs/utils.cjs new file mode 100644 index 0000000..c3cc494 --- /dev/null +++ b/dist/cjs/utils.cjs @@ -0,0 +1,848 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.handleFetchResult = exports.logEvtError = exports.collectEventStream = exports.checkCanBeGarbageCollected = exports.reactiveClearObject = exports.clearObject = exports.getContractIDfromKeyId = exports.recreateEvent = exports.subscribeToForeignKeyContracts = exports.keyAdditionProcessor = exports.validateKeyUpdatePermissions = exports.validateKeyDelPermissions = exports.validateKeyAddPermissions = exports.validateKeyPermissions = exports.findSuitablePublicKeyIds = exports.findContractIDByForeignKeyId = exports.findSuitableSecretKeyId = exports.findRevokedKeyIdsByName = exports.findForeignKeysByContractID = exports.findKeyIdByName = void 0; +exports.eventsAfter = eventsAfter; +exports.buildShelterAuthorizationHeader = buildShelterAuthorizationHeader; +exports.verifyShelterAuthorizationHeader = verifyShelterAuthorizationHeader; +const crypto_1 = require("@chelonia/crypto"); +const sbp_1 = __importDefault(require("@sbp/sbp")); +const turtledash_1 = require("turtledash"); +const SPMessage_js_1 = require("./SPMessage.cjs"); +const Secret_js_1 = require("./Secret.cjs"); +const constants_js_1 = require("./constants.cjs"); +const errors_js_1 = require("./errors.cjs"); +const events_js_1 = require("./events.cjs"); +const functions_js_1 = require("./functions.cjs"); +const signedData_js_1 = require("./signedData.cjs"); +const MAX_EVENTS_AFTER = Number.parseInt(process.env.MAX_EVENTS_AFTER || '', 10) || Infinity; +const findKeyIdByName = (state, name) => state._vm?.authorizedKeys && Object.values((state._vm.authorizedKeys)).find((k) => k.name === name && k._notAfterHeight == null)?.id; +exports.findKeyIdByName = findKeyIdByName; +const findForeignKeysByContractID = (state, contractID) => state._vm?.authorizedKeys && ((Object.values((state._vm.authorizedKeys)))).filter((k) => k._notAfterHeight == null && k.foreignKey?.includes(contractID)).map(k => k.id); 
+exports.findForeignKeysByContractID = findForeignKeysByContractID; +const findRevokedKeyIdsByName = (state, name) => state._vm?.authorizedKeys && ((Object.values((state._vm.authorizedKeys) || {}))).filter((k) => k.name === name && k._notAfterHeight != null).map(k => k.id); +exports.findRevokedKeyIdsByName = findRevokedKeyIdsByName; +const findSuitableSecretKeyId = (state, permissions, purposes, ringLevel, allowedActions) => { + return state._vm?.authorizedKeys && + Object.values((state._vm.authorizedKeys)) + .filter((k) => { + return k._notAfterHeight == null && + (k.ringLevel <= (ringLevel ?? Number.POSITIVE_INFINITY)) && + (0, sbp_1.default)('chelonia/haveSecretKey', k.id) && + (Array.isArray(permissions) + ? permissions.reduce((acc, permission) => acc && (k.permissions === '*' || k.permissions.includes(permission)), true) + : permissions === k.permissions) && + purposes.reduce((acc, purpose) => acc && k.purpose.includes(purpose), true) && + (Array.isArray(allowedActions) + ? allowedActions.reduce((acc, action) => acc && (k.allowedActions === '*' || !!k.allowedActions?.includes(action)), true) + : allowedActions ? allowedActions === k.allowedActions : true); + }) + .sort((a, b) => b.ringLevel - a.ringLevel)[0]?.id; +}; +exports.findSuitableSecretKeyId = findSuitableSecretKeyId; +const findContractIDByForeignKeyId = (state, keyId) => { + let fk; + if (!keyId || !(fk = state?._vm?.authorizedKeys?.[keyId]?.foreignKey)) + return; + try { + const fkUrl = new URL(fk); + return fkUrl.pathname; + } + catch { } +}; +exports.findContractIDByForeignKeyId = findContractIDByForeignKeyId; +// TODO: Resolve inviteKey being added (doesn't have krs permission) +const findSuitablePublicKeyIds = (state, permissions, purposes, ringLevel) => { + return state._vm?.authorizedKeys && + Object.values((state._vm.authorizedKeys)).filter((k) => (k._notAfterHeight == null) && + (k.ringLevel <= (ringLevel ?? Number.POSITIVE_INFINITY)) && + (Array.isArray(permissions) + ? 
permissions.reduce((acc, permission) => acc && (k.permissions === '*' || k.permissions.includes(permission)), true) + : permissions === k.permissions) && + purposes.reduce((acc, purpose) => acc && k.purpose.includes(purpose), true)) + .sort((a, b) => b.ringLevel - a.ringLevel) + .map((k) => k.id); +}; +exports.findSuitablePublicKeyIds = findSuitablePublicKeyIds; +const validateActionPermissions = (msg, signingKey, state, opT, opV) => { + const data = (0, signedData_js_1.isSignedData)(opV) + ? opV.valueOf() + : opV; + if (signingKey.allowedActions !== '*' && (!Array.isArray(signingKey.allowedActions) || + !signingKey.allowedActions.includes(data.action))) { + (0, exports.logEvtError)(msg, `Signing key ${signingKey.id} is not allowed for action ${data.action}`); + return false; + } + if ((0, signedData_js_1.isSignedData)(opV)) { + const s = opV; + const innerSigningKey = state._vm?.authorizedKeys?.[s.signingKeyId]; + // For outgoing messages, we may be using an inner signing key that isn't + // available for us to see. In this case, we ignore the missing key. + // For incoming messages, we must check permissions and a missing + // key means no permissions. 
+ if (!innerSigningKey && msg._direction === 'outgoing') + return true; + if (!innerSigningKey || + !Array.isArray(innerSigningKey.purpose) || + !innerSigningKey.purpose.includes('sig') || + (innerSigningKey.permissions !== '*' && + (!Array.isArray(innerSigningKey.permissions) || + !innerSigningKey.permissions.includes(opT + '#inner')))) { + (0, exports.logEvtError)(msg, `Signing key ${s.signingKeyId} is missing permissions for operation ${opT}`); + return false; + } + if (innerSigningKey.allowedActions !== '*' && (!Array.isArray(innerSigningKey.allowedActions) || + !innerSigningKey.allowedActions.includes(data.action + '#inner'))) { + (0, exports.logEvtError)(msg, `Signing key ${innerSigningKey.id} is not allowed for action ${data.action}`); + return false; + } + } + return true; +}; +const validateKeyPermissions = (msg, config, state, signingKeyId, opT, opV) => { + const signingKey = state._vm?.authorizedKeys?.[signingKeyId]; + if (!signingKey || + !Array.isArray(signingKey.purpose) || + !signingKey.purpose.includes('sig') || + (signingKey.permissions !== '*' && + (!Array.isArray(signingKey.permissions) || + !signingKey.permissions.includes(opT)))) { + (0, exports.logEvtError)(msg, `Signing key ${signingKeyId} is missing permissions for operation ${opT}`); + return false; + } + if (opT === SPMessage_js_1.SPMessage.OP_ACTION_UNENCRYPTED && + !validateActionPermissions(msg, signingKey, state, opT, opV)) { + return false; + } + if (!config.skipActionProcessing && + opT === SPMessage_js_1.SPMessage.OP_ACTION_ENCRYPTED && + !validateActionPermissions(msg, signingKey, state, opT, opV.valueOf())) { + return false; + } + return true; +}; +exports.validateKeyPermissions = validateKeyPermissions; +const validateKeyAddPermissions = function (contractID, signingKey, state, v, skipPrivateCheck) { + const signingKeyPermissions = Array.isArray(signingKey.permissions) ? 
new Set(signingKey.permissions) : signingKey.permissions; + const signingKeyAllowedActions = Array.isArray(signingKey.allowedActions) ? new Set(signingKey.allowedActions) : signingKey.allowedActions; + if (!state._vm?.authorizedKeys?.[signingKey.id]) + throw new Error('Singing key for OP_KEY_ADD or OP_KEY_UPDATE must exist in _vm.authorizedKeys. contractID=' + contractID + ' signingKeyId=' + signingKey.id); + const localSigningKey = state._vm.authorizedKeys[signingKey.id]; + v.forEach(wk => { + const data = this.config.unwrapMaybeEncryptedData(wk); + if (!data) + return; + const k = data.data; + if (!skipPrivateCheck && signingKey._private && !data.encryptionKeyId) { + throw new Error('Signing key is private but it tried adding a public key'); + } + if (!Number.isSafeInteger(k.ringLevel) || k.ringLevel < localSigningKey.ringLevel) { + throw new Error('Signing key has ringLevel ' + localSigningKey.ringLevel + ' but attempted to add or update a key with ringLevel ' + k.ringLevel); + } + if (signingKeyPermissions !== '*') { + if (!Array.isArray(k.permissions) || !k.permissions.reduce((acc, cv) => acc && signingKeyPermissions.has(cv), true)) { + throw new Error('Unable to add or update a key with more permissions than the signing key. signingKey permissions: ' + String(signingKey?.permissions) + '; key add permissions: ' + String(k.permissions)); + } + } + if (signingKeyAllowedActions !== '*' && k.allowedActions) { + if (!signingKeyAllowedActions || !Array.isArray(k.allowedActions) || !k.allowedActions.reduce((acc, cv) => acc && signingKeyAllowedActions.has(cv), true)) { + throw new Error('Unable to add or update a key with more allowed actions than the signing key. 
signingKey allowed actions: ' + String(signingKey?.allowedActions) + '; key add allowed actions: ' + String(k.allowedActions)); + } + } + }); +}; +exports.validateKeyAddPermissions = validateKeyAddPermissions; +const validateKeyDelPermissions = function (contractID, signingKey, state, v) { + if (!state._vm?.authorizedKeys?.[signingKey.id]) + throw new Error('Singing key for OP_KEY_DEL must exist in _vm.authorizedKeys. contractID=' + contractID + ' signingKeyId=' + signingKey.id); + const localSigningKey = state._vm.authorizedKeys[signingKey.id]; + v + .forEach((wid) => { + const data = this.config.unwrapMaybeEncryptedData(wid); + if (!data) + return; + const id = data.data; + const k = state._vm.authorizedKeys[id]; + if (!k) { + throw new Error('Nonexisting key ID ' + id); + } + if (signingKey._private) { + throw new Error('Signing key is private'); + } + if (!k._private !== !data.encryptionKeyId) { + throw new Error('_private attribute must be preserved'); + } + if (!Number.isSafeInteger(k.ringLevel) || k.ringLevel < localSigningKey.ringLevel) { + throw new Error('Signing key has ringLevel ' + localSigningKey.ringLevel + ' but attempted to remove a key with ringLevel ' + k.ringLevel); + } + }); +}; +exports.validateKeyDelPermissions = validateKeyDelPermissions; +const validateKeyUpdatePermissions = function (contractID, signingKey, state, v) { + const updatedMap = Object.create(null); + const keys = v.map((wuk) => { + const data = this.config.unwrapMaybeEncryptedData(wuk); + if (!data) + return undefined; + const uk = data.data; + const existingKey = state._vm.authorizedKeys[uk.oldKeyId]; + if (!existingKey) { + throw new errors_js_1.ChelErrorWarning('Missing old key ID ' + uk.oldKeyId); + } + if (!existingKey._private !== !data.encryptionKeyId) { + throw new Error('_private attribute must be preserved'); + } + if (uk.name !== existingKey.name) { + throw new Error('Name cannot be updated'); + } + if (!uk.id !== !uk.data) { + throw new Error('Both or none of the id 
and data attributes must be provided. Old key ID: ' + uk.oldKeyId); + } + if (uk.data && existingKey.meta?.private && !(uk.meta?.private)) { + throw new Error('Missing private key. Old key ID: ' + uk.oldKeyId); + } + if (uk.id && uk.id !== uk.oldKeyId) { + updatedMap[uk.id] = uk.oldKeyId; + } + // Discard `_notAfterHeight` and `_notBeforeHeight`, since retaining them + // can cause issues reprocessing messages. + // An example is reprocessing old messages in a chatroom using + // `chelonia/in/processMessage`: cloning `_notAfterHeight` will break key + // rotations, since the new key will have the same expiration value as the + // old key (the new key is supposed to have no expiration height). + const updatedKey = (0, turtledash_1.omit)(existingKey, ['_notAfterHeight', '_notBeforeHeight']); + // Set the corresponding updated attributes + if (uk.permissions) { + updatedKey.permissions = uk.permissions; + } + if (uk.allowedActions) { + updatedKey.allowedActions = uk.allowedActions; + } + if (uk.purpose) { + updatedKey.purpose = uk.purpose; + } + if (uk.meta) { + updatedKey.meta = uk.meta; + } + if (uk.id) { + updatedKey.id = uk.id; + } + if (uk.data) { + updatedKey.data = uk.data; + } + return updatedKey; + // eslint-disable-next-line no-use-before-define + }).filter(Boolean); + exports.validateKeyAddPermissions.call(this, contractID, signingKey, state, keys, true); + return [keys, updatedMap]; +}; +exports.validateKeyUpdatePermissions = validateKeyUpdatePermissions; +const keyAdditionProcessor = function (_msg, hash, keys, state, contractID, _signingKey, internalSideEffectStack) { + const decryptedKeys = []; + const keysToPersist = []; + const storeSecretKey = (key, decryptedKey) => { + const decryptedDeserializedKey = (0, crypto_1.deserializeKey)(decryptedKey); + const transient = !!key.meta?.private?.transient; + (0, sbp_1.default)('chelonia/storeSecretKeys', new Secret_js_1.Secret([{ + key: decryptedDeserializedKey, + // We always set this to true because this 
could be done from + // an outgoing message + transient: true + }])); + if (!transient) { + keysToPersist.push({ key: decryptedDeserializedKey, transient }); + } + }; + for (const wkey of keys) { + const data = this.config.unwrapMaybeEncryptedData(wkey); + if (!data) + continue; + const key = data.data; + let decryptedKey; + // Does the key have key.meta?.private? If so, attempt to decrypt it + if (key.meta?.private && key.meta.private.content) { + if (key.id && + key.meta.private.content && + !(0, sbp_1.default)('chelonia/haveSecretKey', key.id, !key.meta.private.transient)) { + const decryptedKeyResult = this.config.unwrapMaybeEncryptedData(key.meta.private.content); + // Ignore data that couldn't be decrypted + if (decryptedKeyResult) { + // Data aren't encrypted + if (decryptedKeyResult.encryptionKeyId == null) { + throw new Error('Expected encrypted data but got unencrypted data for key with ID: ' + key.id); + } + decryptedKey = decryptedKeyResult.data; + decryptedKeys.push([key.id, decryptedKey]); + storeSecretKey(key, decryptedKey); + } + } + } + // Is this a #sak + if (key.name === '#sak') { + if (data.encryptionKeyId) { + throw new Error('#sak may not be encrypted'); + } + if (key.permissions && (!Array.isArray(key.permissions) || key.permissions.length !== 0)) { + throw new Error('#sak may not have permissions'); + } + if (!Array.isArray(key.purpose) || key.purpose.length !== 1 || key.purpose[0] !== 'sak') { + throw new Error("#sak must have exactly one purpose: 'sak'"); + } + if (key.ringLevel !== 0) { + throw new Error('#sak must have ringLevel 0'); + } + } + // Is this a an invite key? If so, run logic for invite keys and invitation + // accounting + if (key.name.startsWith('#inviteKey-')) { + if (!state._vm.invites) + state._vm.invites = Object.create(null); + const inviteSecret = decryptedKey || ((0, turtledash_1.has)(this.transientSecretKeys, key.id) + ? 
(0, crypto_1.serializeKey)(this.transientSecretKeys[key.id], true) + : undefined); + state._vm.invites[key.id] = { + status: constants_js_1.INVITE_STATUS.VALID, + initialQuantity: key.meta.quantity, + quantity: key.meta.quantity, + expires: key.meta.expires, + inviteSecret: inviteSecret, + responses: [] + }; + } + // Is this KEY operation the result of requesting keys for another contract? + if (key.meta?.keyRequest?.contractID && (0, exports.findSuitableSecretKeyId)(state, [SPMessage_js_1.SPMessage.OP_KEY_ADD], ['sig'])) { + const data = this.config.unwrapMaybeEncryptedData(key.meta.keyRequest.contractID); + // Are we subscribed to this contract? + // If we are not subscribed to the contract, we don't set pendingKeyRequests because we don't need that contract's state + // Setting pendingKeyRequests in these cases could result in issues + // when a corresponding OP_KEY_SHARE is received, which could trigger subscribing to this previously unsubscribed to contract + if (data && internalSideEffectStack) { + const keyRequestContractID = data.data; + const reference = this.config.unwrapMaybeEncryptedData(key.meta.keyRequest.reference); + // Since now we'll make changes to keyRequestContractID, we need to + // do this while no other operations are running for that + // contract + internalSideEffectStack.push(() => { + (0, sbp_1.default)('chelonia/private/queueEvent', keyRequestContractID, () => { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + const originatingContractState = rootState[contractID]; + if ((0, sbp_1.default)('chelonia/contract/hasKeyShareBeenRespondedBy', originatingContractState, keyRequestContractID, reference)) { + // In the meantime, our key request has been responded, so we + // don't need to set pendingKeyRequests. 
+ return; + } + if (!(0, turtledash_1.has)(rootState, keyRequestContractID)) + this.config.reactiveSet(rootState, keyRequestContractID, Object.create(null)); + const targetState = rootState[keyRequestContractID]; + if (!targetState._volatile) { + this.config.reactiveSet(targetState, '_volatile', Object.create(null)); + } + if (!targetState._volatile.pendingKeyRequests) { + this.config.reactiveSet(rootState[keyRequestContractID]._volatile, 'pendingKeyRequests', []); + } + if (targetState._volatile.pendingKeyRequests.some((pkr) => { + return pkr && pkr.contractID === contractID && pkr.hash === hash; + })) { + // This pending key request has already been registered. + // Nothing left to do. + return; + } + // Mark the contract for which keys were requested as pending keys + // The hash (of the current message) is added to this dictionary + // for cross-referencing puposes. + targetState._volatile.pendingKeyRequests.push({ contractID, name: key.name, hash, reference: reference?.data }); + this.setPostSyncOp(contractID, 'pending-keys-for-' + keyRequestContractID, ['okTurtles.events/emit', events_js_1.CONTRACT_IS_PENDING_KEY_REQUESTS, { contractID: keyRequestContractID }]); + }).catch((e) => { + // Using console.error instead of logEvtError because this + // is a side-effect and not relevant for outgoing messages + console.error('Error while setting or updating pendingKeyRequests', { contractID, keyRequestContractID, reference }, e); + }); + }); + } + } + } + // Any persistent keys are stored as a side-effect + if (keysToPersist.length) { + internalSideEffectStack?.push(() => { + (0, sbp_1.default)('chelonia/storeSecretKeys', new Secret_js_1.Secret(keysToPersist)); + }); + } + internalSideEffectStack?.push(() => exports.subscribeToForeignKeyContracts.call(this, contractID, state)); +}; +exports.keyAdditionProcessor = keyAdditionProcessor; +const subscribeToForeignKeyContracts = function (contractID, state) { + try { + Object.values(state._vm.authorizedKeys).filter((key) 
=> !!((key)).foreignKey && (0, exports.findKeyIdByName)(state, ((key)).name) != null).forEach((key) => { + const foreignKey = String(key.foreignKey); + const fkUrl = new URL(foreignKey); + const foreignContract = fkUrl.pathname; + const foreignKeyName = fkUrl.searchParams.get('keyName'); + if (!foreignContract || !foreignKeyName) { + console.warn('Invalid foreign key: missing contract or key name', { contractID, keyId: key.id }); + return; + } + const rootState = (0, sbp_1.default)(this.config.stateSelector); + const signingKey = (0, exports.findSuitableSecretKeyId)(state, [SPMessage_js_1.SPMessage.OP_KEY_DEL], ['sig'], key.ringLevel); + const canMirrorOperations = !!signingKey; + // If we cannot mirror operations, then there is nothing left to do + if (!canMirrorOperations) + return; + // If the key is already being watched, do nothing + if (Array.isArray(rootState?.[foreignContract]?._volatile?.watch)) { + if (rootState[foreignContract]._volatile.watch.find((v) => v[0] === key.name && v[1] === contractID)) + return; + } + if (!(0, turtledash_1.has)(state._vm, 'pendingWatch')) + this.config.reactiveSet(state._vm, 'pendingWatch', Object.create(null)); + if (!(0, turtledash_1.has)(state._vm.pendingWatch, foreignContract)) + this.config.reactiveSet(state._vm.pendingWatch, foreignContract, []); + if (!state._vm.pendingWatch[foreignContract].find(([n]) => n === foreignKeyName)) { + state._vm.pendingWatch[foreignContract].push([foreignKeyName, key.id]); + } + this.setPostSyncOp(contractID, `watchForeignKeys-${contractID}`, ['chelonia/private/watchForeignKeys', contractID]); + }); + } + catch (e) { + console.warn('Error at subscribeToForeignKeyContracts: ' + (e.message || e)); + } +}; +exports.subscribeToForeignKeyContracts = subscribeToForeignKeyContracts; +// Messages might be sent before receiving already posted messages, which will +// result in a conflict +// When resending a message, race conditions might also occur (for example, if +// key rotation is required and 
there are many clients simultaneously online, it +// may be performed by all connected clients at once). +// The following function handles re-signing of messages when a conflict +// occurs (required because the message's previousHEAD will change) as well as +// duplicate operations. For operations involving keys, the payload will be +// rewritten to eliminate no-longer-relevant keys. In most cases, this would +// result in an empty payload, in which case the message is omitted entirely. +const recreateEvent = (entry, state, contractsState) => { + const { HEAD: previousHEAD, height: previousHeight, previousKeyOp } = contractsState || {}; + if (!previousHEAD) { + throw new Error('recreateEvent: Giving up because the contract has been removed'); + } + const head = entry.head(); + const [opT, rawOpV] = entry.rawOp(); + const recreateOperation = (opT, rawOpV) => { + const opV = rawOpV.valueOf(); + const recreateOperationInternal = (opT, opV) => { + let newOpV; + if (opT === SPMessage_js_1.SPMessage.OP_KEY_ADD) { + if (!Array.isArray(opV)) + throw new Error('Invalid message format'); + newOpV = opV.filter((k) => { + const kId = k.valueOf().id; + return !(0, turtledash_1.has)(state._vm.authorizedKeys, kId) || state._vm.authorizedKeys[kId]._notAfterHeight != null; + }); + // Has this key already been added? (i.e., present in authorizedKeys) + if (newOpV.length === 0) { + console.info('Omitting empty OP_KEY_ADD', { head }); + } + else if (newOpV.length === opV.length) { + return opV; + } + } + else if (opT === SPMessage_js_1.SPMessage.OP_KEY_DEL) { + if (!Array.isArray(opV)) + throw new Error('Invalid message format'); + // Has this key already been removed? 
(i.e., no longer in authorizedKeys) + newOpV = opV.filter((keyId) => { + const kId = Object(keyId).valueOf(); + return (0, turtledash_1.has)(state._vm.authorizedKeys, kId) && state._vm.authorizedKeys[kId]._notAfterHeight == null; + }); + if (newOpV.length === 0) { + console.info('Omitting empty OP_KEY_DEL', { head }); + } + else if (newOpV.length === opV.length) { + return opV; + } + } + else if (opT === SPMessage_js_1.SPMessage.OP_KEY_UPDATE) { + if (!Array.isArray(opV)) + throw new Error('Invalid message format'); + // Has this key already been replaced? (i.e., no longer in authorizedKeys) + newOpV = opV.filter((k) => { + const oKId = k.valueOf().oldKeyId; + const nKId = k.valueOf().id; + return nKId == null || ((0, turtledash_1.has)(state._vm.authorizedKeys, oKId) && state._vm.authorizedKeys[oKId]._notAfterHeight == null); + }); + if (newOpV.length === 0) { + console.info('Omitting empty OP_KEY_UPDATE', { head }); + } + else if (newOpV.length === opV.length) { + return opV; + } + } + else if (opT === SPMessage_js_1.SPMessage.OP_ATOMIC) { + if (!Array.isArray(opV)) + throw new Error('Invalid message format'); + newOpV = opV.map(([t, v]) => [t, recreateOperationInternal(t, v)]).filter(([, v]) => !!v); + if (newOpV.length === 0) { + console.info('Omitting empty OP_ATOMIC', { head }); + } + else if (newOpV.length === opV.length && newOpV.reduce((acc, cv, i) => acc && cv === opV[i], true)) { + return opV; + } + else { + return newOpV; + } + } + else { + return opV; + } + }; + const newOpV = recreateOperationInternal(opT, opV); + if (newOpV === opV) { + return rawOpV; + } + else if (newOpV === undefined) { + return; + } + if (typeof rawOpV.recreate !== 'function') { + throw new Error('Unable to recreate operation'); + } + return rawOpV.recreate(newOpV); + }; + const newRawOpV = recreateOperation(opT, rawOpV); + if (!newRawOpV) + return; + const newOp = [opT, newRawOpV]; + entry = SPMessage_js_1.SPMessage.cloneWith(head, newOp, { previousKeyOp, previousHEAD, height: 
previousHeight + 1 }); + return entry; +}; +exports.recreateEvent = recreateEvent; +const getContractIDfromKeyId = (contractID, signingKeyId, state) => { + if (!signingKeyId) + return; + return signingKeyId && state._vm?.authorizedKeys?.[signingKeyId]?.foreignKey + ? new URL(state._vm.authorizedKeys[signingKeyId].foreignKey).pathname + : contractID; +}; +exports.getContractIDfromKeyId = getContractIDfromKeyId; +function eventsAfter(contractID, sinceHeight, limit, sinceHash, { stream } = { stream: true }) { + if (!contractID) { + // Avoid making a network roundtrip to tell us what we already know + throw new Error('Missing contract ID'); + } + let lastUrl; + const fetchEventsStreamReader = async () => { + requestLimit = Math.min(limit ?? MAX_EVENTS_AFTER, remainingEvents); + lastUrl = `${this.config.connectionURL}/eventsAfter/${contractID}/${sinceHeight}${Number.isInteger(requestLimit) ? `/${requestLimit}` : ''}`; + const eventsResponse = await this.config.fetch(lastUrl, { signal }); + if (!eventsResponse.ok) { + const msg = `${eventsResponse.status}: ${eventsResponse.statusText}`; + if (eventsResponse.status === 404 || eventsResponse.status === 410) + throw new errors_js_1.ChelErrorResourceGone(msg, { cause: eventsResponse.status }); + throw new errors_js_1.ChelErrorUnexpectedHttpResponseCode(msg, { cause: eventsResponse.status }); + } + if (!eventsResponse.body) + throw new Error('Missing body'); + latestHeight = parseInt(eventsResponse.headers.get('shelter-headinfo-height'), 10); + if (!Number.isSafeInteger(latestHeight)) + throw new Error('Invalid latest height'); + requestCount++; + return eventsResponse.body.getReader(); + }; + if (!Number.isSafeInteger(sinceHeight) || sinceHeight < 0) { + throw new TypeError('Invalid since height value. Expected positive integer.'); + } + const signal = this.abortController.signal; + let requestCount = 0; + let remainingEvents = limit ?? 
Number.POSITIVE_INFINITY; + let eventsStreamReader; + let latestHeight; + let state = 'fetch'; + let requestLimit; + let count; + let buffer = ''; + let currentEvent; + // return ReadableStream with a custom pull function to handle streamed data + const s = new ReadableStream({ + // The pull function is called whenever the internal buffer of the stream + // becomes empty and needs more data. + async pull(controller) { + try { + for (;;) { + // Handle different states of the stream reading process. + switch (state) { + // When in 'fetch' state, initiate a new fetch request to obtain a + // stream reader for events. + case 'fetch': { + eventsStreamReader = await fetchEventsStreamReader(); + // Transition to reading the new response and reset the processed + // events counter + state = 'read-new-response'; + count = 0; + break; + } + case 'read-eos': // End of stream case + case 'read-new-response': // Just started reading a new response + case 'read': { // Reading from the response stream + const { done, value } = await eventsStreamReader.read(); + // If done, determine if the stream should close or fetch more + // data by making a new request + if (done) { + // No more events to process or reached the latest event + // Using `>=` instead of `===` to avoid an infinite loop in the + // event of data loss on the server. 
+ if (remainingEvents === 0 || sinceHeight >= latestHeight) { + controller.close(); + return; + } + else if (state === 'read-new-response' || buffer) { + // If done prematurely, throw an error + throw new Error('Invalid response: done too early'); + } + else { + // If there are still events to fetch, switch state to fetch + state = 'fetch'; + break; + } + } + if (!value) { + // If there's no value (e.g., empty response), throw an error + throw new Error('Invalid response: missing body'); + } + // Concatenate new data to the buffer, trimming any + // leading/trailing whitespace (the response is a JSON array of + // base64-encoded data, meaning that whitespace is not significant) + buffer = buffer + Buffer.from(value).toString().trim(); + // If there was only whitespace, try reading again + if (!buffer) + break; + if (state === 'read-new-response') { + // Response is in JSON format, so we look for the start of an + // array (`[`) + if (buffer[0] !== '[') { + throw new Error('Invalid response: no array start delimiter'); + } + // Trim the array start delimiter from the buffer + buffer = buffer.slice(1); + } + else if (state === 'read-eos') { + // If in 'read-eos' state and still reading data, it's an error + // because the response isn't valid JSON (there should be + // nothing other than whitespace after `]`) + throw new Error('Invalid data at the end of response'); + } + // If not handling new response or end-of-stream, switch to + // processing events + state = 'events'; + break; + } + case 'events': { + // Process events by looking for a comma or closing bracket that + // indicates the end of an event + const nextIdx = buffer.search(/(?<=\s*)[,\]]/); + // If the end of the event isn't found, go back to reading more + // data + if (nextIdx < 0) { + state = 'read'; + break; + } + let enqueued = false; + try { + // Extract the current event's value and trim whitespace + const eventValue = buffer.slice(0, nextIdx).trim(); + if (eventValue) { + // Check if the event 
limit is reached; if so, throw an error + if (count === requestLimit) { + throw new Error('Received too many events'); + } + currentEvent = JSON.parse((0, functions_js_1.b64ToStr)(JSON.parse(eventValue))).message; + if (count === 0) { + const hash = SPMessage_js_1.SPMessage.deserializeHEAD(currentEvent).hash; + const height = SPMessage_js_1.SPMessage.deserializeHEAD(currentEvent).head.height; + if (height !== sinceHeight || (sinceHash && sinceHash !== hash)) { + if (height === sinceHeight && sinceHash && sinceHash !== hash) { + throw new errors_js_1.ChelErrorForkedChain(`Forked chain: hash(${hash}) !== since(${sinceHash})`); + } + else { + throw new Error(`Unexpected data: hash(${hash}) !== since(${sinceHash || ''}) or height(${height}) !== since(${sinceHeight})`); + } + } + } + // If this is the first event in a second or later request, + // drop the event because it's already been included in + // a previous response + if (count++ !== 0 || requestCount !== 0) { + controller.enqueue(currentEvent); + enqueued = true; + remainingEvents--; + } + } + // If the stream is finished (indicated by a closing bracket), + // update `since` (to make the next request if needed) and + // switch to 'read-eos'. + if (buffer[nextIdx] === ']') { + if (currentEvent) { + const deserialized = SPMessage_js_1.SPMessage.deserializeHEAD(currentEvent); + sinceHeight = deserialized.head.height; + sinceHash = deserialized.hash; + state = 'read-eos'; + } + else { + // If the response came empty, assume there are no more events + // after. Mostly this prevents infinite loops if a server is + // claiming there are more events than it's willing to return + // data for. 
+ state = 'eod'; + } + // This should be an empty string now + buffer = buffer.slice(nextIdx + 1).trim(); + } + else if (currentEvent) { + // Otherwise, move the buffer pointer to the next event + buffer = buffer.slice(nextIdx + 1).trimStart(); + } + else { + // If the end delimiter (`]`) is missing, throw an error + throw new Error('Missing end delimiter'); + } + // If an event was successfully enqueued, exit the loop to wait + // for the next pull request + if (enqueued) { + return; + } + } + catch (e) { + console.error('[chelonia] Error during event parsing', e); + throw e; + } + break; + } + case 'eod': { + if (remainingEvents === 0 || sinceHeight >= latestHeight) { + controller.close(); + } + else { + throw new Error('Unexpected end of data'); + } + return; + } + } + } + } + catch (e) { + console.error('[eventsAfter] Error', { lastUrl }, e); + eventsStreamReader?.cancel('Error during pull').catch(e2 => { + console.error('Error canceling underlying event stream reader on error', e, e2); + }); + throw e; + } + } + }); + if (stream) + return s; + // Workaround for + return (0, exports.collectEventStream)(s); +} +function buildShelterAuthorizationHeader(contractID, state) { + if (!state) + state = (0, sbp_1.default)(this.config.stateSelector)[contractID]; + const SAKid = (0, exports.findKeyIdByName)(state, '#sak'); + if (!SAKid) { + throw new Error(`Missing #sak in ${contractID}`); + } + const SAK = this.transientSecretKeys[SAKid]; + if (!SAK) { + throw new Error(`Missing secret #sak (${SAKid}) in ${contractID}`); + } + const deserializedSAK = typeof SAK === 'string' ? (0, crypto_1.deserializeKey)(SAK) : SAK; + const nonceBytes = new Uint8Array(15); + globalThis.crypto.getRandomValues(nonceBytes); + // . + const data = `${contractID} ${(0, sbp_1.default)('chelonia/time')}.${Buffer.from(nonceBytes).toString('base64')}`; + // shelter .. 
+ return `shelter ${data}.${(0, crypto_1.sign)(deserializedSAK, data)}`; +} +function verifyShelterAuthorizationHeader(authorization, rootState) { + const regex = /^shelter (([a-zA-Z0-9]+) ([0-9]+)\.([a-zA-Z0-9+/=]{20}))\.([a-zA-Z0-9+/=]+)$/i; + if (authorization.length > 1024) { + throw new Error('Authorization header too long'); + } + const matches = authorization.match(regex); + if (!matches) { + throw new Error('Unable to parse shelter authorization header'); + } + // TODO: Remember nonces and reject already used ones + const [, data, contractID, timestamp, , signature] = matches; + if (Math.abs(parseInt(timestamp) - Date.now()) > 60e3) { + throw new Error('Invalid signature time range'); + } + if (!rootState) + rootState = (0, sbp_1.default)('chelonia/rootState'); + if (!(0, turtledash_1.has)(rootState, contractID)) { + throw new Error(`Contract ${contractID} from shelter authorization header not found`); + } + const SAKid = (0, exports.findKeyIdByName)(rootState[contractID], '#sak'); + if (!SAKid) { + throw new Error(`Missing #sak in ${contractID}`); + } + const SAK = rootState[contractID]._vm.authorizedKeys[SAKid].data; + if (!SAK) { + throw new Error(`Missing secret #sak (${SAKid}) in ${contractID}`); + } + const deserializedSAK = (0, crypto_1.deserializeKey)(SAK); + (0, crypto_1.verifySignature)(deserializedSAK, data, signature); + return contractID; +} +const clearObject = (o) => { + Object.keys(o).forEach((k) => delete o[k]); +}; +exports.clearObject = clearObject; +const reactiveClearObject = (o, fn) => { + Object.keys(o).forEach((k) => fn(o, k)); +}; +exports.reactiveClearObject = reactiveClearObject; +const checkCanBeGarbageCollected = function (id) { + const rootState = (0, sbp_1.default)(this.config.stateSelector); + return ( + // Check persistent references + (!(0, turtledash_1.has)(rootState.contracts, id) || !rootState.contracts[id] || !(0, turtledash_1.has)(rootState.contracts[id], 'references')) && + // Check ephemeral references + !(0, 
turtledash_1.has)(this.ephemeralReferenceCount, id)) && + // Check foreign keys (i.e., that no keys from this contract are being watched) + (!(0, turtledash_1.has)(rootState, id) || !(0, turtledash_1.has)(rootState[id], '_volatile') || !(0, turtledash_1.has)(rootState[id]._volatile, 'watch') || rootState[id]._volatile.watch.length === 0 || rootState[id]._volatile.watch.filter(([, cID]) => this.subscriptionSet.has(cID)).length === 0); +}; +exports.checkCanBeGarbageCollected = checkCanBeGarbageCollected; +const collectEventStream = async (s) => { + const reader = s.getReader(); + const r = []; + for (;;) { + const { done, value } = await reader.read(); + if (done) + break; + r.push(value); + } + return r; +}; +exports.collectEventStream = collectEventStream; +// Used inside processing functions for displaying errors at the 'warn' level +// for outgoing messages to increase the signal-to-noise error. See issue #2773. +const logEvtError = (msg, ...args) => { + if (msg._direction === 'outgoing') { + console.warn(...args); + } + else { + console.error(...args); + } +}; +exports.logEvtError = logEvtError; +const handleFetchResult = (type) => { + return function (r) { + if (!r.ok) { + const msg = `${r.status}: ${r.statusText}`; + // 410 is sometimes special (for example, it can mean that a contract or + // a file been deleted) + if (r.status === 404 || r.status === 410) + throw new errors_js_1.ChelErrorResourceGone(msg, { cause: r.status }); + throw new errors_js_1.ChelErrorUnexpectedHttpResponseCode(msg, { cause: r.status }); + } + return r[type](); + }; +}; +exports.handleFetchResult = handleFetchResult; diff --git a/dist/cjs/utils.d.cts b/dist/cjs/utils.d.cts new file mode 100644 index 0000000..7573a74 --- /dev/null +++ b/dist/cjs/utils.d.cts @@ -0,0 +1,36 @@ +import type { SPKey, SPKeyPurpose, SPKeyUpdate, SPOpValue } from './SPMessage.cjs'; +import { SPMessage } from './SPMessage.cjs'; +import type { EncryptedData } from './encryptedData.cjs'; +import { 
ChelContractKey, ChelContractState, ChelRootState, CheloniaConfig, CheloniaContext, JSONType } from './types.cjs'; +export declare const findKeyIdByName: (state: ChelContractState, name: string) => string | null | undefined; +export declare const findForeignKeysByContractID: (state: ChelContractState, contractID: string) => string[] | undefined; +export declare const findRevokedKeyIdsByName: (state: ChelContractState, name: string) => string[]; +export declare const findSuitableSecretKeyId: (state: ChelContractState, permissions: "*" | string[], purposes: SPKeyPurpose[], ringLevel?: number, allowedActions?: "*" | string[]) => string | null | undefined; +export declare const findContractIDByForeignKeyId: (state: ChelContractState, keyId: string) => string | null | undefined; +export declare const findSuitablePublicKeyIds: (state: ChelContractState, permissions: "*" | string[], purposes: SPKeyPurpose[], ringLevel?: number) => string[] | null | undefined; +export declare const validateKeyPermissions: (msg: SPMessage, config: CheloniaConfig, state: { + _vm: { + authorizedKeys: ChelContractState["_vm"]["authorizedKeys"]; + }; +}, signingKeyId: string, opT: string, opV: SPOpValue) => boolean; +export declare const validateKeyAddPermissions: (this: CheloniaContext, contractID: string, signingKey: ChelContractKey, state: ChelContractState, v: (ChelContractKey | SPKey | EncryptedData)[], skipPrivateCheck?: boolean) => void; +export declare const validateKeyDelPermissions: (this: CheloniaContext, contractID: string, signingKey: ChelContractKey, state: ChelContractState, v: (string | EncryptedData)[]) => void; +export declare const validateKeyUpdatePermissions: (this: CheloniaContext, contractID: string, signingKey: ChelContractKey, state: ChelContractState, v: (SPKeyUpdate | EncryptedData)[]) => [ChelContractKey[], Record]; +export declare const keyAdditionProcessor: (this: CheloniaContext, _msg: SPMessage, hash: string, keys: (ChelContractKey | SPKey | EncryptedData)[], 
state: ChelContractState, contractID: string, _signingKey: ChelContractKey, internalSideEffectStack?: (({ state, message }: { + state: ChelContractState; + message: SPMessage; +}) => void)[]) => void; +export declare const subscribeToForeignKeyContracts: (this: CheloniaContext, contractID: string, state: ChelContractState) => void; +export declare const recreateEvent: (entry: SPMessage, state: ChelContractState, contractsState: ChelRootState["contracts"][string]) => undefined | SPMessage; +export declare const getContractIDfromKeyId: (contractID: string, signingKeyId: string | null | undefined, state: ChelContractState) => string | null | undefined; +export declare function eventsAfter(this: CheloniaContext, contractID: string, sinceHeight: number, limit?: number, sinceHash?: string, { stream }?: { + stream: boolean; +}): ReadableStream | Promise; +export declare function buildShelterAuthorizationHeader(this: CheloniaContext, contractID: string, state?: ChelContractState): string; +export declare function verifyShelterAuthorizationHeader(authorization: string, rootState?: object): string; +export declare const clearObject: (o: object) => void; +export declare const reactiveClearObject: (o: T, fn: (o: T, k: keyof T) => void) => void; +export declare const checkCanBeGarbageCollected: (this: CheloniaContext, id: string) => boolean; +export declare const collectEventStream: (s: ReadableStream) => Promise; +export declare const logEvtError: (msg: SPMessage, ...args: unknown[]) => void; +export declare const handleFetchResult: (type: "text" | "json" | "blob") => ((r: Response) => Promise); diff --git a/dist/esm/SPMessage.d.mts b/dist/esm/SPMessage.d.mts new file mode 100644 index 0000000..be7a616 --- /dev/null +++ b/dist/esm/SPMessage.d.mts @@ -0,0 +1,215 @@ +import type { Key } from '@chelonia/crypto'; +import { CURVE25519XSALSA20POLY1305, EDWARDS25519SHA512BATCH, XSALSA20POLY1305 } from '@chelonia/crypto'; +import { serdesDeserializeSymbol, serdesSerializeSymbol, 
serdesTagSymbol } from '@chelonia/serdes'; +import type { EncryptedData } from './encryptedData.mjs'; +import type { SignedData } from './signedData.mjs'; +import type { ChelContractState, JSONObject, JSONType } from './types.mjs'; +export type SPKeyType = typeof EDWARDS25519SHA512BATCH | typeof CURVE25519XSALSA20POLY1305 | typeof XSALSA20POLY1305; +export type SPKeyPurpose = 'enc' | 'sig' | 'sak'; +export type SPKey = { + id: string; + name: string; + purpose: SPKeyPurpose[]; + ringLevel: number; + permissions: '*' | string[]; + allowedActions?: '*' | string[]; + meta?: { + quantity?: number; + expires?: number; + private?: { + transient?: boolean; + content?: EncryptedData; + shareable?: boolean; + oldKeys?: string; + }; + keyRequest?: { + contractID?: string; + reference?: string | EncryptedData; + }; + }; + data: string; + foreignKey?: string; + _notBeforeHeight: number; + _notAfterHeight?: number; + _private?: string; +}; +export type SPOpContract = { + type: string; + keys: (SPKey | EncryptedData)[]; + parentContract?: string; +}; +export type ProtoSPOpActionUnencrypted = { + action: string; + data: JSONType; + meta: JSONObject; +}; +export type SPOpActionUnencrypted = ProtoSPOpActionUnencrypted | SignedData; +export type SPOpActionEncrypted = EncryptedData; +export type SPOpKeyAdd = (SPKey | EncryptedData)[]; +export type SPOpKeyDel = (string | EncryptedData)[]; +export type SPOpPropSet = { + key: string; + value: JSONType; +}; +export type ProtoSPOpKeyShare = { + contractID: string; + keys: SPKey[]; + foreignContractID?: string; + keyRequestHash?: string; + keyRequestHeight?: number; +}; +export type SPOpKeyShare = ProtoSPOpKeyShare | EncryptedData; +export type ProtoSPOpKeyRequest = { + contractID: string; + height: number; + replyWith: SignedData<{ + encryptionKeyId: string; + responseKey: EncryptedData; + }>; + request: string; +}; +export type SPOpKeyRequest = ProtoSPOpKeyRequest | EncryptedData; +export type ProtoSPOpKeyRequestSeen = { + 
keyRequestHash: string; + keyShareHash?: string; + success: boolean; +}; +export type SPOpKeyRequestSeen = ProtoSPOpKeyRequestSeen | EncryptedData; +export type SPKeyUpdate = { + name: string; + id?: string; + oldKeyId: string; + data?: string; + purpose?: string[]; + permissions?: string[]; + allowedActions?: '*' | string[]; + meta?: { + quantity?: number; + expires?: number; + private?: { + transient?: boolean; + content?: string; + shareable?: boolean; + oldKeys?: string; + }; + }; +}; +export type SPOpKeyUpdate = (SPKeyUpdate | EncryptedData)[]; +export type SPOpType = 'c' | 'a' | 'ae' | 'au' | 'ka' | 'kd' | 'ku' | 'pu' | 'ps' | 'pd' | 'ks' | 'kr' | 'krs'; +type ProtoSPOpValue = SPOpContract | SPOpActionEncrypted | SPOpActionUnencrypted | SPOpKeyAdd | SPOpKeyDel | SPOpPropSet | SPOpKeyShare | SPOpKeyRequest | SPOpKeyRequestSeen | SPOpKeyUpdate; +export type ProtoSPOpMap = { + 'c': SPOpContract; + 'ae': SPOpActionEncrypted; + 'au': SPOpActionUnencrypted; + 'ka': SPOpKeyAdd; + 'kd': SPOpKeyDel; + 'ku': SPOpKeyUpdate; + 'pu': never; + 'ps': SPOpPropSet; + 'pd': never; + 'ks': SPOpKeyShare; + 'kr': SPOpKeyRequest; + 'krs': SPOpKeyRequestSeen; +}; +export type SPOpAtomic = { + [K in keyof ProtoSPOpMap]: [K, ProtoSPOpMap[K]]; +}[keyof ProtoSPOpMap][]; +export type SPOpValue = ProtoSPOpValue | SPOpAtomic; +export type SPOpRaw = [SPOpType, SignedData]; +export type SPOpMap = ProtoSPOpMap & { + 'a': SPOpAtomic; +}; +export type SPOp = { + [K in keyof SPOpMap]: [K, SPOpMap[K]]; +}[keyof SPOpMap]; +export type SPMsgDirection = 'incoming' | 'outgoing'; +export type SPHead = { + version: '1.0.0'; + op: SPOpType; + height: number; + contractID: string | null; + previousKeyOp: string | null; + previousHEAD: string | null; + manifest: string; +}; +type SPMsgParams = { + direction: SPMsgDirection; + mapping: { + key: string; + value: string; + }; + head: SPHead; + signedMessageData: SignedData; +}; +export declare class SPMessage { + _mapping: { + key: string; + value: string; 
+ }; + _head: SPHead; + _message: SPOpValue; + _signedMessageData: SignedData; + _direction: SPMsgDirection; + _decryptedValue?: unknown; + _innerSigningKeyId?: string; + static OP_CONTRACT: "c"; + static OP_ACTION_ENCRYPTED: "ae"; + static OP_ACTION_UNENCRYPTED: "au"; + static OP_KEY_ADD: "ka"; + static OP_KEY_DEL: "kd"; + static OP_KEY_UPDATE: "ku"; + static OP_PROTOCOL_UPGRADE: "pu"; + static OP_PROP_SET: "ps"; + static OP_PROP_DEL: "pd"; + static OP_CONTRACT_AUTH: "ca"; + static OP_CONTRACT_DEAUTH: "cd"; + static OP_ATOMIC: "a"; + static OP_KEY_SHARE: "ks"; + static OP_KEY_REQUEST: "kr"; + static OP_KEY_REQUEST_SEEN: "krs"; + static createV1_0({ contractID, previousHEAD, previousKeyOp, height, op, manifest }: { + contractID: string | null; + previousHEAD?: string | null; + previousKeyOp?: string | null; + height?: number; + op: SPOpRaw; + manifest: string; + }): SPMessage; + static cloneWith(targetHead: SPHead, targetOp: SPOpRaw, sources: Partial): SPMessage; + static deserialize(value: string, additionalKeys?: Record, state?: ChelContractState, unwrapMaybeEncryptedDataFn?: (data: SPKey | EncryptedData) => { + encryptionKeyId: string | null; + data: SPKey; + } | undefined): SPMessage; + static deserializeHEAD(value: string): { + head: SPHead; + hash: string; + contractID: string; + isFirstMessage: boolean; + description: () => string; + }; + constructor(params: SPMsgParams); + decryptedValue(): unknown | undefined; + innerSigningKeyId(): string | undefined; + head(): SPHead; + message(): SPOpValue; + op(): SPOp; + rawOp(): SPOpRaw; + opType(): SPOpType; + opValue(): SPOpValue; + signingKeyId(): string; + manifest(): string; + description(): string; + isFirstMessage(): boolean; + contractID(): string; + serialize(): string; + hash(): string; + previousKeyOp(): string | null; + height(): number; + id(): string; + direction(): 'incoming' | 'outgoing'; + isKeyOp(): boolean; + static get [serdesTagSymbol](): string; + static [serdesSerializeSymbol](m: SPMessage): 
unknown[]; + static [serdesDeserializeSymbol]([serialized, direction, decryptedValue, innerSigningKeyId]: [string, SPMsgDirection, object, string]): SPMessage; +} +export {}; diff --git a/dist/esm/SPMessage.mjs b/dist/esm/SPMessage.mjs new file mode 100644 index 0000000..dbce059 --- /dev/null +++ b/dist/esm/SPMessage.mjs @@ -0,0 +1,427 @@ +import { keyId } from '@chelonia/crypto'; +import { serdesDeserializeSymbol, serdesSerializeSymbol, serdesTagSymbol } from '@chelonia/serdes'; +import { has } from 'turtledash'; +import { encryptedIncomingData, encryptedIncomingForeignData, maybeEncryptedIncomingData, unwrapMaybeEncryptedData } from './encryptedData.mjs'; +import { createCID, multicodes } from './functions.mjs'; +import { isRawSignedData, isSignedData, rawSignedIncomingData, signedIncomingData } from './signedData.mjs'; +// Takes a raw message and processes it so that EncryptedData and SignedData +// attributes are defined +const decryptedAndVerifiedDeserializedMessage = (head, headJSON, contractID, parsedMessage, additionalKeys, state) => { + const op = head.op; + const height = head.height; + const message = op === SPMessage.OP_ACTION_ENCRYPTED + ? encryptedIncomingData(contractID, state, parsedMessage, height, additionalKeys, headJSON, undefined) + : parsedMessage; + // If the operation is SPMessage.OP_KEY_ADD or SPMessage.OP_KEY_UPDATE, + // extract encrypted data from key.meta?.private?.content + if ([SPMessage.OP_KEY_ADD, SPMessage.OP_KEY_UPDATE].includes(op)) { + return message.map((key) => { + return maybeEncryptedIncomingData(contractID, state, key, height, additionalKeys, headJSON, (key) => { + if (key.meta?.private?.content) { + key.meta.private.content = encryptedIncomingData(contractID, state, key.meta.private.content, height, additionalKeys, headJSON, (value) => { + // Validator function to verify the key matches its expected ID + const computedKeyId = keyId(value); + if (computedKeyId !== key.id) { + throw new Error(`Key ID mismatch. 
Expected to decrypt key ID ${key.id} but got ${computedKeyId}`); + } + }); + } + // key.meta?.keyRequest?.contractID could be optionally encrypted + if (key.meta?.keyRequest?.reference) { + try { + key.meta.keyRequest.reference = maybeEncryptedIncomingData(contractID, state, key.meta.keyRequest.reference, height, additionalKeys, headJSON)?.valueOf(); + } + catch { + // If we couldn't decrypt it, this value is of no use to us (we + // can't keep track of key requests and key shares), so we delete it + delete key.meta.keyRequest.reference; + } + } + // key.meta?.keyRequest?.contractID could be optionally encrypted + if (key.meta?.keyRequest?.contractID) { + try { + key.meta.keyRequest.contractID = maybeEncryptedIncomingData(contractID, state, key.meta.keyRequest.contractID, height, additionalKeys, headJSON)?.valueOf(); + } + catch { + // If we couldn't decrypt it, this value is of no use to us (we + // can't keep track of key requests and key shares), so we delete it + delete key.meta.keyRequest.contractID; + } + } + }); + }); + } + // If the operation is SPMessage.OP_CONTRACT, + // extract encrypted data from keys?.[].meta?.private?.content + if (op === SPMessage.OP_CONTRACT) { + message.keys = message.keys?.map((key) => { + return maybeEncryptedIncomingData(contractID, state, key, height, additionalKeys, headJSON, (key) => { + if (!key.meta?.private?.content) + return; + // The following two lines are commented out because this feature + // (using a foreign decryption contract) doesn't seem to be in use and + // the use case seems unclear. + // const decryptionFn = key.meta.private.foreignContractID ? encryptedIncomingForeignData : encryptedIncomingData + // const decryptionContract = key.meta.private.foreignContractID ? 
key.meta.private.foreignContractID : contractID + const decryptionFn = encryptedIncomingData; + const decryptionContract = contractID; + key.meta.private.content = decryptionFn(decryptionContract, state, key.meta.private.content, height, additionalKeys, headJSON, (value) => { + const computedKeyId = keyId(value); + if (computedKeyId !== key.id) { + throw new Error(`Key ID mismatch. Expected to decrypt key ID ${key.id} but got ${computedKeyId}`); + } + }); + }); + }); + } + // If the operation is SPMessage.OP_KEY_SHARE, + // extract encrypted data from keys?.[].meta?.private?.content + if (op === SPMessage.OP_KEY_SHARE) { + return maybeEncryptedIncomingData(contractID, state, message, height, additionalKeys, headJSON, (message) => { + message.keys?.forEach((key) => { + if (!key.meta?.private?.content) + return; + const decryptionFn = message.foreignContractID ? encryptedIncomingForeignData : encryptedIncomingData; + const decryptionContract = message.foreignContractID || contractID; + key.meta.private.content = decryptionFn(decryptionContract, state, key.meta.private.content, height, additionalKeys, headJSON, (value) => { + const computedKeyId = keyId(value); + if (computedKeyId !== key.id) { + throw new Error(`Key ID mismatch. 
Expected to decrypt key ID ${key.id} but got ${computedKeyId}`); + } + }); + }); + }); + } + // If the operation is OP_KEY_REQUEST, the payload might be EncryptedData + // The ReplyWith attribute is SignedData + if (op === SPMessage.OP_KEY_REQUEST) { + return maybeEncryptedIncomingData(contractID, state, message, height, additionalKeys, headJSON, (msg) => { + msg.replyWith = signedIncomingData(msg.contractID, undefined, msg.replyWith, msg.height, headJSON); + }); + } + // If the operation is OP_ACTION_UNENCRYPTED, it may contain an inner + // signature + // Actions must be signed using a key for the current contract + if (op === SPMessage.OP_ACTION_UNENCRYPTED && isRawSignedData(message)) { + return signedIncomingData(contractID, state, message, height, headJSON); + } + // Inner signatures are handled by EncryptedData + if (op === SPMessage.OP_ACTION_ENCRYPTED) { + return message; + } + if (op === SPMessage.OP_KEY_DEL) { + return message.map((key) => { + return maybeEncryptedIncomingData(contractID, state, key, height, additionalKeys, headJSON, undefined); + }); + } + if (op === SPMessage.OP_KEY_REQUEST_SEEN) { + return maybeEncryptedIncomingData(contractID, state, parsedMessage, height, additionalKeys, headJSON, undefined); + } + // If the operation is OP_ATOMIC, call this function recursively + if (op === SPMessage.OP_ATOMIC) { + return message + .map(([opT, opV]) => [ + opT, + decryptedAndVerifiedDeserializedMessage({ ...head, op: opT }, headJSON, contractID, opV, additionalKeys, state) + ]); + } + return message; +}; +export class SPMessage { + // flow type annotations to make flow happy + _mapping; + _head; + _message; + _signedMessageData; + _direction; + _decryptedValue; + _innerSigningKeyId; + static OP_CONTRACT = 'c'; + static OP_ACTION_ENCRYPTED = 'ae'; // e2e-encrypted action + static OP_ACTION_UNENCRYPTED = 'au'; // publicly readable action + static OP_KEY_ADD = 'ka'; // add this key to the list of keys allowed to write to this contract, or update an 
existing key + static OP_KEY_DEL = 'kd'; // remove this key from authorized keys + static OP_KEY_UPDATE = 'ku'; // update key in authorized keys + static OP_PROTOCOL_UPGRADE = 'pu'; + static OP_PROP_SET = 'ps'; // set a public key/value pair + static OP_PROP_DEL = 'pd'; // delete a public key/value pair + static OP_CONTRACT_AUTH = 'ca'; // authorize a contract + static OP_CONTRACT_DEAUTH = 'cd'; // deauthorize a contract + static OP_ATOMIC = 'a'; // atomic op + static OP_KEY_SHARE = 'ks'; // key share + static OP_KEY_REQUEST = 'kr'; // key request + static OP_KEY_REQUEST_SEEN = 'krs'; // key request response + // eslint-disable-next-line camelcase + static createV1_0({ contractID, previousHEAD = null, previousKeyOp = null, + // Height will be automatically set to the correct value when sending + // The reason to set it to Number.MAX_SAFE_INTEGER is so that we can + // temporarily process outgoing messages with signature validation + // still working + height = Number.MAX_SAFE_INTEGER, op, manifest }) { + const head = { + version: '1.0.0', + previousHEAD, + previousKeyOp, + height, + contractID, + op: op[0], + manifest + }; + return new this(messageToParams(head, op[1])); + } + // SPMessage.cloneWith could be used when make a SPMessage object having the same id() + // https://github.com/okTurtles/group-income/issues/1503 + static cloneWith(targetHead, targetOp, sources) { + const head = Object.assign({}, targetHead, sources); + return new this(messageToParams(head, targetOp[1])); + } + static deserialize(value, additionalKeys, state, unwrapMaybeEncryptedDataFn = unwrapMaybeEncryptedData) { + if (!value) + throw new Error(`deserialize bad value: ${value}`); + const { head: headJSON, ...parsedValue } = JSON.parse(value); + const head = JSON.parse(headJSON); + const contractID = head.op === SPMessage.OP_CONTRACT ? 
createCID(value, multicodes.SHELTER_CONTRACT_DATA) : head.contractID; + // Special case for OP_CONTRACT, since the keys are not yet present in the + // state + if (!state?._vm?.authorizedKeys && head.op === SPMessage.OP_CONTRACT) { + const value = rawSignedIncomingData(parsedValue); + const authorizedKeys = Object.fromEntries(value.valueOf()?.keys.map(wk => { + const k = unwrapMaybeEncryptedDataFn(wk); + if (!k) + return null; + return [k.data.id, k.data]; + // eslint-disable-next-line no-use-before-define + }).filter(Boolean)); + state = { + _vm: { + type: head.type, + authorizedKeys + } + }; + } + const signedMessageData = signedIncomingData(contractID, state, parsedValue, head.height, headJSON, (message) => decryptedAndVerifiedDeserializedMessage(head, headJSON, contractID, message, additionalKeys, state)); + return new this({ + direction: 'incoming', + mapping: { key: createCID(value, multicodes.SHELTER_CONTRACT_DATA), value }, + head, + signedMessageData + }); + } + static deserializeHEAD(value) { + if (!value) + throw new Error(`deserialize bad value: ${value}`); + let head, hash; + const result = { + get head() { + if (head === undefined) { + head = JSON.parse(JSON.parse(value).head); + } + return head; + }, + get hash() { + if (!hash) { + hash = createCID(value, multicodes.SHELTER_CONTRACT_DATA); + } + return hash; + }, + get contractID() { + return result.head?.contractID ?? 
result.hash; + }, + // `description` is not a getter to prevent the value from being copied + // if the object is cloned or serialized + description() { + const type = this.head.op; + return ``; + }, + get isFirstMessage() { + return !result.head?.contractID; + } + }; + return result; + } + constructor(params) { + this._direction = params.direction; + this._mapping = params.mapping; + this._head = params.head; + this._signedMessageData = params.signedMessageData; + // perform basic sanity check + const type = this.opType(); + let atomicTopLevel = true; + const validate = (type, message) => { + switch (type) { + case SPMessage.OP_CONTRACT: + if (!this.isFirstMessage() || !atomicTopLevel) + throw new Error('OP_CONTRACT: must be first message'); + break; + case SPMessage.OP_ATOMIC: + if (!atomicTopLevel) { + throw new Error('OP_ATOMIC not allowed inside of OP_ATOMIC'); + } + if (!Array.isArray(message)) { + throw new TypeError('OP_ATOMIC must be of an array type'); + } + atomicTopLevel = false; + message.forEach(([t, m]) => validate(t, m)); + break; + case SPMessage.OP_KEY_ADD: + case SPMessage.OP_KEY_DEL: + case SPMessage.OP_KEY_UPDATE: + if (!Array.isArray(message)) + throw new TypeError('OP_KEY_{ADD|DEL|UPDATE} must be of an array type'); + break; + case SPMessage.OP_KEY_SHARE: + case SPMessage.OP_KEY_REQUEST: + case SPMessage.OP_KEY_REQUEST_SEEN: + case SPMessage.OP_ACTION_ENCRYPTED: + case SPMessage.OP_ACTION_UNENCRYPTED: + // nothing for now + break; + default: + throw new Error(`unsupported op: ${type}`); + } + }; + // this._message is set as a getter to verify the signature only once the + // message contents are read + Object.defineProperty(this, '_message', { + get: ((validated) => () => { + const message = this._signedMessageData.valueOf(); + // If we haven't validated the message, validate it now + if (!validated) { + validate(type, message); + validated = true; + } + return message; + })() + }); + } + decryptedValue() { + if (this._decryptedValue) + 
return this._decryptedValue; + try { + const value = this.message(); + // TODO: This uses `unwrapMaybeEncryptedData` instead of a configurable + // version based on `skipDecryptionAttempts`. This is fine based on current + // use, and also something else might be confusing based on the explicit + // name of this function, `decryptedValue`. + const data = unwrapMaybeEncryptedData(value); + // Did decryption succeed? (unwrapMaybeEncryptedData will return undefined + // on failure) + if (data?.data) { + // The data inside could be signed. In this case, we unwrap that to get + // to the inner contents + if (isSignedData(data.data)) { + this._innerSigningKeyId = data.data.signingKeyId; + this._decryptedValue = data.data.valueOf(); + } + else { + this._decryptedValue = data.data; + } + } + return this._decryptedValue; + } + catch { + // Signature or encryption error + // We don't log this error because it's already logged when the value is + // retrieved + return undefined; + } + } + innerSigningKeyId() { + if (!this._decryptedValue) { + this.decryptedValue(); + } + return this._innerSigningKeyId; + } + head() { return this._head; } + message() { return this._message; } + op() { return [this.head().op, this.message()]; } + rawOp() { return [this.head().op, this._signedMessageData]; } + opType() { return this.head().op; } + opValue() { return this.message(); } + signingKeyId() { return this._signedMessageData.signingKeyId; } + manifest() { return this.head().manifest; } + description() { + const type = this.opType(); + let desc = ``; + } + isFirstMessage() { return !this.head().contractID; } + contractID() { return this.head().contractID || this.hash(); } + serialize() { return this._mapping.value; } + hash() { return this._mapping.key; } + previousKeyOp() { return this._head.previousKeyOp; } + height() { return this._head.height; } + id() { + // TODO: Schedule for later removal + throw new Error('SPMessage.id() was called but it has been removed'); + } + direction() { + 
return this._direction; + } + // `isKeyOp` is used to filter out non-key operations for providing an + // abbreviated chain fo snapshot validation + isKeyOp() { + let value; + return !!(keyOps.includes(this.opType()) || + (this.opType() === SPMessage.OP_ATOMIC && Array.isArray(value = this.opValue()) && value.some(([opT]) => { + return keyOps.includes(opT); + }))); + } + static get [serdesTagSymbol]() { + return 'SPMessage'; + } + static [serdesSerializeSymbol](m) { + return [m.serialize(), m.direction(), m.decryptedValue(), m.innerSigningKeyId()]; + } + static [serdesDeserializeSymbol]([serialized, direction, decryptedValue, innerSigningKeyId]) { + const m = SPMessage.deserialize(serialized); + m._direction = direction; + m._decryptedValue = decryptedValue; + m._innerSigningKeyId = innerSigningKeyId; + return m; + } +} +function messageToParams(head, message) { + // NOTE: the JSON strings generated here must be preserved forever. + // do not ever regenerate this message using the contructor. + // instead store it using serialize() and restore it using deserialize(). + // The issue is that different implementations of JavaScript engines might generate different strings + // when serializing JS objects using JSON.stringify + // and that would lead to different hashes resulting from createCID. + // So to get around this we save the serialized string upon creation + // and keep a copy of it (instead of regenerating it as needed). + // https://github.com/okTurtles/group-income/pull/1513#discussion_r1142809095 + let mapping; + return { + direction: has(message, 'recreate') ? 
'outgoing' : 'incoming', + // Lazy computation of mapping to prevent us from serializing outgoing + // atomic operations + get mapping() { + if (!mapping) { + const headJSON = JSON.stringify(head); + const messageJSON = { ...message.serialize(headJSON), head: headJSON }; + const value = JSON.stringify(messageJSON); + mapping = { + key: createCID(value, multicodes.SHELTER_CONTRACT_DATA), + value + }; + } + return mapping; + }, + head, + signedMessageData: message + }; +} +// Operations that affect valid keys +const keyOps = [SPMessage.OP_CONTRACT, SPMessage.OP_KEY_ADD, SPMessage.OP_KEY_DEL, SPMessage.OP_KEY_UPDATE]; diff --git a/dist/esm/Secret.d.mts b/dist/esm/Secret.d.mts new file mode 100644 index 0000000..39e5744 --- /dev/null +++ b/dist/esm/Secret.d.mts @@ -0,0 +1,8 @@ +import { serdesDeserializeSymbol, serdesSerializeSymbol, serdesTagSymbol } from '@chelonia/serdes'; +export declare class Secret { + static [serdesDeserializeSymbol](secret: T): Secret; + static [serdesSerializeSymbol](secret: Secret): any; + static get [serdesTagSymbol](): string; + constructor(value: T); + valueOf(): T; +} diff --git a/dist/esm/Secret.mjs b/dist/esm/Secret.mjs new file mode 100644 index 0000000..67bdb6a --- /dev/null +++ b/dist/esm/Secret.mjs @@ -0,0 +1,25 @@ +import { serdesDeserializeSymbol, serdesSerializeSymbol, serdesTagSymbol } from '@chelonia/serdes'; +/* Wrapper class for secrets, which identifies them as such and prevents them +from being logged */ +// Use a `WeakMap` to store the actual secret outside of the returned `Secret` +// object. This ensures that the only way to access the secret is via the +// `.valueOf()` method, and it prevents accidentally logging things that +// shouldn't be logged. 
+const wm = new WeakMap(); +export class Secret { + static [serdesDeserializeSymbol](secret) { + return new this(secret); + } + static [serdesSerializeSymbol](secret) { + return wm.get(secret); + } + static get [serdesTagSymbol]() { + return '__chelonia_Secret'; + } + constructor(value) { + wm.set(this, value); + } + valueOf() { + return wm.get(this); + } +} diff --git a/dist/esm/chelonia-utils.d.mts b/dist/esm/chelonia-utils.d.mts new file mode 100644 index 0000000..d451d2b --- /dev/null +++ b/dist/esm/chelonia-utils.d.mts @@ -0,0 +1,2 @@ +declare const _default: string[]; +export default _default; diff --git a/dist/esm/chelonia-utils.mjs b/dist/esm/chelonia-utils.mjs new file mode 100644 index 0000000..06e6ef6 --- /dev/null +++ b/dist/esm/chelonia-utils.mjs @@ -0,0 +1,29 @@ +import sbp from '@sbp/sbp'; +// This file contains non-core parts of Chelonia, i.e., functionality that is +// useful but optional. The threshold for something being 'optional' generally +// is something that can be implemented externally using only public Chelonia +// selectors. +// Optional functionality can make certain assumptions about contracts or +// actions to make things simpler or easier to implement. +// Currently, a single selector is defined: 'chelonia/kv/queuedSet'. +// TODO: Other things should be moved to this file, such as `encryptedAction` +// (the wrapper) and 'gi.actions/out/rotateKeys'. +export default sbp('sbp/selectors/register', { + // This selector is a wrapper for the `chelonia/kv/set` selector that uses + // the contract queue and allows referring to keys by name, with default key + // names set to `csk` and `cek` for signatures and encryption, respectively. + // For most 'simple' use cases, this selector is a better choice than + // `chelonia/kv/set`. However, the `chelonia/kv/set` primitive is needed if + // the queueing logic needs to be more advanced, the key to use requires + // custom logic or _if the `onconflict` callback also needs to be queued_. 
+ 'chelonia/kv/queuedSet': ({ contractID, key, data, onconflict, ifMatch, encryptionKeyName = 'cek', signingKeyName = 'csk' }) => { + return sbp('chelonia/queueInvocation', contractID, () => { + return sbp('chelonia/kv/set', contractID, key, data, { + ifMatch, + encryptionKeyId: sbp('chelonia/contract/currentKeyIdByName', contractID, encryptionKeyName), + signingKeyId: sbp('chelonia/contract/currentKeyIdByName', contractID, signingKeyName), + onconflict + }); + }); + } +}); diff --git a/dist/esm/chelonia.d.mts b/dist/esm/chelonia.d.mts new file mode 100644 index 0000000..dbc9d1d --- /dev/null +++ b/dist/esm/chelonia.d.mts @@ -0,0 +1,175 @@ +import '@sbp/okturtles.eventqueue'; +import '@sbp/okturtles.events'; +import type { SPKey, SPOpKeyAdd, SPOpKeyDel, SPOpKeyRequestSeen, SPOpKeyShare, SPOpKeyUpdate } from './SPMessage.mjs'; +import type { Key } from '@chelonia/crypto'; +import { SPMessage } from './SPMessage.mjs'; +import './chelonia-utils.mjs'; +import type { EncryptedData } from './encryptedData.mjs'; +import './files.mjs'; +import './internals.mjs'; +import './time-sync.mjs'; +import { ChelContractState } from './types.mjs'; +export type ChelRegParams = { + contractName: string; + server?: string; + data: object; + signingKeyId: string; + actionSigningKeyId: string; + actionEncryptionKeyId?: string | null | undefined; + keys: (SPKey | EncryptedData)[]; + namespaceRegistration?: string | null | undefined; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + postpublishContract?: (msg: SPMessage) => void; + preSendCheck?: (msg: SPMessage, state: ChelContractState) => void; + beforeRequest?: (msg1: SPMessage, msg2: SPMessage) => Promise | void; + prepublish?: (msg: SPMessage) => Promise | void; + postpublish?: (msg: SPMessage) => Promise | void; + onprocessed?: (msg: SPMessage) => Promise | void; + }; + publishOptions?: { + headers?: Record | null | undefined; + billableContractID?: string | null | undefined; + maxAttempts?: number | null | undefined; + 
}; +}; +export type ChelActionParams = { + action: string; + server?: string; + contractID: string; + data: object; + signingKeyId: string; + innerSigningKeyId: string; + encryptionKeyId?: string | null | undefined; + encryptionKey?: Key | null | undefined; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise | void; + postpublish?: (msg: SPMessage) => Promise | void; + }; + publishOptions?: { + maxAttempts?: number; + }; + atomic: boolean; +}; +export type ChelKeyAddParams = { + contractName: string; + contractID: string; + data: SPOpKeyAdd; + signingKeyId: string; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise | void; + postpublish?: (msg: SPMessage) => Promise | void; + }; + publishOptions?: { + maxAttempts?: number; + }; + atomic: boolean; +}; +export type ChelKeyDelParams = { + contractName: string; + contractID: string; + data: SPOpKeyDel; + signingKeyId: string; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise; + postpublish?: (msg: SPMessage) => Promise; + }; + publishOptions?: { + maxAttempts?: number; + }; + atomic: boolean; +}; +export type ChelKeyUpdateParams = { + contractName: string; + contractID: string; + data: SPOpKeyUpdate; + signingKeyId: string; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise; + postpublish?: (msg: SPMessage) => Promise; + }; + publishOptions?: { + maxAttempts?: number; + }; + atomic: boolean; +}; +export type ChelKeyShareParams = { + originatingContractID?: string; + originatingContractName?: string; + contractID: string; + contractName: string; + data: SPOpKeyShare; + signingKeyId?: string; + signingKey?: Key; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise; + postpublish?: (msg: SPMessage) => Promise; + }; + publishOptions?: { + maxAttempts: number; + }; 
+ atomic: boolean; +}; +export type ChelKeyRequestParams = { + originatingContractID: string; + originatingContractName: string; + contractName: string; + contractID: string; + signingKeyId: string; + innerSigningKeyId: string; + encryptionKeyId: string; + innerEncryptionKeyId: string; + encryptKeyRequestMetadata?: boolean; + permissions?: '*' | string[]; + allowedActions?: '*' | string[]; + reference?: string; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise; + postpublish?: (msg: SPMessage) => Promise; + }; + publishOptions?: { + maxAttempts?: number; + }; + atomic: boolean; +}; +export type ChelKeyRequestResponseParams = { + contractName: string; + contractID: string; + data: SPOpKeyRequestSeen; + signingKeyId: string; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise; + postpublish?: (msg: SPMessage) => Promise; + }; + publishOptions?: { + maxAttempts?: number; + }; + atomic: boolean; +}; +export type ChelAtomicParams = { + originatingContractID: string; + originatingContractName: string; + contractName: string; + contractID: string; + signingKeyId: string; + data: [sel: string, data: ChelActionParams | ChelKeyRequestParams | ChelKeyShareParams][]; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise; + postpublish?: (msg: SPMessage) => Promise; + }; + publishOptions?: { + maxAttempts?: number; + }; +}; +export { SPMessage }; +export declare const ACTION_REGEX: RegExp; +declare const _default: string[]; +export default _default; diff --git a/dist/esm/chelonia.mjs b/dist/esm/chelonia.mjs new file mode 100644 index 0000000..1925096 --- /dev/null +++ b/dist/esm/chelonia.mjs @@ -0,0 +1,1844 @@ +import '@sbp/okturtles.eventqueue'; +import '@sbp/okturtles.events'; +import sbp from '@sbp/sbp'; +import { cloneDeep, delay, difference, has, intersection, merge, randomHexString, randomIntFromRange } from 
'turtledash'; +import { createCID, parseCID } from './functions.mjs'; +import { NOTIFICATION_TYPE, createClient } from './pubsub/index.mjs'; +import { EDWARDS25519SHA512BATCH, deserializeKey, keyId, keygen, serializeKey } from '@chelonia/crypto'; +import { ChelErrorResourceGone, ChelErrorUnexpected, ChelErrorUnexpectedHttpResponseCode, ChelErrorUnrecoverable } from './errors.mjs'; +import { CHELONIA_RESET, CONTRACTS_MODIFIED, CONTRACT_REGISTERED } from './events.mjs'; +import { SPMessage } from './SPMessage.mjs'; +import './chelonia-utils.mjs'; +import { encryptedOutgoingData, encryptedOutgoingDataWithRawKey, isEncryptedData, maybeEncryptedIncomingData, unwrapMaybeEncryptedData } from './encryptedData.mjs'; +import './files.mjs'; +import './internals.mjs'; +import { isSignedData, signedIncomingData, signedOutgoingData, signedOutgoingDataWithRawKey } from './signedData.mjs'; +import './time-sync.mjs'; +import { buildShelterAuthorizationHeader, checkCanBeGarbageCollected, clearObject, collectEventStream, eventsAfter, findForeignKeysByContractID, findKeyIdByName, findRevokedKeyIdsByName, findSuitableSecretKeyId, getContractIDfromKeyId, handleFetchResult, reactiveClearObject } from './utils.mjs'; +export { SPMessage }; +export const ACTION_REGEX = /^((([\w.]+)\/([^/]+))(?:\/(?:([^/]+)\/)?)?)\w*/; +// ACTION_REGEX.exec('gi.contracts/group/payment/process') +// 0 => 'gi.contracts/group/payment/process' +// 1 => 'gi.contracts/group/payment/' +// 2 => 'gi.contracts/group' +// 3 => 'gi.contracts' +// 4 => 'group' +// 5 => 'payment' +export default sbp('sbp/selectors/register', { + // https://www.wordnik.com/words/chelonia + // https://gitlab.okturtles.org/okturtles/group-income/-/wikis/E2E-Protocol/Framework.md#alt-names + 'chelonia/_init': function () { + this.config = { + // TODO: handle connecting to multiple servers for federation + get connectionURL() { throw new Error('Invalid use of connectionURL before initialization'); }, + // override! 
+ set connectionURL(value) { Object.defineProperty(this, 'connectionURL', { value, writable: true }); }, + stateSelector: 'chelonia/private/state', // override to integrate with, for example, vuex + contracts: { + defaults: { + modules: {}, // '' => resolved module import + exposedGlobals: {}, + allowedDomains: [], + allowedSelectors: [], + preferSlim: false + }, + overrides: {}, // override default values per-contract + manifests: {} // override! contract names => manifest hashes + }, + whitelisted: (action) => !!this.whitelistedActions[action], + reactiveSet: (obj, key, value) => { obj[key] = value; return value; }, // example: set to Vue.set + fetch: (...args) => fetch(...args), + reactiveDel: (obj, key) => { delete obj[key]; }, + // acceptAllMessages disables checking whether we are expecting a message + // or not for processing + acceptAllMessages: false, + skipActionProcessing: false, + skipDecryptionAttempts: false, + skipSideEffects: false, + // Strict processing will treat all processing errors as unrecoverable + // This is useful, e.g., in the server, to prevent invalid messages from + // being added to the database + strictProcessing: false, + // Strict ordering will throw on past events with ChelErrorAlreadyProcessed + // Similarly, future events will not be reingested and will throw + // with ChelErrorDBBadPreviousHEAD + strictOrdering: false, + connectionOptions: { + maxRetries: Infinity, // See https://github.com/okTurtles/group-income/issues/1183 + reconnectOnTimeout: true // can be enabled since we are not doing auth via web sockets + }, + hooks: { + preHandleEvent: null, // async (message: SPMessage) => {} + postHandleEvent: null, // async (message: SPMessage) => {} + processError: null, // (e: Error, message: SPMessage) => {} + sideEffectError: null, // (e: Error, message: SPMessage) => {} + handleEventError: null, // (e: Error, message: SPMessage) => {} + syncContractError: null, // (e: Error, contractID: string) => {} + 
pubsubError: null // (e:Error, socket: Socket) + }, + unwrapMaybeEncryptedData + }; + // Used in publishEvent to cancel sending events after reset (logout) + this._instance = Object.create(null); + this.abortController = new AbortController(); + this.state = { + contracts: {}, // contractIDs => { type, HEAD } (contracts we've subscribed to) + pending: [] // prevents processing unexpected data from a malicious server + }; + this.manifestToContract = {}; + this.whitelistedActions = {}; + this.currentSyncs = Object.create(null); + this.postSyncOperations = Object.create(null); + this.sideEffectStacks = Object.create(null); // [contractID]: Array + this.sideEffectStack = (contractID) => { + let stack = this.sideEffectStacks[contractID]; + if (!stack) { + this.sideEffectStacks[contractID] = stack = []; + } + return stack; + }; + // setPostSyncOp defines operations to be run after all recent events have + // been processed. This is useful, for example, when responding to + // OP_KEY_REQUEST, as we want to send an OP_KEY_SHARE only to yet-unanswered + // requests, which is information in the future (from the point of view of + // the event handler). 
+ // We could directly enqueue the operations, but by using a map we avoid + // enqueueing more operations than necessary + // The operations defined here will be executed: + // (1) After a call to /sync or /syncContract; or + // (2) After an event has been handled, if it was received on a web socket + this.setPostSyncOp = (contractID, key, op) => { + this.postSyncOperations[contractID] = this.postSyncOperations[contractID] || Object.create(null); + this.postSyncOperations[contractID][key] = op; + }; + const secretKeyGetter = (o, p) => { + if (has(o, p)) + return o[p]; + const rootState = sbp(this.config.stateSelector); + if (rootState?.secretKeys && has(rootState.secretKeys, p)) { + const key = deserializeKey(rootState.secretKeys[p]); + o[p] = key; + return key; + } + }; + const secretKeyList = (o) => { + const rootState = sbp(this.config.stateSelector); + const stateKeys = Object.keys(rootState?.secretKeys || {}); + return Array.from(new Set([...Object.keys(o), ...stateKeys])); + }; + this.transientSecretKeys = new Proxy(Object.create(null), { + get: secretKeyGetter, + ownKeys: secretKeyList + }); + this.ephemeralReferenceCount = Object.create(null); + // subscriptionSet includes all the contracts in state.contracts for which + // we can process events (contracts for which we have called /sync) + // The reason we can't use, e.g., Object.keys(state.contracts), is that + // when resetting the state (calling /reset, e.g., after logging out) we may + // still receive events for old contracts that belong to the old session. + // Those events must be ignored or discarded until the new session is set up + // (i.e., login has finished running) because we don't necessarily have + // all the information needed to process events in those contracts, such as + // secret keys. + // A concrete example is: + // 1. user1 logs in to the group and rotates the group keys, then logs out + // 2. user2 logs in to the group. + // 3. 
If an event came over the web socket for the group, we must not + // process it before we've processed the OP_KEY_SHARE containing the + // new keys, or else we'll build an incorrect state. + // The example above is simplified, but this is even more of an issue + // when there is a third contract (for example, a group chatroom) using + // those rotated keys as foreign keys. + this.subscriptionSet = new Set(); + // pending includes contracts that are scheduled for syncing or in the + // process of syncing for the first time. After sync completes for the + // first time, they are removed from pending and added to subscriptionSet + this.pending = []; + }, + 'chelonia/config': function () { + return { + ...cloneDeep(this.config), + fetch: this.config.fetch, + reactiveSet: this.config.reactiveSet, + reactiveDel: this.config.reactiveDel + }; + }, + 'chelonia/configure': async function (config) { + merge(this.config, config); + // merge will strip the hooks off of config.hooks when merging from the root of the object + // because they are functions and cloneDeep doesn't clone functions + Object.assign(this.config.hooks, config.hooks || {}); + // using Object.assign here instead of merge to avoid stripping away imported modules + if (config.contracts) { + Object.assign(this.config.contracts.defaults, config.contracts.defaults || {}); + const manifests = this.config.contracts.manifests; + console.debug('[chelonia] preloading manifests:', Object.keys(manifests)); + for (const contractName in manifests) { + await sbp('chelonia/private/loadManifest', contractName, manifests[contractName]); + } + } + if (has(config, 'skipDecryptionAttempts')) { + if (config.skipDecryptionAttempts) { + this.config.unwrapMaybeEncryptedData = (data) => { + if (!isEncryptedData(data)) { + return { + encryptionKeyId: null, data + }; + } + }; + } + else { + this.config.unwrapMaybeEncryptedData = unwrapMaybeEncryptedData; + } + } + }, + 'chelonia/reset': async function (newState, postCleanupFn) { + // 
Allow optional newState OR postCleanupFn + if (typeof newState === 'function' && typeof postCleanupFn === 'undefined') { + postCleanupFn = newState; + newState = undefined; + } + if (this.pubsub) { + sbp('chelonia/private/stopClockSync'); + } + // wait for any pending sync operations to finish before saving + Object.keys(this.postSyncOperations).forEach(cID => { + sbp('chelonia/private/enqueuePostSyncOps', cID); + }); + await sbp('chelonia/contract/waitPublish'); + await sbp('chelonia/contract/wait'); + // do this again to catch operations that are the result of side-effects + // or post sync ops + Object.keys(this.postSyncOperations).forEach(cID => { + sbp('chelonia/private/enqueuePostSyncOps', cID); + }); + await sbp('chelonia/contract/waitPublish'); + await sbp('chelonia/contract/wait'); + const result = await postCleanupFn?.(); + // The following are all synchronous operations + const rootState = sbp(this.config.stateSelector); + // Cancel all outgoing messages by replacing this._instance + this._instance = Object.create(null); + this.abortController.abort(); + this.abortController = new AbortController(); + // Remove all contracts, including all contracts from pending + reactiveClearObject(rootState, this.config.reactiveDel); + this.config.reactiveSet(rootState, 'contracts', Object.create(null)); + clearObject(this.ephemeralReferenceCount); + this.pending.splice(0); + clearObject(this.currentSyncs); + clearObject(this.postSyncOperations); + clearObject(this.sideEffectStacks); + const removedContractIDs = Array.from(this.subscriptionSet); + this.subscriptionSet.clear(); + sbp('chelonia/clearTransientSecretKeys'); + sbp('okTurtles.events/emit', CHELONIA_RESET); + sbp('okTurtles.events/emit', CONTRACTS_MODIFIED, Array.from(this.subscriptionSet), { added: [], removed: removedContractIDs }); + if (this.pubsub) { + sbp('chelonia/private/startClockSync'); + } + if (newState) { + Object.entries(newState).forEach(([key, value]) => { + this.config.reactiveSet(rootState, 
key, value); + }); + } + return result; + }, + 'chelonia/storeSecretKeys': function (wkeys) { + const rootState = sbp(this.config.stateSelector); + if (!rootState.secretKeys) + this.config.reactiveSet(rootState, 'secretKeys', Object.create(null)); + let keys = wkeys.valueOf(); + if (!keys) + return; + if (!Array.isArray(keys)) + keys = [keys]; + keys.forEach(({ key, transient }) => { + if (!key) + return; + if (typeof key === 'string') { + key = deserializeKey(key); + } + const id = keyId(key); + // Store transient keys transientSecretKeys + if (!has(this.transientSecretKeys, id)) { + this.transientSecretKeys[id] = key; + } + if (transient) + return; + // If the key is marked as persistent, write it to the state as well + if (!has(rootState.secretKeys, id)) { + this.config.reactiveSet(rootState.secretKeys, id, serializeKey(key, true)); + } + }); + }, + 'chelonia/clearTransientSecretKeys': function (ids) { + if (Array.isArray(ids)) { + ids.forEach((id) => { + delete this.transientSecretKeys[id]; + }); + } + else { + Object.keys(this.transientSecretKeys).forEach((id) => { + delete this.transientSecretKeys[id]; + }); + } + }, + 'chelonia/haveSecretKey': function (keyId, persistent) { + if (!persistent && has(this.transientSecretKeys, keyId)) + return true; + const rootState = sbp(this.config.stateSelector); + return !!rootState?.secretKeys && has(rootState.secretKeys, keyId); + }, + 'chelonia/contract/isResyncing': function (contractIDOrState) { + if (typeof contractIDOrState === 'string') { + const rootState = sbp(this.config.stateSelector); + contractIDOrState = rootState[contractIDOrState]; + } + return !!contractIDOrState?._volatile?.dirty || !!contractIDOrState?._volatile?.resyncing; + }, + 'chelonia/contract/hasKeyShareBeenRespondedBy': function (contractIDOrState, requestedToContractID, reference) { + if (typeof contractIDOrState === 'string') { + const rootState = sbp(this.config.stateSelector); + contractIDOrState = rootState[contractIDOrState]; + } + const 
result = Object.values(contractIDOrState?._vm.authorizedKeys || {}).some((r) => { + return r?.meta?.keyRequest?.responded && r.meta.keyRequest.contractID === requestedToContractID && (!reference || r.meta.keyRequest.reference === reference); + }); + return result; + }, + 'chelonia/contract/waitingForKeyShareTo': function (contractIDOrState, requestingContractID, reference) { + if (typeof contractIDOrState === 'string') { + const rootState = sbp(this.config.stateSelector); + contractIDOrState = rootState[contractIDOrState]; + } + const result = contractIDOrState._volatile?.pendingKeyRequests + ?.filter((r) => { + return r && (!requestingContractID || r.contractID === requestingContractID) && (!reference || r.reference === reference); + }) + ?.map(({ name }) => name); + if (!result?.length) + return null; + return result; + }, + 'chelonia/contract/successfulKeySharesByContractID': function (contractIDOrState, requestingContractID) { + if (typeof contractIDOrState === 'string') { + const rootState = sbp(this.config.stateSelector); + contractIDOrState = rootState[contractIDOrState]; + } + const keyShares = Object.values(contractIDOrState._vm.keyshares || {}); + if (!keyShares?.length) + return; + const result = Object.create(null); + keyShares.forEach((kS) => { + if (!kS.success) + return; + if (requestingContractID && kS.contractID !== requestingContractID) + return; + if (!result[kS.contractID]) + result[kS.contractID] = []; + result[kS.contractID].push({ height: kS.height, hash: kS.hash }); + }); + Object.keys(result).forEach(cID => { + result[cID].sort((a, b) => { + return b.height - a.height; + }); + }); + return result; + }, + 'chelonia/contract/hasKeysToPerformOperation': function (contractIDOrState, operation) { + if (typeof contractIDOrState === 'string') { + const rootState = sbp(this.config.stateSelector); + contractIDOrState = rootState[contractIDOrState]; + } + const op = (operation !== '*') ? 
[operation] : operation; + return !!findSuitableSecretKeyId(contractIDOrState, op, ['sig']); + }, + // Did sourceContractIDOrState receive an OP_KEY_SHARE to perform the given + // operation on contractIDOrState? + 'chelonia/contract/receivedKeysToPerformOperation': function (sourceContractIDOrState, contractIDOrState, operation) { + const rootState = sbp(this.config.stateSelector); + if (typeof sourceContractIDOrState === 'string') { + sourceContractIDOrState = rootState[sourceContractIDOrState]; + } + if (typeof contractIDOrState === 'string') { + contractIDOrState = rootState[contractIDOrState]; + } + const op = (operation !== '*') ? [operation] : operation; + const keyId = findSuitableSecretKeyId(contractIDOrState, op, ['sig']); + return sourceContractIDOrState?._vm?.sharedKeyIds?.some((sK) => sK.id === keyId); + }, + 'chelonia/contract/currentKeyIdByName': function (contractIDOrState, name, requireSecretKey) { + if (typeof contractIDOrState === 'string') { + const rootState = sbp(this.config.stateSelector); + contractIDOrState = rootState[contractIDOrState]; + } + const currentKeyId = findKeyIdByName(contractIDOrState, name); + if (requireSecretKey && !sbp('chelonia/haveSecretKey', currentKeyId)) { + return; + } + return currentKeyId; + }, + 'chelonia/contract/foreignKeysByContractID': function (contractIDOrState, foreignContractID) { + if (typeof contractIDOrState === 'string') { + const rootState = sbp(this.config.stateSelector); + contractIDOrState = rootState[contractIDOrState]; + } + return findForeignKeysByContractID(contractIDOrState, foreignContractID); + }, + 'chelonia/contract/historicalKeyIdsByName': function (contractIDOrState, name) { + if (typeof contractIDOrState === 'string') { + const rootState = sbp(this.config.stateSelector); + contractIDOrState = rootState[contractIDOrState]; + } + const currentKeyId = findKeyIdByName(contractIDOrState, name); + const revokedKeyIds = findRevokedKeyIdsByName(contractIDOrState, name); + return currentKeyId ? 
[currentKeyId, ...revokedKeyIds] : revokedKeyIds; + }, + 'chelonia/contract/suitableSigningKey': function (contractIDOrState, permissions, purposes, ringLevel, allowedActions) { + if (typeof contractIDOrState === 'string') { + const rootState = sbp(this.config.stateSelector); + contractIDOrState = rootState[contractIDOrState]; + } + const keyId = findSuitableSecretKeyId(contractIDOrState, permissions, purposes, ringLevel, allowedActions); + return keyId; + }, + 'chelonia/contract/setPendingKeyRevocation': function (contractID, names) { + const rootState = sbp(this.config.stateSelector); + const state = rootState[contractID]; + if (!state._volatile) + this.config.reactiveSet(state, '_volatile', Object.create(null)); + if (!state._volatile.pendingKeyRevocations) + this.config.reactiveSet(state._volatile, 'pendingKeyRevocations', Object.create(null)); + for (const name of names) { + const keyId = findKeyIdByName(state, name); + if (keyId) { + this.config.reactiveSet(state._volatile.pendingKeyRevocations, keyId, true); + } + else { + console.warn('[setPendingKeyRevocation] Unable to find keyId for name', { contractID, name }); + } + } + }, + 'chelonia/shelterAuthorizationHeader'(contractID) { + return buildShelterAuthorizationHeader.call(this, contractID); + }, + // The purpose of the 'chelonia/crypto/*' selectors is so that they can be called + // from contracts without including the crypto code (i.e., importing crypto.js) + // This function takes a function as a parameter that returns a string + // It does not a string directly to prevent accidentally logging the value, + // which is a secret + 'chelonia/crypto/keyId': (inKey) => { + return keyId(inKey.valueOf()); + }, + // TODO: allow connecting to multiple servers at once + 'chelonia/connect': function (options = {}) { + if (!this.config.connectionURL) + throw new Error('config.connectionURL missing'); + if (!this.config.connectionOptions) + throw new Error('config.connectionOptions missing'); + if (this.pubsub) { 
+ this.pubsub.destroy(); + } + let pubsubURL = this.config.connectionURL; + if (process.env.NODE_ENV === 'development') { + // This is temporarily used in development mode to help the server improve + // its console output until we have a better solution. Do not use for auth. + pubsubURL += `?debugID=${randomHexString(6)}`; + } + if (this.pubsub) { + sbp('chelonia/private/stopClockSync'); + } + sbp('chelonia/private/startClockSync'); + this.pubsub = createClient(pubsubURL, { + ...this.config.connectionOptions, + handlers: { + ...options.handlers, + // Every time we get a REQUEST_TYPE.SUB response, which happens for + // 'new' subscriptions as well as every time the connection is reset + 'subscription-succeeded': function (event) { + const { channelID } = event.detail; + // The check below is needed because we could have unsubscribed since + // requesting a subscription from the server. In that case, we don't + // need to call `sync`. + if (this.subscriptionSet.has(channelID)) { + // For new subscriptions, some messages could have been lost + // between the time the subscription was requested and it was + // actually set up. In these cases, force sync contracts to get them + // updated. 
+ sbp('chelonia/private/out/sync', channelID, { force: true }).catch((err) => { + console.warn(`[chelonia] Syncing contract ${channelID} failed: ${err.message}`); + }); + } + options.handlers?.['subscription-succeeded']?.call(this, event); + } + }, + // Map message handlers to transparently handle encryption and signatures + messageHandlers: { + ...(Object.fromEntries(Object.entries(options.messageHandlers || {}).map(([k, v]) => { + switch (k) { + case NOTIFICATION_TYPE.PUB: + return [k, (msg) => { + if (!msg.channelID) { + console.info('[chelonia] Discarding pub event without channelID'); + return; + } + if (!this.subscriptionSet.has(msg.channelID)) { + console.info(`[chelonia] Discarding pub event for ${msg.channelID} because it's not in the current subscriptionSet`); + return; + } + sbp('chelonia/queueInvocation', msg.channelID, () => { + v.call(this.pubsub, parseEncryptedOrUnencryptedMessage(this, { + contractID: msg.channelID, + serializedData: msg.data + })); + }).catch((e) => { + console.error(`[chelonia] Error processing pub event for ${msg.channelID}`, e); + }); + }]; + case NOTIFICATION_TYPE.KV: + return [k, (msg) => { + if (!msg.channelID || !msg.key) { + console.info('[chelonia] Discarding kv event without channelID or key'); + return; + } + if (!this.subscriptionSet.has(msg.channelID)) { + console.info(`[chelonia] Discarding kv event for ${msg.channelID} because it's not in the current subscriptionSet`); + return; + } + sbp('chelonia/queueInvocation', msg.channelID, () => { + v.call(this.pubsub, [msg.key, parseEncryptedOrUnencryptedMessage(this, { + contractID: msg.channelID, + meta: msg.key, + serializedData: JSON.parse(Buffer.from(msg.data).toString()) + })]); + }).catch((e) => { + console.error(`[chelonia] Error processing kv event for ${msg.channelID} and key ${msg.key}`, msg, e); + }); + }]; + case NOTIFICATION_TYPE.DELETION: + return [k, (msg) => v.call(this.pubsub, msg.data)]; + default: + return [k, v]; + } + }))), + 
[NOTIFICATION_TYPE.ENTRY](msg) { + // We MUST use 'chelonia/private/in/enqueueHandleEvent' to ensure handleEvent() + // is called AFTER any currently-running calls to 'chelonia/private/out/sync' + // to prevent gi.db from throwing "bad previousHEAD" errors. + // Calling via SBP also makes it simple to implement 'test/backend.mjs' + const { contractID } = SPMessage.deserializeHEAD(msg.data); + sbp('chelonia/private/in/enqueueHandleEvent', contractID, msg.data); + } + } + }); + if (!this.contractsModifiedListener) { + // Keep pubsub in sync (logged into the right "rooms") with 'state.contracts' + this.contractsModifiedListener = () => sbp('chelonia/pubsub/update'); + sbp('okTurtles.events/on', CONTRACTS_MODIFIED, this.contractsModifiedListener); + } + return this.pubsub; + }, + // This selector is defined primarily for ingesting web push notifications, + // although it can be used as a general-purpose API to process events received + // from other external sources that are not managed by Chelonia itself (i.e. sources + // other than the Chelonia-managed websocket connection and RESTful API). 
+ 'chelonia/handleEvent': async function (event) { + const { contractID } = SPMessage.deserializeHEAD(event); + return await sbp('chelonia/private/in/enqueueHandleEvent', contractID, event); + }, + 'chelonia/defineContract': function (contract) { + if (!ACTION_REGEX.exec(contract.name)) + throw new Error(`bad contract name: ${contract.name}`); + if (!contract.metadata) + contract.metadata = { validate() { }, create: () => ({}) }; + if (!contract.getters) + contract.getters = {}; + contract.state = (contractID) => sbp(this.config.stateSelector)[contractID]; + contract.manifest = this.defContractManifest; + contract.sbp = this.defContractSBP; + this.defContractSelectors = []; + this.defContract = contract; + this.defContractSelectors.push(...sbp('sbp/selectors/register', { + // expose getters for Vuex integration and other conveniences + [`${contract.manifest}/${contract.name}/getters`]: () => contract.getters, + // 2 ways to cause sideEffects to happen: by defining a sideEffect function in the + // contract, or by calling /pushSideEffect w/async SBP call. Can also do both. + [`${contract.manifest}/${contract.name}/pushSideEffect`]: (contractID, asyncSbpCall) => { + // if this version of the contract is pushing a sideEffect to a function defined by the + // contract itself, make sure that it calls the same version of the sideEffect + const [sel] = asyncSbpCall; + if (sel.startsWith(contract.name + '/')) { + asyncSbpCall[0] = `${contract.manifest}/${sel}`; + } + this.sideEffectStack(contractID).push(asyncSbpCall); + } + })); + for (const action in contract.actions) { + contractNameFromAction(action); // ensure actions are appropriately named + this.whitelistedActions[action] = true; + // TODO: automatically generate send actions here using `${action}/send` + // allow the specification of: + // - the optype (e.g. 
OP_ACTION_(UN)ENCRYPTED) + // - a localized error message + // - whatever keys should be passed in as well + // base it off of the design of encryptedAction() + this.defContractSelectors.push(...sbp('sbp/selectors/register', { + [`${contract.manifest}/${action}/process`]: async (message, state) => { + const { meta, data, contractID } = message; + // TODO: optimize so that you're creating a proxy object only when needed + // TODO: Note: when sandboxing contracts, contracts may not have + // access to the state directly, meaning that modifications would need + // to be re-applied + state = state || contract.state(contractID); + const gProxy = gettersProxy(state, contract.getters); + // These `await` are here to help with sandboxing in the future + // Sandboxing may mean that contracts are executed in another context + // (e.g., a worker), which would require asynchronous communication + // between Chelonia and the contract. + // Even though these are asynchronous calls, contracts should not + // call side effects from these functions + await contract.metadata.validate(meta, { state, ...gProxy, contractID }); + await contract.actions[action].validate(data, { state, ...gProxy, meta, message, contractID }); + // it's possible that the sideEffect stack got filled up by the call to `processMessage` from + // a call to `publishEvent` (when an outgoing message is being sent). 
+ this.sideEffectStacks[contractID] = []; + await contract.actions[action].process(message, { state, ...gProxy }); + }, + // 'mutation' is an object that's similar to 'message', but not identical + [`${contract.manifest}/${action}/sideEffect`]: async (mutation, state) => { + if (contract.actions[action].sideEffect) { + state = state || contract.state(mutation.contractID); + if (!state) { + console.warn(`[${contract.manifest}/${action}/sideEffect]: Skipping side-effect since there is no contract state for contract ${mutation.contractID}`); + return; + } + // TODO: Copy to simulate a sandbox boundary without direct access + // as well as to enforce the rule that side-effects must not mutate + // state + const stateCopy = cloneDeep(state); + const gProxy = gettersProxy(stateCopy, contract.getters); + await contract.actions[action].sideEffect(mutation, { state: stateCopy, ...gProxy }); + } + // since both /process and /sideEffect could call /pushSideEffect, we make sure + // to process the side effects on the stack after calling /sideEffect. 
+ const sideEffects = this.sideEffectStack(mutation.contractID); + while (sideEffects.length > 0) { + const sideEffect = sideEffects.shift(); + try { + await contract.sbp(...sideEffect); + } + catch (e_) { + const e = e_; + console.error(`[chelonia] ERROR: '${e.name}' ${e.message}, for pushed sideEffect of ${mutation.description}:`, sideEffect); + this.sideEffectStacks[mutation.contractID] = []; // clear the side effects + throw e; + } + } + } + })); + } + for (const method in contract.methods) { + this.defContractSelectors.push(...sbp('sbp/selectors/register', { + [`${contract.manifest}/${method}`]: contract.methods[method] + })); + } + sbp('okTurtles.events/emit', CONTRACT_REGISTERED, contract); + }, + 'chelonia/queueInvocation': (contractID, sbpInvocation) => { + // We maintain two queues, contractID, used for internal events (i.e., + // from chelonia) and public:contractID, used for operations that need to + // be done after all the current internal events (if any) have + // finished processing. + // Once all of the current internal events (in the contractID queue) + // have completed, the operation requested is put into the public queue. + // The reason for maintaining two different queues is to provide users + // a way to run operations after internal operations have been processed + // (for example, a side-effect might call queueInvocation to do work + // after the current and future events have been processed), without the + // work in these user-functions blocking Chelonia and prventing it from + // processing events. + // For example, a contract could have an action called + // 'example/setProfilePicture'. 
The side-effect could look like this: + // + // sideEffect ({ data, contractID }, { state }) { + // const profilePictureUrl = data.url + // + // sbp('chelonia/queueInvocation', contractID, () => { + // const rootState = sbp('state/vuex/state') + // if (rootState[contractID].profilePictureUrl !== profilePictureUrl) + // return // The profile picture changed, so we do nothing + // + // // The following could take a long time. We want Chelonia + // // to still work and process events as normal. + // return this.config.fetch(profilePictureUrl).then(doSomeWorkWithTheFile) + // }) + // } + return sbp('chelonia/private/queueEvent', contractID, ['chelonia/private/noop']).then(() => sbp('chelonia/private/queueEvent', 'public:' + contractID, sbpInvocation)); + }, + 'chelonia/begin': async (...invocations) => { + for (const invocation of invocations) { + await sbp(...invocation); + } + }, + // call this manually to resubscribe/unsubscribe from contracts as needed + // if you are using a custom stateSelector and reload the state (e.g. upon login) + 'chelonia/pubsub/update': function () { + const client = this.pubsub; + const subscribedIDs = [...client.subscriptionSet]; + const currentIDs = Array.from(this.subscriptionSet); + const leaveSubscribed = intersection(subscribedIDs, currentIDs); + const toUnsubscribe = difference(subscribedIDs, leaveSubscribed); + const toSubscribe = difference(currentIDs, leaveSubscribed); + // There is currently no need to tell other clients about our sub/unsubscriptions. 
+ try { + for (const contractID of toUnsubscribe) { + client.unsub(contractID); + } + for (const contractID of toSubscribe) { + client.sub(contractID); + } + } + catch (e) { + console.error(`[chelonia] pubsub/update: error ${e.name}: ${e.message}`, { toUnsubscribe, toSubscribe }, e); + this.config.hooks.pubsubError?.(e, client); + } + }, + // resolves when all pending actions for these contractID(s) finish + 'chelonia/contract/wait': function (contractIDs) { + const listOfIds = contractIDs + ? (typeof contractIDs === 'string' ? [contractIDs] : contractIDs) + : Object.keys(sbp(this.config.stateSelector).contracts); + return Promise.all(listOfIds.flatMap(cID => { + return sbp('chelonia/queueInvocation', cID, ['chelonia/private/noop']); + })); + }, + // resolves when all pending *writes* for these contractID(s) finish + 'chelonia/contract/waitPublish': function (contractIDs) { + const listOfIds = contractIDs + ? (typeof contractIDs === 'string' ? [contractIDs] : contractIDs) + : Object.keys(sbp(this.config.stateSelector).contracts); + return Promise.all(listOfIds.flatMap(cID => { + return sbp('chelonia/private/queueEvent', `publish:${cID}`, ['chelonia/private/noop']); + })); + }, + // 'chelonia/contract' - selectors related to injecting remote data and monitoring contracts + // TODO: add an optional parameter to "retain" the contract (see #828) + // eslint-disable-next-line require-await + 'chelonia/contract/sync': async function (contractIDs, params) { + // The exposed `chelonia/contract/sync` selector is meant for users of + // Chelonia and not for internal use within Chelonia. + // It should only be called after `/retain` where needed (for example, when + // starting up Chelonia with a saved state) + const listOfIds = typeof contractIDs === 'string' ? 
[contractIDs] : contractIDs; + // Verify that there's a valid reference count + listOfIds.forEach((id) => { + if (checkCanBeGarbageCollected.call(this, id)) { + if (process.env.CI) { + Promise.reject(new Error('[chelonia] Missing reference count for contract ' + id)); + } + console.error('[chelonia] Missing reference count for contract ' + id); + throw new Error('Missing reference count for contract'); + } + }); + // Call the internal sync selector. `force` is always true as using `/sync` + // besides internally is only needed to force sync a contract + return sbp('chelonia/private/out/sync', listOfIds, { ...params, force: true }); + }, + 'chelonia/contract/isSyncing': function (contractID, { firstSync = false } = {}) { + const isSyncing = !!this.currentSyncs[contractID]; + return firstSync + ? isSyncing && this.currentSyncs[contractID].firstSync + : isSyncing; + }, + 'chelonia/contract/currentSyncs': function () { + return Object.keys(this.currentSyncs); + }, + // Because `/remove` is done asynchronously and a contract might be removed + // much later than when the call to remove was made, an optional callback + // can be passed to verify whether to proceed with removal. This is used as + // part of the `/release` mechanism to prevent removing contracts that have + // acquired new references since the call to `/remove`. + 'chelonia/contract/remove': function (contractIDs, { confirmRemovalCallback, permanent } = {}) { + const rootState = sbp(this.config.stateSelector); + const listOfIds = typeof contractIDs === 'string' ? [contractIDs] : contractIDs; + return Promise.all(listOfIds.map(contractID => { + if (!rootState?.contracts?.[contractID]) { + return undefined; + } + return sbp('chelonia/private/queueEvent', contractID, () => { + // This allows us to double-check that the contract is meant to be + // removed, as circumstances could have changed from the time remove + // was called and this function is executed. 
For example, `/release` + // makes a synchronous check, but processing of other events since + // require this to be re-checked (in this case, for reference counts). + if (confirmRemovalCallback && !confirmRemovalCallback(contractID)) { + return; + } + const rootState = sbp(this.config.stateSelector); + const fkContractIDs = Array.from(new Set(Object.values(rootState[contractID]?._vm?.authorizedKeys ?? {}).filter((k) => { + return !!k.foreignKey; + }).map((k) => { + try { + const fkUrl = new URL(k.foreignKey); + return fkUrl.pathname; + } + catch { + return undefined; + } + }).filter(Boolean))); + sbp('chelonia/private/removeImmediately', contractID, { permanent }); + if (fkContractIDs.length) { + // Attempt to release all contracts that are being monitored for + // foreign keys + sbp('chelonia/contract/release', fkContractIDs, { try: true }).catch((e) => { + console.error('[chelonia] Error attempting to release foreign key contracts', e); + }); + } + }); + })); + }, + 'chelonia/contract/retain': async function (contractIDs, params) { + const listOfIds = typeof contractIDs === 'string' ? [contractIDs] : contractIDs; + const rootState = sbp(this.config.stateSelector); + if (listOfIds.length === 0) + return Promise.resolve(); + const checkIfDeleted = (id) => { + // Contract has been permanently deleted + if (rootState.contracts[id] === null) { + console.error('[chelonia/contract/retain] Called /retain on permanently deleted contract.', id); + throw new ChelErrorResourceGone('Unable to retain permanently deleted contract ' + id); + } + }; + if (!params?.ephemeral) { + listOfIds.forEach((id) => { + checkIfDeleted(id); + if (!has(rootState.contracts, id)) { + this.config.reactiveSet(rootState.contracts, id, Object.create(null)); + } + this.config.reactiveSet(rootState.contracts[id], 'references', (rootState.contracts[id].references ?? 
0) + 1); + }); + } + else { + listOfIds.forEach((id) => { + checkIfDeleted(id); + if (!has(this.ephemeralReferenceCount, id)) { + this.ephemeralReferenceCount[id] = 1; + } + else { + this.ephemeralReferenceCount[id] = this.ephemeralReferenceCount[id] + 1; + } + }); + } + return await sbp('chelonia/private/out/sync', listOfIds); + }, + // the `try` parameter does not affect (ephemeral or persistent) reference + // counts, but rather removes a contract if the reference count is zero + // and the contract isn't being monitored for foreign keys. This parameter + // is meant mostly for internal chelonia use, so that removing or releasing + // a contract can also remove other contracts that this first contract + // was monitoring. + 'chelonia/contract/release': async function (contractIDs, params) { + const listOfIds = typeof contractIDs === 'string' ? [contractIDs] : contractIDs; + const rootState = sbp(this.config.stateSelector); + if (!params?.try) { + if (!params?.ephemeral) { + listOfIds.forEach((id) => { + // Contract has been permanently deleted + if (rootState.contracts[id] === null) { + console.warn('[chelonia/contract/release] Called /release on permanently deleted contract. 
This has no effect.', id); + return; + } + if (has(rootState.contracts, id) && has(rootState.contracts[id], 'references')) { + const current = rootState.contracts[id].references; + if (current === 0) { + console.error('[chelonia/contract/release] Invalid negative reference count for', id); + if (process.env.CI) { + // If running in CI, force tests to fail + Promise.reject(new Error('Invalid negative reference count: ' + id)); + } + throw new Error('Invalid negative reference count'); + } + if (current <= 1) { + this.config.reactiveDel(rootState.contracts[id], 'references'); + } + else { + this.config.reactiveSet(rootState.contracts[id], 'references', current - 1); + } + } + else { + console.error('[chelonia/contract/release] Invalid negative reference count for', id); + if (process.env.CI) { + // If running in CI, force tests to fail + Promise.reject(new Error('Invalid negative reference count: ' + id)); + } + throw new Error('Invalid negative reference count'); + } + }); + } + else { + listOfIds.forEach((id) => { + // Contract has been permanently deleted + if (rootState.contracts[id] === null) { + console.warn('[chelonia/contract/release] Called /release on permanently deleted contract. This has no effect.', id); + return; + } + if (has(this.ephemeralReferenceCount, id)) { + const current = this.ephemeralReferenceCount[id] ?? 0; + if (current <= 1) { + delete this.ephemeralReferenceCount[id]; + } + else { + this.ephemeralReferenceCount[id] = current - 1; + } + } + else { + console.error('[chelonia/contract/release] Invalid negative ephemeral reference count for', id); + if (process.env.CI) { + // If running in CI, force tests to fail + Promise.reject(new Error('Invalid negative ephemeral reference count: ' + id)); + } + throw new Error('Invalid negative ephemeral reference count'); + } + }); + } + } + // This function will be called twice. The first time, it provides a list of + // candidate contracts to remove. 
The second time, it confirms that the + // contract is safe to remove + const boundCheckCanBeGarbageCollected = checkCanBeGarbageCollected.bind(this); + const idsToRemove = listOfIds.filter(boundCheckCanBeGarbageCollected); + return idsToRemove.length ? await sbp('chelonia/contract/remove', idsToRemove, { confirmRemovalCallback: boundCheckCanBeGarbageCollected }) : undefined; + }, + 'chelonia/contract/disconnect': async function (contractID, contractIDToDisconnect) { + const state = sbp(this.config.stateSelector); + const contractState = state[contractID]; + const keyIds = Object.values(contractState._vm.authorizedKeys).filter((k) => { + return k._notAfterHeight == null && k.meta?.keyRequest?.contractID === contractIDToDisconnect; + }).map(k => k.id); + if (!keyIds.length) + return; + return await sbp('chelonia/out/keyDel', { + contractID, + contractName: contractState._vm.type, + data: keyIds, + signingKeyId: findSuitableSecretKeyId(contractState, [SPMessage.OP_KEY_DEL], ['sig']) + }); + }, + 'chelonia/in/processMessage': function (messageOrRawMessage, state) { + const stateCopy = cloneDeep(state); + const message = typeof messageOrRawMessage === 'string' ? SPMessage.deserialize(messageOrRawMessage, this.transientSecretKeys, stateCopy, this.config.unwrapMaybeEncryptedData) : messageOrRawMessage; + return sbp('chelonia/private/in/processMessage', message, stateCopy).then(() => stateCopy).catch((e) => { + console.warn(`chelonia/in/processMessage: reverting mutation ${message.description()}: ${message.serialize()}`, e); + return state; + }); + }, + 'chelonia/out/fetchResource': async function (cid, { code } = {}) { + const parsedCID = parseCID(cid); + if (code != null) { + if (parsedCID.code !== code) { + throw new Error(`Invalid CID content type. 
Expected ${code}, got ${parsedCID.code}`); + } + } + // Note that chelonia.db/get (set) is a no-op for lightweight clients + // This was added for consistency (processing an event also adds it to the DB) + const local = await sbp('chelonia.db/get', cid); + // We don't verify the CID because it's already been verified when it was set + if (local != null) + return local; + const url = `${this.config.connectionURL}/file/${cid}`; + const data = await this.config.fetch(url, { signal: this.abortController.signal }).then(handleFetchResult('text')); + const ourHash = createCID(data, parsedCID.code); + if (ourHash !== cid) { + throw new Error(`expected hash ${cid}. Got: ${ourHash}`); + } + await sbp('chelonia.db/set', cid, data); + return data; + }, + 'chelonia/out/latestHEADInfo': function (contractID) { + return this.config.fetch(`${this.config.connectionURL}/latestHEADinfo/${contractID}`, { + cache: 'no-store', + signal: this.abortController.signal + }).then(handleFetchResult('json')); + }, + 'chelonia/out/eventsAfter': eventsAfter, + 'chelonia/out/eventsBefore': function (contractID, beforeHeight, limit, options) { + if (limit <= 0) { + console.error('[chelonia] invalid params error: "limit" needs to be positive integer'); + } + const offset = Math.max(0, beforeHeight - limit + 1); + const eventsAfterLimit = Math.min(beforeHeight + 1, limit); + return sbp('chelonia/out/eventsAfter', contractID, offset, eventsAfterLimit, undefined, options); + }, + 'chelonia/out/eventsBetween': function (contractID, startHash, endHeight, offset = 0, { stream } = { stream: true }) { + if (offset < 0) { + console.error('[chelonia] invalid params error: "offset" needs to be positive integer or zero'); + return; + } + let reader; + const s = new ReadableStream({ + start: async (controller) => { + const first = await this.config.fetch(`${this.config.connectionURL}/file/${startHash}`, { signal: this.abortController.signal }).then(handleFetchResult('text')); + const deserializedHEAD = 
SPMessage.deserializeHEAD(first); + if (deserializedHEAD.contractID !== contractID) { + controller.error(new Error('chelonia/out/eventsBetween: Mismatched contract ID')); + return; + } + const startOffset = Math.max(0, deserializedHEAD.head.height - offset); + const limit = endHeight - startOffset + 1; + if (limit < 1) { + controller.close(); + return; + } + reader = sbp('chelonia/out/eventsAfter', contractID, startOffset, limit).getReader(); + }, + async pull(controller) { + const { done, value } = await reader.read(); + if (done) { + controller.close(); + } + else { + controller.enqueue(value); + } + } + }); + if (stream) + return s; + // Workaround for + return collectEventStream(s); + }, + 'chelonia/rootState': function () { return sbp(this.config.stateSelector); }, + 'chelonia/latestContractState': async function (contractID, options = { forceSync: false }) { + const rootState = sbp(this.config.stateSelector); + // return a copy of the state if we already have it, unless the only key that's in it is _volatile, + // in which case it means we should sync the contract to get more info. 
+ if (rootState.contracts[contractID] === null) { + throw new ChelErrorResourceGone('Permanently deleted contract ' + contractID); + } + if (!options.forceSync && rootState[contractID] && Object.keys(rootState[contractID]).some((x) => x !== '_volatile')) { + return cloneDeep(rootState[contractID]); + } + let state = Object.create(null); + let contractName = rootState.contracts[contractID]?.type; + const eventsStream = sbp('chelonia/out/eventsAfter', contractID, 0, undefined, contractID); + const eventsStreamReader = eventsStream.getReader(); + if (rootState[contractID]) + state._volatile = rootState[contractID]._volatile; + for (;;) { + const { value: event, done } = await eventsStreamReader.read(); + if (done) + return state; + const stateCopy = cloneDeep(state); + try { + await sbp('chelonia/private/in/processMessage', SPMessage.deserialize(event, this.transientSecretKeys, state, this.config.unwrapMaybeEncryptedData), state, undefined, contractName); + if (!contractName && state._vm) { + contractName = state._vm.type; + } + } + catch (e) { + console.warn(`[chelonia] latestContractState: '${e.name}': ${e.message} processing:`, event, e.stack); + if (e instanceof ChelErrorUnrecoverable) + throw e; + state = stateCopy; + } + } + }, + 'chelonia/contract/state': function (contractID, height) { + const state = sbp(this.config.stateSelector)[contractID]; + const stateCopy = state && cloneDeep(state); + if (stateCopy?._vm && height != null) { + // Remove keys in the future + Object.keys(stateCopy._vm.authorizedKeys).forEach(keyId => { + if (stateCopy._vm.authorizedKeys[keyId]._notBeforeHeight > height) { + delete stateCopy._vm.authorizedKeys[keyId]; + } + }); + } + return stateCopy; + }, + 'chelonia/contract/fullState': function (contractID) { + const rootState = sbp(this.config.stateSelector); + if (Array.isArray(contractID)) { + return Object.fromEntries(contractID.map(contractID => { + return [ + contractID, + { + contractState: rootState[contractID], + cheloniaState: 
rootState.contracts[contractID] + } + ]; + })); + } + return { + contractState: rootState[contractID], + cheloniaState: rootState.contracts[contractID] + }; + }, + // 'chelonia/out' - selectors that send data out to the server + 'chelonia/out/registerContract': async function (params) { + const { contractName, keys, hooks, publishOptions, signingKeyId, actionSigningKeyId, actionEncryptionKeyId } = params; + const manifestHash = this.config.contracts.manifests[contractName]; + const contractInfo = this.manifestToContract[manifestHash]; + if (!contractInfo) + throw new Error(`contract not defined: ${contractName}`); + const signingKey = this.transientSecretKeys[signingKeyId]; + if (!signingKey) + throw new Error(`Signing key ${signingKeyId} is not defined`); + const payload = { + type: contractName, + keys + }; + const contractMsg = SPMessage.createV1_0({ + contractID: null, + height: 0, + op: [ + SPMessage.OP_CONTRACT, + signedOutgoingDataWithRawKey(signingKey, payload) + ], + manifest: manifestHash + }); + const contractID = contractMsg.hash(); + await sbp('chelonia/private/out/publishEvent', contractMsg, (params.namespaceRegistration + ? { + ...publishOptions, + headers: { + ...publishOptions?.headers, + 'shelter-namespace-registration': params.namespaceRegistration + } + } + : publishOptions), hooks && { + prepublish: hooks.prepublishContract, + postpublish: hooks.postpublishContract + }); + await sbp('chelonia/private/out/sync', contractID); + const msg = await sbp(actionEncryptionKeyId + ? 'chelonia/out/actionEncrypted' + : 'chelonia/out/actionUnencrypted', { + action: contractName, + contractID, + data: params.data, + signingKeyId: actionSigningKeyId ?? 
signingKeyId, + encryptionKeyId: actionEncryptionKeyId, + hooks, + publishOptions + }); + return msg; + }, + 'chelonia/out/ownResources': async function (contractID) { + if (!contractID) { + throw new TypeError('A contract ID must be provided'); + } + const response = await this.config.fetch(`${this.config.connectionURL}/ownResources`, { + method: 'GET', + signal: this.abortController.signal, + headers: new Headers([ + [ + 'authorization', + buildShelterAuthorizationHeader.call(this, contractID) + ] + ]) + }); + if (!response.ok) { + console.error('Unable to fetch own resources', contractID, response.status); + throw new Error(`Unable to fetch own resources for ${contractID}: ${response.status}`); + } + return response.json(); + }, + 'chelonia/out/deleteContract': async function (contractID, credentials = {}) { + if (!contractID) { + throw new TypeError('A contract ID must be provided'); + } + if (!Array.isArray(contractID)) + contractID = [contractID]; + return await Promise.allSettled(contractID.map(async (cid) => { + const hasCredential = has(credentials, cid); + const hasToken = has(credentials[cid], 'token') && credentials[cid].token; + const hasBillableContractID = has(credentials[cid], 'billableContractID') && credentials[cid].billableContractID; + if (!hasCredential || hasToken === hasBillableContractID) { + throw new TypeError(`Either a token or a billable contract ID must be provided for ${cid}`); + } + const response = await this.config.fetch(`${this.config.connectionURL}/deleteContract/${cid}`, { + method: 'POST', + signal: this.abortController.signal, + headers: new Headers([ + ['authorization', + hasToken + ? 
`bearer ${credentials[cid].token.valueOf()}` + : buildShelterAuthorizationHeader.call(this, credentials[cid].billableContractID)] + ]) + }); + if (!response.ok) { + if (response.status === 404 || response.status === 410) { + console.warn('Contract appears to have been deleted already', cid, response.status); + return; + } + console.error('Unable to delete contract', cid, response.status); + throw new Error(`Unable to delete contract ${cid}: ${response.status}`); + } + })); + }, + // all of these functions will do both the creation of the SPMessage + // and the sending of it via 'chelonia/private/out/publishEvent' + 'chelonia/out/actionEncrypted': function (params) { + return outEncryptedOrUnencryptedAction.call(this, SPMessage.OP_ACTION_ENCRYPTED, params); + }, + 'chelonia/out/actionUnencrypted': function (params) { + return outEncryptedOrUnencryptedAction.call(this, SPMessage.OP_ACTION_UNENCRYPTED, params); + }, + 'chelonia/out/keyShare': async function (params) { + const { atomic, originatingContractName, originatingContractID, contractName, contractID, data, hooks, publishOptions } = params; + const originatingManifestHash = this.config.contracts.manifests[originatingContractName]; + const destinationManifestHash = this.config.contracts.manifests[contractName]; + const originatingContract = originatingContractID ? this.manifestToContract[originatingManifestHash]?.contract : undefined; + const destinationContract = this.manifestToContract[destinationManifestHash]?.contract; + if ((originatingContractID && !originatingContract) || !destinationContract) { + throw new Error('Contract name not found'); + } + const payload = data; + if (!params.signingKeyId && !params.signingKey) { + throw new TypeError('Either signingKeyId or signingKey must be specified'); + } + let msg = SPMessage.createV1_0({ + contractID, + op: [ + SPMessage.OP_KEY_SHARE, + params.signingKeyId + ? 
signedOutgoingData(contractID, params.signingKeyId, payload, this.transientSecretKeys) + : signedOutgoingDataWithRawKey(params.signingKey, payload) + ], + manifest: destinationManifestHash + }); + if (!atomic) { + msg = await sbp('chelonia/private/out/publishEvent', msg, publishOptions, hooks); + } + return msg; + }, + 'chelonia/out/keyAdd': async function (params) { + // TODO: For foreign keys, recalculate the key id + // TODO: Make this a noop if the key already exists with the given permissions + const { atomic, contractID, contractName, data, hooks, publishOptions } = params; + const manifestHash = this.config.contracts.manifests[contractName]; + const contract = this.manifestToContract[manifestHash]?.contract; + if (!contract) { + throw new Error('Contract name not found'); + } + const state = contract.state(contractID); + const payload = data.filter((wk) => { + const k = (isEncryptedData(wk) ? wk.valueOf() : wk); + if (has(state._vm.authorizedKeys, k.id)) { + if (state._vm.authorizedKeys[k.id]._notAfterHeight == null) { + // Can't add a key that exists + return false; + } + } + return true; + }); + if (payload.length === 0) + return; + let msg = SPMessage.createV1_0({ + contractID, + op: [ + SPMessage.OP_KEY_ADD, + signedOutgoingData(contractID, params.signingKeyId, payload, this.transientSecretKeys) + ], + manifest: manifestHash + }); + if (!atomic) { + msg = await sbp('chelonia/private/out/publishEvent', msg, publishOptions, hooks); + } + return msg; + }, + 'chelonia/out/keyDel': async function (params) { + const { atomic, contractID, contractName, data, hooks, publishOptions } = params; + const manifestHash = this.config.contracts.manifests[contractName]; + const contract = this.manifestToContract[manifestHash]?.contract; + if (!contract) { + throw new Error('Contract name not found'); + } + const state = contract.state(contractID); + const payload = data.map((keyId) => { + if (isEncryptedData(keyId)) + return keyId; + if (!has(state._vm.authorizedKeys, 
keyId) || state._vm.authorizedKeys[keyId]._notAfterHeight != null) + return undefined; + if (state._vm.authorizedKeys[keyId]._private) { + return encryptedOutgoingData(contractID, state._vm.authorizedKeys[keyId]._private, keyId); + } + else { + return keyId; + } + }).filter(Boolean); + let msg = SPMessage.createV1_0({ + contractID, + op: [ + SPMessage.OP_KEY_DEL, + signedOutgoingData(contractID, params.signingKeyId, payload, this.transientSecretKeys) + ], + manifest: manifestHash + }); + if (!atomic) { + msg = await sbp('chelonia/private/out/publishEvent', msg, publishOptions, hooks); + } + return msg; + }, + 'chelonia/out/keyUpdate': async function (params) { + const { atomic, contractID, contractName, data, hooks, publishOptions } = params; + const manifestHash = this.config.contracts.manifests[contractName]; + const contract = this.manifestToContract[manifestHash]?.contract; + if (!contract) { + throw new Error('Contract name not found'); + } + const state = contract.state(contractID); + const payload = data.map((key) => { + if (isEncryptedData(key)) + return key; + const { oldKeyId } = key; + if (state._vm.authorizedKeys[oldKeyId]._private) { + return encryptedOutgoingData(contractID, state._vm.authorizedKeys[oldKeyId]._private, key); + } + else { + return key; + } + }); + let msg = SPMessage.createV1_0({ + contractID, + op: [ + SPMessage.OP_KEY_UPDATE, + signedOutgoingData(contractID, params.signingKeyId, payload, this.transientSecretKeys) + ], + manifest: manifestHash + }); + if (!atomic) { + msg = await sbp('chelonia/private/out/publishEvent', msg, publishOptions, hooks); + } + return msg; + }, + 'chelonia/out/keyRequest': async function (params) { + const { originatingContractID, originatingContractName, contractID, contractName, hooks, publishOptions, innerSigningKeyId, encryptionKeyId, innerEncryptionKeyId, encryptKeyRequestMetadata, reference } = params; + // `encryptKeyRequestMetadata` is optional because it could be desirable + // sometimes to allow 
anyone to audit OP_KEY_REQUEST and OP_KEY_SHARE + // operations. If `encryptKeyRequestMetadata` were always true, it would + // be harder in these situations to see interactions between two contracts. + const manifestHash = this.config.contracts.manifests[contractName]; + const originatingManifestHash = this.config.contracts.manifests[originatingContractName]; + const contract = this.manifestToContract[manifestHash]?.contract; + const originatingContract = this.manifestToContract[originatingManifestHash]?.contract; + if (!contract) { + throw new Error('Contract name not found'); + } + const rootState = sbp(this.config.stateSelector); + try { + await sbp('chelonia/contract/retain', contractID, { ephemeral: true }); + const state = contract.state(contractID); + const originatingState = originatingContract.state(originatingContractID); + const havePendingKeyRequest = Object.values(originatingState._vm.authorizedKeys).findIndex((k) => { + return k._notAfterHeight == null && k.meta?.keyRequest?.contractID === contractID && state?._volatile?.pendingKeyRequests?.some(pkr => pkr.name === k.name); + }) !== -1; + // If there's a pending key request for this contract, return + if (havePendingKeyRequest) { + return; + } + const keyRequestReplyKey = keygen(EDWARDS25519SHA512BATCH); + const keyRequestReplyKeyId = keyId(keyRequestReplyKey); + const keyRequestReplyKeyP = serializeKey(keyRequestReplyKey, false); + const keyRequestReplyKeyS = serializeKey(keyRequestReplyKey, true); + const signingKeyId = findSuitableSecretKeyId(originatingState, [SPMessage.OP_KEY_ADD], ['sig']); + if (!signingKeyId) { + throw ChelErrorUnexpected(`Unable to send key request. Originating contract is missing a key with OP_KEY_ADD permission. 
contractID=${contractID} originatingContractID=${originatingContractID}`); + } + const keyAddOp = () => sbp('chelonia/out/keyAdd', { + contractID: originatingContractID, + contractName: originatingContractName, + data: [{ + id: keyRequestReplyKeyId, + name: '#krrk-' + keyRequestReplyKeyId, + purpose: ['sig'], + ringLevel: Number.MAX_SAFE_INTEGER, + permissions: params.permissions === '*' + ? '*' + : Array.isArray(params.permissions) + ? [...params.permissions, SPMessage.OP_KEY_SHARE] + : [SPMessage.OP_KEY_SHARE], + allowedActions: params.allowedActions, + meta: { + private: { + content: encryptedOutgoingData(originatingContractID, encryptionKeyId, keyRequestReplyKeyS), + shareable: false + }, + keyRequest: { + ...(reference && { reference: encryptKeyRequestMetadata ? encryptedOutgoingData(originatingContractID, encryptionKeyId, reference) : reference }), + contractID: encryptKeyRequestMetadata ? encryptedOutgoingData(originatingContractID, encryptionKeyId, contractID) : contractID + } + }, + data: keyRequestReplyKeyP + }], + signingKeyId + }).catch((e) => { + console.error(`[chelonia] Error sending OP_KEY_ADD for ${originatingContractID} during key request to ${contractID}`, e); + throw e; + }); + const payload = { + contractID: originatingContractID, + height: rootState.contracts[originatingContractID].height, + replyWith: signedOutgoingData(originatingContractID, innerSigningKeyId, { + encryptionKeyId, + responseKey: encryptedOutgoingData(contractID, innerEncryptionKeyId, keyRequestReplyKeyS) + }, this.transientSecretKeys), + request: '*' + }; + let msg = SPMessage.createV1_0({ + contractID, + op: [ + SPMessage.OP_KEY_REQUEST, + signedOutgoingData(contractID, params.signingKeyId, encryptKeyRequestMetadata + ? 
encryptedOutgoingData(contractID, innerEncryptionKeyId, payload) + : payload, this.transientSecretKeys) + ], + manifest: manifestHash + }); + msg = await sbp('chelonia/private/out/publishEvent', msg, publishOptions, { + ...hooks, + // We ensure that both messages are placed into the publish queue + prepublish: (...args) => { + return keyAddOp().then(() => hooks?.prepublish?.(...args)); + } + }); + return msg; + } + finally { + await sbp('chelonia/contract/release', contractID, { ephemeral: true }); + } + }, + 'chelonia/out/keyRequestResponse': async function (params) { + const { atomic, contractID, contractName, data, hooks, publishOptions } = params; + const manifestHash = this.config.contracts.manifests[contractName]; + const contract = this.manifestToContract[manifestHash]?.contract; + if (!contract) { + throw new Error('Contract name not found'); + } + const payload = data; + let message = SPMessage.createV1_0({ + contractID, + op: [ + SPMessage.OP_KEY_REQUEST_SEEN, + signedOutgoingData(contractID, params.signingKeyId, payload, this.transientSecretKeys) + ], + manifest: manifestHash + }); + if (!atomic) { + message = await sbp('chelonia/private/out/publishEvent', message, publishOptions, hooks); + } + return message; + }, + 'chelonia/out/atomic': async function (params) { + const { contractID, contractName, data, hooks, publishOptions } = params; + const manifestHash = this.config.contracts.manifests[contractName]; + const contract = this.manifestToContract[manifestHash]?.contract; + if (!contract) { + throw new Error('Contract name not found'); + } + const payload = (await Promise.all(data.map(([selector, opParams]) => { + if (!['chelonia/out/actionEncrypted', 'chelonia/out/actionUnencrypted', 'chelonia/out/keyAdd', 'chelonia/out/keyDel', 'chelonia/out/keyUpdate', 'chelonia/out/keyRequestResponse', 'chelonia/out/keyShare'].includes(selector)) { + throw new Error('Selector not allowed in OP_ATOMIC: ' + selector); + } + return sbp(selector, { ...opParams, 
...params, data: opParams.data, atomic: true }); + }))).flat().filter(Boolean).map((msg) => { + return [msg.opType(), msg.opValue()]; + }); + let msg = SPMessage.createV1_0({ + contractID, + op: [ + SPMessage.OP_ATOMIC, + signedOutgoingData(contractID, params.signingKeyId, payload, this.transientSecretKeys) + ], + manifest: manifestHash + }); + msg = await sbp('chelonia/private/out/publishEvent', msg, publishOptions, hooks); + return msg; + }, + 'chelonia/out/protocolUpgrade': async function () { + }, + 'chelonia/out/propSet': async function () { + }, + 'chelonia/out/propDel': async function () { + }, + 'chelonia/out/encryptedOrUnencryptedPubMessage': function ({ contractID, innerSigningKeyId, encryptionKeyId, signingKeyId, data }) { + const serializedData = outputEncryptedOrUnencryptedMessage.call(this, { + contractID, + innerSigningKeyId, + encryptionKeyId, + signingKeyId, + data + }); + this.pubsub.pub(contractID, serializedData); + }, + // Note: This is a bare-bones function designed for precise control. In many + // situations, the `chelonia/kv/queuedSet` selector (in chelonia-utils.js) + // will be simpler and more appropriate to use. + // In most situations, you want to use some queuing strategy (which this + // selector doesn't provide) alongside writing to the KV store. Therefore, as + // a general rule, you shouldn't be calling this selector directly unless + // you're building a utility library or if you have very specific needs. In + // this case, see if `chelonia/kv/queuedSet` covers your needs. + // `data` is allowed to be falsy, in which case a fetch will occur first and + // the `onconflict` handler will be called. + 'chelonia/kv/set': async function (contractID, key, data, { ifMatch, innerSigningKeyId, encryptionKeyId, signingKeyId, maxAttempts, onconflict }) { + maxAttempts = maxAttempts ?? 
3; + const url = `${this.config.connectionURL}/kv/${encodeURIComponent(contractID)}/${encodeURIComponent(key)}`; + const hasOnconflict = typeof onconflict === 'function'; + let response; + // The `resolveData` function is tasked with computing merged data, as in + // merging the existing stored values (after a conflict or initial fetch) + // and new data. The return value indicates whether there should be a new + // attempt at storing updated data (if `true`) or not (if `false`) + const resolveData = async () => { + let currentValue; + // Rationale: + // * response.ok could be the result of `GET` (no initial data) + // * 409 indicates a conflict because the height used is too old + // * 412 indicates a conflict (precondition failed) because the data + // on the KV store have been updated / is not what we expected + // All of these situations should trigger parsing the response and + // conflict resolution + if (response.ok || response.status === 409 || response.status === 412) { + const serializedDataText = await response.text(); + // We can get 409 even if there's no data on the server. We still need + // to call `onconflict` in this case, but we don't need to attempt to + // parse the response. + // This prevents this from failing in such cases, which can result in + // race conditions and data not being properly initialised. + // See + currentValue = serializedDataText + ? parseEncryptedOrUnencryptedMessage(this, { + contractID, + serializedData: JSON.parse(serializedDataText), + meta: key + }) + : undefined; + // Rationale: 404 and 410 both indicate that the store key doesn't exist. + // These are not treated as errors since we could still set the value. 
+ } + else if (response.status !== 404 && response.status !== 410) { + throw new ChelErrorUnexpectedHttpResponseCode('[kv/set] Invalid response code: ' + response.status); + } + const result = await onconflict({ + contractID, + key, + failedData: data, + status: response.status, + // If no x-cid or etag header was returned, `ifMatch` would likely be + // returned as undefined, which will then use the `''` fallback value + // when writing. This allows 404 / 410 responses to work even if no + // etag is explicitly given + etag: response.headers.get('x-cid') || response.headers.get('etag'), + get currentData() { + return currentValue?.data; + }, + currentValue + }); + if (!result) + return false; + data = result[0]; + ifMatch = result[1]; + return true; + }; + for (;;) { + if (data !== undefined) { + const serializedData = outputEncryptedOrUnencryptedMessage.call(this, { + contractID, + innerSigningKeyId, + encryptionKeyId, + signingKeyId, + data: data, + meta: key + }); + response = await this.config.fetch(url, { + headers: new Headers([[ + 'authorization', buildShelterAuthorizationHeader.call(this, contractID) + ], [ + 'if-match', ifMatch || '""' + ] + ]), + method: 'POST', + body: JSON.stringify(serializedData), + signal: this.abortController.signal + }); + } + else { + if (!hasOnconflict) { + throw TypeError('onconflict required with empty data'); + } + // If no initial data provided, perform a GET `fetch` to get the current + // data and CID. Then, `onconflict` will be used to merge the current + // and new data. + response = await this.config.fetch(url, { + headers: new Headers([[ + 'authorization', buildShelterAuthorizationHeader.call(this, contractID) + ]]), + signal: this.abortController.signal + }); + // This is only for the initial case; the logic is replicated below + // for subsequent iterations that require conflict resolution. 
+ if (await resolveData()) { + continue; + } + else { + break; + } + } + if (!response.ok) { + // Rationale: 409 and 412 indicate conflict resolution is needed + if (response.status === 409 || response.status === 412) { + if (--maxAttempts <= 0) { + throw new Error('kv/set conflict setting KV value'); + } + // Only retry if an onconflict handler exists to potentially resolve it + await delay(randomIntFromRange(0, 1500)); + if (hasOnconflict) { + if (await resolveData()) { + continue; + } + else { + break; + } + } + else { + // Can't resolve automatically if there's no conflict handler + throw new Error(`kv/set failed with status ${response.status} and no onconflict handler was provided`); + } + } + throw new ChelErrorUnexpectedHttpResponseCode('kv/set invalid response status: ' + response.status); + } + break; + } + }, + 'chelonia/kv/get': async function (contractID, key) { + const response = await this.config.fetch(`${this.config.connectionURL}/kv/${encodeURIComponent(contractID)}/${encodeURIComponent(key)}`, { + headers: new Headers([[ + 'authorization', buildShelterAuthorizationHeader.call(this, contractID) + ]]), + signal: this.abortController.signal + }); + if (response.status === 404) { + return null; + } + if (!response.ok) { + throw new Error('Invalid response status: ' + response.status); + } + const data = await response.json(); + return parseEncryptedOrUnencryptedMessage(this, { + contractID, + serializedData: data, + meta: key + }); + }, + // To set filters for a contract, call with `filter` set to an array of KV + // keys to receive updates for over the WebSocket. An empty array means that + // no KV updates will be sent. + // Calling with a single argument (the contract ID) will remove filters, + // meaning that KV updates will be sent for _any_ KV key. 
+ // The last call takes precedence, so, for example, calling with filter + // set to `['foo', 'bar']` and then with `['baz']` means that KV updates will + // be received for `baz` only, not for `foo`, `bar` or any other keys. + 'chelonia/kv/setFilter': function (contractID, filter) { + this.pubsub.setKvFilter(contractID, filter); + }, + 'chelonia/parseEncryptedOrUnencryptedDetachedMessage': function ({ contractID, serializedData, meta }) { + return parseEncryptedOrUnencryptedMessage(this, { + contractID, + serializedData, + meta + }); + } +}); +function contractNameFromAction(action) { + const regexResult = ACTION_REGEX.exec(action); + const contractName = regexResult?.[2]; + if (!contractName) + throw new Error(`Poorly named action '${action}': missing contract name.`); + return contractName; +} +function outputEncryptedOrUnencryptedMessage({ contractID, innerSigningKeyId, encryptionKeyId, signingKeyId, data, meta }) { + const state = sbp(this.config.stateSelector)[contractID]; + const signedMessage = innerSigningKeyId + ? (state._vm.authorizedKeys[innerSigningKeyId] && state._vm.authorizedKeys[innerSigningKeyId]?._notAfterHeight == null) + ? signedOutgoingData(contractID, innerSigningKeyId, data, this.transientSecretKeys) + : signedOutgoingDataWithRawKey(this.transientSecretKeys[innerSigningKeyId], data) + : data; + const payload = !encryptionKeyId + ? signedMessage + : encryptedOutgoingData(contractID, encryptionKeyId, signedMessage); + const message = signedOutgoingData(contractID, signingKeyId, payload, this.transientSecretKeys); + const rootState = sbp(this.config.stateSelector); + const height = String(rootState.contracts[contractID].height); + const serializedData = { ...message.serialize((meta ?? 
'') + height), height }; + return serializedData; +} +function parseEncryptedOrUnencryptedMessage(ctx, { contractID, serializedData, meta }) { + if (!serializedData) { + throw new TypeError('[chelonia] parseEncryptedOrUnencryptedMessage: serializedData is required'); + } + const state = sbp(ctx.config.stateSelector)[contractID]; + const numericHeight = parseInt(serializedData.height); + const rootState = sbp(ctx.config.stateSelector); + const currentHeight = rootState.contracts[contractID].height; + if (!(numericHeight >= 0) || !(numericHeight <= currentHeight)) { + throw new Error(`[chelonia] parseEncryptedOrUnencryptedMessage: Invalid height ${serializedData.height}; it must be between 0 and ${currentHeight}`); + } + // Additional data used for verification + const aad = (meta ?? '') + serializedData.height; + const v = signedIncomingData(contractID, state, serializedData, numericHeight, aad, (message) => { + return maybeEncryptedIncomingData(contractID, state, message, numericHeight, ctx.transientSecretKeys, aad, undefined); + }); + // Cached values + let encryptionKeyId; + let innerSigningKeyId; + // Lazy unwrap function + // We don't use `unwrapMaybeEncryptedData`, which would almost do the same, + // because it swallows decryption errors, which we want to propagate to + // consumers of the KV API. 
+ const unwrap = (() => { + let result; + return () => { + if (!result) { + try { + let unwrapped; + // First, we unwrap the signed data + unwrapped = v.valueOf(); + // If this is encrypted data, attempt decryption + if (isEncryptedData(unwrapped)) { + encryptionKeyId = unwrapped.encryptionKeyId; + unwrapped = unwrapped.valueOf(); + // There could be inner signed data (inner signatures), so we unwrap + // that too + if (isSignedData(unwrapped)) { + innerSigningKeyId = unwrapped.signingKeyId; + unwrapped = unwrapped.valueOf(); + } + else { + innerSigningKeyId = null; + } + } + else { + encryptionKeyId = null; + innerSigningKeyId = null; + } + result = [unwrapped]; + } + catch (e) { + result = [undefined, e]; + } + } + if (result.length === 2) { + throw result[1]; + } + return result[0]; + }; + })(); + const result = { + get contractID() { + return contractID; + }, + get innerSigningKeyId() { + if (innerSigningKeyId === undefined) { + try { + unwrap(); + } + catch { + // We're not interested in an error, that'd only be for the 'data' + // accessor. + } + } + return innerSigningKeyId; + }, + get encryptionKeyId() { + if (encryptionKeyId === undefined) { + try { + unwrap(); + } + catch { + // We're not interested in an error, that'd only be for the 'data' + // accessor. 
+ } + } + return encryptionKeyId; + }, + get signingKeyId() { + return v.signingKeyId; + }, + get data() { + return unwrap(); + }, + get signingContractID() { + return getContractIDfromKeyId(contractID, result.signingKeyId, state); + }, + get innerSigningContractID() { + return getContractIDfromKeyId(contractID, result.innerSigningKeyId, state); + } + }; + return result; +} +async function outEncryptedOrUnencryptedAction(opType, params) { + const { atomic, action, contractID, data, hooks, publishOptions } = params; + const contractName = contractNameFromAction(action); + const manifestHash = this.config.contracts.manifests[contractName]; + const { contract } = this.manifestToContract[manifestHash]; + const state = contract.state(contractID); + const meta = await contract.metadata.create(); + const unencMessage = { action, data, meta }; + const signedMessage = params.innerSigningKeyId + ? (state._vm.authorizedKeys[params.innerSigningKeyId] && state._vm.authorizedKeys[params.innerSigningKeyId]?._notAfterHeight == null) + ? signedOutgoingData(contractID, params.innerSigningKeyId, unencMessage, this.transientSecretKeys) + : signedOutgoingDataWithRawKey(this.transientSecretKeys[params.innerSigningKeyId], unencMessage) + : unencMessage; + if (opType === SPMessage.OP_ACTION_ENCRYPTED && !params.encryptionKeyId) { + throw new Error('OP_ACTION_ENCRYPTED requires an encryption key ID be given'); + } + if (params.encryptionKey) { + if (params.encryptionKeyId !== keyId(params.encryptionKey)) { + throw new Error('OP_ACTION_ENCRYPTED raw encryption key does not match encryptionKeyId'); + } + } + const payload = opType === SPMessage.OP_ACTION_UNENCRYPTED + ? signedMessage + : params.encryptionKey + ? 
encryptedOutgoingDataWithRawKey(params.encryptionKey, signedMessage) + : encryptedOutgoingData(contractID, params.encryptionKeyId, signedMessage); + let message = SPMessage.createV1_0({ + contractID, + op: [ + opType, + signedOutgoingData(contractID, params.signingKeyId, payload, this.transientSecretKeys) + ], + manifest: manifestHash + }); + if (!atomic) { + message = await sbp('chelonia/private/out/publishEvent', message, publishOptions, hooks); + } + return message; +} +// The gettersProxy is what makes Vue-like getters possible. In other words, +// we want to make sure that the getter functions that we defined in each +// contract get passed the 'state' when a getter is accessed. +// We pass in the state by creating a Proxy object that does it for us. +// This allows us to maintain compatibility with Vue.js and integrate +// the contract getters into the Vue-facing getters. +// For this to work, other getters need to be implemented relative to a +// 'current' getter that returns the state itself. 
For example: +// ``` +// { +// currentMailboxState: (state) => state, // In the contract +// currentMailboxState: (state) => state[state.currentMailboxId], // In the app +// lastMessage: (state, getters) => // Shared getter for both app and contract +// getters.currentMailboxState.messages.slice(-1).pop() +// } +// ``` +function gettersProxy(state, getters) { + const proxyGetters = new Proxy({}, { + get(_target, prop) { + return getters[prop](state, proxyGetters); + } + }); + return { getters: proxyGetters }; +} +sbp('sbp/domains/lock', ['chelonia']); diff --git a/dist/esm/constants.d.mts b/dist/esm/constants.d.mts new file mode 100644 index 0000000..7299f7b --- /dev/null +++ b/dist/esm/constants.d.mts @@ -0,0 +1,5 @@ +export declare const INVITE_STATUS: { + REVOKED: string; + VALID: string; + USED: string; +}; diff --git a/dist/esm/constants.mjs b/dist/esm/constants.mjs new file mode 100644 index 0000000..08c70fc --- /dev/null +++ b/dist/esm/constants.mjs @@ -0,0 +1,5 @@ +export const INVITE_STATUS = { + REVOKED: 'revoked', + VALID: 'valid', + USED: 'used' +}; diff --git a/dist/esm/db.d.mts b/dist/esm/db.d.mts new file mode 100644 index 0000000..3b03b48 --- /dev/null +++ b/dist/esm/db.d.mts @@ -0,0 +1,7 @@ +import '@sbp/okturtles.data'; +import '@sbp/okturtles.eventqueue'; +export declare const checkKey: (key: string) => void; +export declare const parsePrefixableKey: (key: string) => [string, string]; +export declare const prefixHandlers: Record unknown>; +declare const _default: string[]; +export default _default; diff --git a/dist/esm/db.mjs b/dist/esm/db.mjs new file mode 100644 index 0000000..a6d49cd --- /dev/null +++ b/dist/esm/db.mjs @@ -0,0 +1,214 @@ +import '@sbp/okturtles.data'; +import '@sbp/okturtles.eventqueue'; +import sbp from '@sbp/sbp'; +import { SPMessage } from './SPMessage.mjs'; +import { ChelErrorDBBadPreviousHEAD, ChelErrorDBConnection } from './errors.mjs'; +const headPrefix = 'head='; +const getContractIdFromLogHead = (key) => { + if 
(!key.startsWith(headPrefix)) + return; + return key.slice(headPrefix.length); +}; +const getLogHead = (contractID) => `${headPrefix}${contractID}`; +export const checkKey = (key) => { + // Disallow unprintable characters, slashes, and TAB. + // Also disallow characters not allowed by Windows: + // + if (/[\x00-\x1f\x7f\t\\/<>:"|?*]/.test(key)) { // eslint-disable-line no-control-regex + throw new Error(`bad key: ${JSON.stringify(key)}`); + } +}; +export const parsePrefixableKey = (key) => { + const i = key.indexOf(':'); + if (i === -1) { + return ['', key]; + } + const prefix = key.slice(0, i + 1); + if (prefix in prefixHandlers) { + return [prefix, key.slice(prefix.length)]; + } + throw new ChelErrorDBConnection(`Unknown prefix in '${key}'.`); +}; +export const prefixHandlers = { + // Decode buffers, but don't transform other values. + '': (value) => Buffer.isBuffer(value) ? value.toString('utf8') : value, + 'any:': (value) => value + /* + // 2025-03-24: Commented out because it's not used; currently, only `any:` + // is used in the `/file` route. + // Throw if the value is not a buffer. + 'blob:': value => { + if (Buffer.isBuffer(value)) { + return value + } + throw new ChelErrorDBConnection('Unexpected value: expected a buffer.') + } + */ +}; +// NOTE: To enable persistence of log use 'sbp/selectors/overwrite' +// to overwrite the following selectors: +sbp('sbp/selectors/unsafe', ['chelonia.db/get', 'chelonia.db/set', 'chelonia.db/delete']); +// NOTE: MAKE SURE TO CALL 'sbp/selectors/lock' after overwriting them! +// When using a lightweight client, the client doesn't keep a copy of messages +// in the DB. Therefore, `chelonia.db/*` selectors are mostly turned into no-ops. +// The `chelonia.db/get` selector is slightly more complex than a no-op, because +// Chelonia relies on being able to find the current contract head. To overcome +// this, if a head is requested, 'chelonia.db/get' returns information from +// the Chelonia contract state. 
+const dbPrimitiveSelectors = process.env.LIGHTWEIGHT_CLIENT === 'true' + ? { + 'chelonia.db/get': function (key) { + const id = getContractIdFromLogHead(key); + if (!id) + return Promise.resolve(); + const state = sbp('chelonia/rootState').contracts[id]; + const value = (state?.HEAD + ? JSON.stringify({ + HEAD: state.HEAD, + height: state.height, + previousKeyOp: state.previousKeyOp + }) + : undefined); + return Promise.resolve(value); + }, + 'chelonia.db/set': function () { + return Promise.resolve(); + }, + 'chelonia.db/delete': function () { + return Promise.resolve(true); + } + } + : { + // eslint-disable-next-line require-await + 'chelonia.db/get': async function (prefixableKey) { + const [prefix, key] = parsePrefixableKey(prefixableKey); + const value = sbp('okTurtles.data/get', key); + if (value === undefined) { + return; + } + return prefixHandlers[prefix](value); + }, + // eslint-disable-next-line require-await + 'chelonia.db/set': async function (key, value) { + checkKey(key); + return sbp('okTurtles.data/set', key, value); + }, + // eslint-disable-next-line require-await + 'chelonia.db/delete': async function (key) { + return sbp('okTurtles.data/delete', key); + } + }; +export default sbp('sbp/selectors/register', { + ...dbPrimitiveSelectors, + 'chelonia/db/getEntryMeta': async (contractID, height) => { + const entryMetaJson = await sbp('chelonia.db/get', `_private_hidx=${contractID}#${height}`); + if (!entryMetaJson) + return; + return JSON.parse(entryMetaJson); + }, + 'chelonia/db/setEntryMeta': async (contractID, height, entryMeta) => { + const entryMetaJson = JSON.stringify(entryMeta); + await sbp('chelonia.db/set', `_private_hidx=${contractID}#${height}`, entryMetaJson); + }, + 'chelonia/db/latestHEADinfo': async (contractID) => { + const r = await sbp('chelonia.db/get', getLogHead(contractID)); + return r && JSON.parse(r); + }, + 'chelonia/db/deleteLatestHEADinfo': (contractID) => { + return sbp('chelonia.db/set', getLogHead(contractID), ''); + }, 
+ 'chelonia/db/getEntry': async function (hash) { + try { + const value = await sbp('chelonia.db/get', hash); + if (!value) + throw new Error(`no entry for ${hash}!`); + return SPMessage.deserialize(value, this.transientSecretKeys, undefined, this.config.unwrapMaybeEncryptedData); + } + catch (e) { + throw new ChelErrorDBConnection(`${e.name} during getEntry: ${e.message}`); + } + }, + 'chelonia/db/addEntry': function (entry) { + // because addEntry contains multiple awaits - we want to make sure it gets executed + // "atomically" to minimize the chance of a contract fork + return sbp('okTurtles.eventQueue/queueEvent', `chelonia/db/${entry.contractID()}`, [ + 'chelonia/private/db/addEntry', entry + ]); + }, + // NEVER call this directly yourself! _always_ call 'chelonia/db/addEntry' instead + 'chelonia/private/db/addEntry': async function (entry) { + try { + const { previousHEAD: entryPreviousHEAD, previousKeyOp: entryPreviousKeyOp, height: entryHeight } = entry.head(); + const contractID = entry.contractID(); + if (await sbp('chelonia.db/get', entry.hash())) { + console.warn(`[chelonia.db] entry exists: ${entry.hash()}`); + return entry.hash(); + } + const HEADinfo = await sbp('chelonia/db/latestHEADinfo', contractID); + if (!entry.isFirstMessage()) { + if (!HEADinfo) { + throw new Error(`No latest HEAD for ${contractID} when attempting to process entry with previous HEAD ${entryPreviousHEAD} at height ${entryHeight}`); + } + const { HEAD: contractHEAD, previousKeyOp: contractPreviousKeyOp, height: contractHeight } = HEADinfo; + if (entryPreviousHEAD !== contractHEAD) { + console.warn(`[chelonia.db] bad previousHEAD: ${entryPreviousHEAD}! Expected: ${contractHEAD} for contractID: ${contractID}`); + throw new ChelErrorDBBadPreviousHEAD(`bad previousHEAD: ${entryPreviousHEAD}. 
Expected ${contractHEAD} for contractID: ${contractID}`); + } + else if (entryPreviousKeyOp !== contractPreviousKeyOp) { + console.error(`[chelonia.db] bad previousKeyOp: ${entryPreviousKeyOp}! Expected: ${contractPreviousKeyOp} for contractID: ${contractID}`); + throw new ChelErrorDBBadPreviousHEAD(`bad previousKeyOp: ${entryPreviousKeyOp}. Expected ${contractPreviousKeyOp} for contractID: ${contractID}`); + } + else if (!Number.isSafeInteger(entryHeight) || entryHeight !== (contractHeight + 1)) { + console.error(`[chelonia.db] bad height: ${entryHeight}! Expected: ${contractHeight + 1} for contractID: ${contractID}`); + throw new ChelErrorDBBadPreviousHEAD(`[chelonia.db] bad height: ${entryHeight}! Expected: ${contractHeight + 1} for contractID: ${contractID}`); + } + } + else { + if (HEADinfo) { + console.error(`[chelonia.db] bad previousHEAD: ${entryPreviousHEAD}! Expected: for contractID: ${contractID}`); + throw new ChelErrorDBBadPreviousHEAD(`bad previousHEAD: ${entryPreviousHEAD}. Expected for contractID: ${contractID}`); + } + else if (entryHeight !== 0) { + console.error(`[chelonia.db] bad height: ${entryHeight}! Expected: 0 for contractID: ${contractID}`); + throw new ChelErrorDBBadPreviousHEAD(`[chelonia.db] bad height: ${entryHeight}! Expected: 0 for contractID: ${contractID}`); + } + } + await sbp('chelonia.db/set', entry.hash(), entry.serialize()); + await sbp('chelonia.db/set', getLogHead(contractID), JSON.stringify({ + HEAD: entry.hash(), + previousKeyOp: entry.isKeyOp() ? 
entry.hash() : entry.previousKeyOp(), + height: entry.height() + })); + console.debug(`[chelonia.db] HEAD for ${contractID} updated to:`, entry.hash()); + await sbp('chelonia/db/setEntryMeta', contractID, entryHeight, { + // The hash is used for reverse lookups (height to CID) + hash: entry.hash(), + // The date isn't currently used, but will be used for filtering messages + date: new Date().toISOString(), + // isKeyOp is used for filtering messages (the actual filtering is + // done more efficiently a separate index key, but `isKeyOp` allows + // us to bootstrap this process without having to load the full message) + // The separate index key bears the prefix `_private_keyop_idx_`. + ...(entry.isKeyOp() && { isKeyOp: true }) + }); + return entry.hash(); + } + catch (e) { + if (e.name.includes('ErrorDB')) { + throw e; // throw the specific type of ErrorDB instance + } + throw new ChelErrorDBConnection(`${e.name} during addEntry: ${e.message}`); + } + }, + 'chelonia/db/lastEntry': async function (contractID) { + try { + const latestHEADinfo = await sbp('chelonia/db/latestHEADinfo', contractID); + if (!latestHEADinfo) + throw new Error(`contract ${contractID} has no latest hash!`); + return sbp('chelonia/db/getEntry', latestHEADinfo.HEAD); + } + catch (e) { + throw new ChelErrorDBConnection(`${e.name} during lastEntry: ${e.message}`); + } + } +}); diff --git a/dist/esm/encryptedData.d.mts b/dist/esm/encryptedData.d.mts new file mode 100644 index 0000000..ea23897 --- /dev/null +++ b/dist/esm/encryptedData.d.mts @@ -0,0 +1,22 @@ +import type { Key } from '@chelonia/crypto'; +import type { ChelContractState } from './types.mjs'; +export interface EncryptedData { + encryptionKeyId: string; + valueOf: () => T; + serialize: (additionalData?: string) => [string, string]; + toString: (additionalData?: string) => string; + toJSON?: () => [string, string]; +} +export declare const isEncryptedData: (o: unknown) => o is EncryptedData; +export declare const encryptedOutgoingData: 
(stateOrContractID: string | ChelContractState, eKeyId: string, data: T) => EncryptedData; +export declare const encryptedOutgoingDataWithRawKey: (key: Key, data: T) => EncryptedData; +export declare const encryptedIncomingData: (contractID: string, state: ChelContractState, data: [string, string], height: number, additionalKeys?: Record, additionalData?: string, validatorFn?: (v: T, id: string) => void) => EncryptedData; +export declare const encryptedIncomingForeignData: (contractID: string, _0: never, data: [string, string], _1: never, additionalKeys?: Record, additionalData?: string, validatorFn?: (v: T, id: string) => void) => EncryptedData; +export declare const encryptedIncomingDataWithRawKey: (key: Key, data: [string, string], additionalData?: string) => EncryptedData; +export declare const encryptedDataKeyId: (data: unknown) => string; +export declare const isRawEncryptedData: (data: unknown) => data is [string, string]; +export declare const unwrapMaybeEncryptedData: (data: T | EncryptedData) => { + encryptionKeyId: string | null; + data: T; +} | undefined; +export declare const maybeEncryptedIncomingData: (contractID: string, state: ChelContractState, data: T | [string, string], height: number, additionalKeys?: Record, additionalData?: string, validatorFn?: (v: T, id: string) => void) => T | EncryptedData; diff --git a/dist/esm/encryptedData.mjs b/dist/esm/encryptedData.mjs new file mode 100644 index 0000000..bf7e4e0 --- /dev/null +++ b/dist/esm/encryptedData.mjs @@ -0,0 +1,318 @@ +import { decrypt, deserializeKey, encrypt, keyId, serializeKey } from '@chelonia/crypto'; +import sbp from '@sbp/sbp'; +import { has } from 'turtledash'; +import { ChelErrorDecryptionError, ChelErrorDecryptionKeyNotFound, ChelErrorUnexpected } from './errors.mjs'; +import { isRawSignedData, signedIncomingData } from './signedData.mjs'; +const rootStateFn = () => sbp('chelonia/rootState'); +// `proto` & `wrapper` are utilities for `isEncryptedData` +const proto = 
Object.create(null, { + _isEncryptedData: { + value: true + } +}); +const wrapper = (o) => { + return Object.setPrototypeOf(o, proto); +}; +// `isEncryptedData` will return true for objects created by the various +// `encrypt*Data` functions. It's meant to implement functionality equivalent +// to `o instanceof EncryptedData` +export const isEncryptedData = (o) => { + return !!o && !!Object.getPrototypeOf(o)?._isEncryptedData; +}; +// TODO: Check for permissions and allowedActions; this requires passing some +// additional context +const encryptData = function (stateOrContractID, eKeyId, data, additionalData) { + const state = typeof stateOrContractID === 'string' ? rootStateFn()[stateOrContractID] : stateOrContractID; + // Has the key been revoked? If so, attempt to find an authorized key by the same name + const designatedKey = state?._vm?.authorizedKeys?.[eKeyId]; + if (!designatedKey?.purpose.includes('enc')) { + throw new Error(`Encryption key ID ${eKeyId} is missing or is missing encryption purpose`); + } + if (designatedKey._notAfterHeight != null) { + const name = state._vm.authorizedKeys[eKeyId].name; + const newKeyId = Object.values(state._vm?.authorizedKeys).find((v) => v._notAfterHeight == null && v.name === name && v.purpose.includes('enc'))?.id; + if (!newKeyId) { + throw new Error(`Encryption key ID ${eKeyId} has been revoked and no new key exists by the same name (${name})`); + } + eKeyId = newKeyId; + } + const key = state._vm?.authorizedKeys?.[eKeyId].data; + if (!key) { + throw new Error(`Missing encryption key ${eKeyId}`); + } + const deserializedKey = typeof key === 'string' ? 
deserializeKey(key) : key; + return [ + keyId(deserializedKey), + encrypt(deserializedKey, JSON.stringify(data, (_, v) => { + if (v && has(v, 'serialize') && typeof v.serialize === 'function') { + if (v.serialize.length === 1) { + return v.serialize(additionalData); + } + else { + return v.serialize(); + } + } + return v; + }), additionalData) + ]; +}; +// TODO: Check for permissions and allowedActions; this requires passing the +// entire SPMessage +const decryptData = function (state, height, data, additionalKeys, additionalData, validatorFn) { + if (!state) { + throw new ChelErrorDecryptionError('Missing contract state'); + } + // Compatibility with signedData (composed signed + encrypted data) + if (typeof data.valueOf === 'function') + data = data.valueOf(); + if (!isRawEncryptedData(data)) { + throw new ChelErrorDecryptionError('Invalid message format'); + } + const [eKeyId, message] = data; + const key = additionalKeys[eKeyId]; + if (!key) { + throw new ChelErrorDecryptionKeyNotFound(`Key ${eKeyId} not found`, { cause: eKeyId }); + } + // height as NaN is used to allow checking for revokedKeys as well as + // authorizedKeys when decrypting data. This is normally inappropriate because + // revoked keys should be considered compromised and not used for encrypting + // new data + // However, OP_KEY_SHARE may include data encrypted with some other contract's + // keys when a key rotation is done. This is done, along with OP_ATOMIC and + // OP_KEY_UPDATE to rotate keys in a contract while allowing member contracts + // to retrieve and use the new key material. + // In such scenarios, since the keys really live in that other contract, it is + // impossible to know if the keys had been revoked in the 'source' contract + // at the time the key rotation was done. 
This is also different from foreign + keys because these encryption keys are not necessarily authorized in the + contract issuing OP_KEY_SHARE, and what is important is to refer to the + (keys in the) foreign contract explicitly, as an alternative to sending + an OP_KEY_SHARE to that contract. + Using revoked keys represents some security risk since, as mentioned, they + should generally be considered compromised. However, in the scenario above + we can trust that the party issuing OP_KEY_SHARE is not maliciously using + old (revoked) keys, because there is little to be gained from not doing + this. If that party's intention were to leak or compromise keys, they can + already do so by other means, since they have access to the raw secrets + that OP_KEY_SHARE is meant to protect. Hence, this attack does not open up + any new attack vectors or venues that were not already available using + different means. + const designatedKey = state._vm?.authorizedKeys?.[eKeyId]; + if (!designatedKey || (height > designatedKey._notAfterHeight) || (height < designatedKey._notBeforeHeight) || !designatedKey.purpose.includes('enc')) { + throw new ChelErrorUnexpected(`Key ${eKeyId} is unauthorized or expired for the current contract`); + } + const deserializedKey = typeof key === 'string' ? 
deserializeKey(key) : key; + try { + const result = JSON.parse(decrypt(deserializedKey, message, additionalData)); + if (typeof validatorFn === 'function') + validatorFn(result, eKeyId); + return result; + } + catch (e) { + throw new ChelErrorDecryptionError(e?.message || e); + } +}; +export const encryptedOutgoingData = (stateOrContractID, eKeyId, data) => { + if (!stateOrContractID || data === undefined || !eKeyId) + throw new TypeError('Invalid invocation'); + const boundStringValueFn = encryptData.bind(null, stateOrContractID, eKeyId, data); + return wrapper({ + get encryptionKeyId() { + return eKeyId; + }, + get serialize() { + return (additionalData) => boundStringValueFn(additionalData || ''); + }, + get toString() { + return (additionalData) => JSON.stringify(this.serialize(additionalData)); + }, + get valueOf() { + return () => data; + } + }); +}; +// Used for OP_CONTRACT as a state does not yet exist +export const encryptedOutgoingDataWithRawKey = (key, data) => { + if (data === undefined || !key) + throw new TypeError('Invalid invocation'); + const eKeyId = keyId(key); + const state = { + _vm: { + authorizedKeys: { + [eKeyId]: { + purpose: ['enc'], + data: serializeKey(key, false), + _notBeforeHeight: 0, + _notAfterHeight: undefined + } + } + } + }; + const boundStringValueFn = encryptData.bind(null, state, eKeyId, data); + return wrapper({ + get encryptionKeyId() { + return eKeyId; + }, + get serialize() { + return (additionalData) => boundStringValueFn(additionalData || ''); + }, + get toString() { + return (additionalData) => JSON.stringify(this.serialize(additionalData)); + }, + get valueOf() { + return () => data; + } + }); +}; +export const encryptedIncomingData = (contractID, state, data, height, additionalKeys, additionalData, validatorFn) => { + let decryptedValue; + const decryptedValueFn = () => { + if (decryptedValue) { + return decryptedValue; + } + if (!state || !additionalKeys) { + const rootState = rootStateFn(); + state = state || 
rootState[contractID]; + additionalKeys = additionalKeys ?? rootState.secretKeys; + } + decryptedValue = decryptData(state, height, data, additionalKeys, additionalData || '', validatorFn); + if (isRawSignedData(decryptedValue)) { + decryptedValue = signedIncomingData(contractID, state, decryptedValue, height, additionalData || ''); + } + return decryptedValue; + }; + return wrapper({ + get encryptionKeyId() { + return encryptedDataKeyId(data); + }, + get serialize() { + return () => data; + }, + get toString() { + return () => JSON.stringify(this.serialize()); + }, + get valueOf() { + return decryptedValueFn; + }, + get toJSON() { + return this.serialize; + } + }); +}; +export const encryptedIncomingForeignData = (contractID, _0, data, _1, additionalKeys, additionalData, validatorFn) => { + let decryptedValue; + const decryptedValueFn = () => { + if (decryptedValue) { + return decryptedValue; + } + const rootState = rootStateFn(); + const state = rootState[contractID]; + decryptedValue = decryptData(state, NaN, data, additionalKeys ?? 
rootState.secretKeys, additionalData || '', validatorFn); + if (isRawSignedData(decryptedValue)) { + // TODO: Specify height + return signedIncomingData(contractID, state, decryptedValue, NaN, additionalData || ''); + } + return decryptedValue; + }; + return wrapper({ + get encryptionKeyId() { + return encryptedDataKeyId(data); + }, + get serialize() { + return () => data; + }, + get toString() { + return () => JSON.stringify(this.serialize()); + }, + get valueOf() { + return decryptedValueFn; + }, + get toJSON() { + return this.serialize; + } + }); +}; +export const encryptedIncomingDataWithRawKey = (key, data, additionalData) => { + if (data === undefined || !key) + throw new TypeError('Invalid invocation'); + let decryptedValue; + const eKeyId = keyId(key); + const decryptedValueFn = () => { + if (decryptedValue) { + return decryptedValue; + } + const state = { + _vm: { + authorizedKeys: { + [eKeyId]: { + purpose: ['enc'], + data: serializeKey(key, false), + _notBeforeHeight: 0, + _notAfterHeight: undefined + } + } + } + }; + decryptedValue = decryptData(state, NaN, data, { [eKeyId]: key }, additionalData || ''); + return decryptedValue; + }; + return wrapper({ + get encryptionKeyId() { + return encryptedDataKeyId(data); + }, + get serialize() { + return () => data; + }, + get toString() { + return () => JSON.stringify(this.serialize()); + }, + get valueOf() { + return decryptedValueFn; + }, + get toJSON() { + return this.serialize; + } + }); +}; +export const encryptedDataKeyId = (data) => { + if (!isRawEncryptedData(data)) { + throw new ChelErrorDecryptionError('Invalid message format'); + } + return data[0]; +}; +export const isRawEncryptedData = (data) => { + if (!Array.isArray(data) || data.length !== 2 || data.map(v => typeof v).filter(v => v !== 'string').length !== 0) { + return false; + } + return true; +}; +export const unwrapMaybeEncryptedData = (data) => { + if (data == null) + return; + if (isEncryptedData(data)) { + try { + return { + 
encryptionKeyId: data.encryptionKeyId, + data: data.valueOf() + }; + } + catch (e) { + console.warn('unwrapMaybeEncryptedData: Unable to decrypt', e); + } + } + else { + return { + encryptionKeyId: null, + data + }; + } +}; +export const maybeEncryptedIncomingData = (contractID, state, data, height, additionalKeys, additionalData, validatorFn) => { + if (isRawEncryptedData(data)) { + return encryptedIncomingData(contractID, state, data, height, additionalKeys, additionalData, validatorFn); + } + else { + validatorFn?.(data, ''); + return data; + } +}; diff --git a/dist/esm/encryptedData.test.d.mts b/dist/esm/encryptedData.test.d.mts new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/dist/esm/encryptedData.test.d.mts @@ -0,0 +1 @@ +export {}; diff --git a/dist/esm/encryptedData.test.mjs b/dist/esm/encryptedData.test.mjs new file mode 100644 index 0000000..0f58d7f --- /dev/null +++ b/dist/esm/encryptedData.test.mjs @@ -0,0 +1,68 @@ +import { CURVE25519XSALSA20POLY1305, keygen, keyId, serializeKey } from '@chelonia/crypto'; +import * as assert from 'node:assert'; +import { describe, it } from 'node:test'; +import { encryptedIncomingData, encryptedOutgoingData, encryptedOutgoingDataWithRawKey } from './encryptedData.mjs'; +describe('Encrypted data API', () => { + it('should encrypt outgoing data and decrypt incoming data when using a key from the state', () => { + const key = keygen(CURVE25519XSALSA20POLY1305); + const id = keyId(key); + const state = { + _vm: { + authorizedKeys: { + [id]: { + name: 'name', + purpose: ['enc'], + data: serializeKey(key, false) + } + } + } + }; + const encryptedData = encryptedOutgoingData(state, id, 'foo'); + assert.ok(typeof encryptedData === 'object'); + assert.ok(typeof encryptedData.toString === 'function'); + assert.ok(typeof encryptedData.serialize === 'function'); + assert.ok(typeof encryptedData.valueOf === 'function'); + assert.equal(encryptedData.valueOf(), 'foo'); + const stringifiedEncryptedData = 
encryptedData.toString(''); + assert.notEqual(stringifiedEncryptedData, 'foo'); + assert.notEqual(encryptedData.serialize(''), 'foo'); + const incoming = encryptedIncomingData('', state, JSON.parse(stringifiedEncryptedData), 0, { + [id]: key + }); + assert.ok(typeof incoming === 'object'); + assert.ok(typeof incoming.toString === 'function'); + assert.ok(typeof incoming.serialize === 'function'); + assert.ok(typeof incoming.valueOf === 'function'); + assert.deepEqual(incoming.toJSON(), JSON.parse(stringifiedEncryptedData)); + assert.equal(incoming.toString(), stringifiedEncryptedData); + assert.equal(incoming.valueOf(), 'foo'); + }); + it('should encrypt outgoing data and decrypt incoming data when using a raw key', () => { + const key = keygen(CURVE25519XSALSA20POLY1305); + const id = keyId(key); + const encryptedData = encryptedOutgoingDataWithRawKey(key, 'foo'); + assert.ok(typeof encryptedData === 'object'); + assert.ok(typeof encryptedData.toString === 'function'); + assert.ok(typeof encryptedData.serialize === 'function'); + assert.ok(typeof encryptedData.valueOf === 'function'); + assert.equal(encryptedData.valueOf(), 'foo'); + const serializedEncryptedData = encryptedData.serialize(); + assert.notEqual(serializedEncryptedData, 'foo'); + const incoming = encryptedIncomingData('', { + _vm: { + authorizedKeys: { + [id]: { + purpose: ['enc'] + } + } + } + }, serializedEncryptedData, 0, { [id]: key }); + assert.ok(typeof incoming === 'object'); + assert.ok(typeof incoming.toString === 'function'); + assert.ok(typeof incoming.serialize === 'function'); + assert.ok(typeof incoming.valueOf === 'function'); + assert.equal(incoming.valueOf(), 'foo'); + assert.deepEqual(incoming.toJSON(), serializedEncryptedData); + assert.equal(incoming.toString(), JSON.stringify(serializedEncryptedData)); + }); +}); diff --git a/dist/esm/errors.d.mts b/dist/esm/errors.d.mts new file mode 100644 index 0000000..554550a --- /dev/null +++ b/dist/esm/errors.d.mts @@ -0,0 +1,17 @@ +export 
declare const ChelErrorGenerator: (name: string, base?: ErrorConstructor) => ErrorConstructor; +export declare const ChelErrorWarning: typeof Error; +export declare const ChelErrorAlreadyProcessed: typeof Error; +export declare const ChelErrorDBBadPreviousHEAD: typeof Error; +export declare const ChelErrorDBConnection: typeof Error; +export declare const ChelErrorUnexpected: typeof Error; +export declare const ChelErrorKeyAlreadyExists: typeof Error; +export declare const ChelErrorUnrecoverable: typeof Error; +export declare const ChelErrorForkedChain: typeof Error; +export declare const ChelErrorDecryptionError: typeof Error; +export declare const ChelErrorDecryptionKeyNotFound: typeof Error; +export declare const ChelErrorSignatureError: typeof Error; +export declare const ChelErrorSignatureKeyUnauthorized: typeof Error; +export declare const ChelErrorSignatureKeyNotFound: typeof Error; +export declare const ChelErrorFetchServerTimeFailed: typeof Error; +export declare const ChelErrorUnexpectedHttpResponseCode: typeof Error; +export declare const ChelErrorResourceGone: typeof Error; diff --git a/dist/esm/errors.mjs b/dist/esm/errors.mjs new file mode 100644 index 0000000..cd0c85a --- /dev/null +++ b/dist/esm/errors.mjs @@ -0,0 +1,31 @@ +// ugly boilerplate because JavaScript is stupid +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error#Custom_Error_Types +export const ChelErrorGenerator = (name, base = Error) => (class extends base { + constructor(...params) { + super(...params); + this.name = name; // string literal so minifier doesn't overwrite + // Polyfill for cause property + if (params[1]?.cause !== this.cause) { + Object.defineProperty(this, 'cause', { configurable: true, writable: true, value: params[1]?.cause }); + } + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + } +}); +export const ChelErrorWarning = ChelErrorGenerator('ChelErrorWarning'); +export const 
ChelErrorAlreadyProcessed = ChelErrorGenerator('ChelErrorAlreadyProcessed'); +export const ChelErrorDBBadPreviousHEAD = ChelErrorGenerator('ChelErrorDBBadPreviousHEAD'); +export const ChelErrorDBConnection = ChelErrorGenerator('ChelErrorDBConnection'); +export const ChelErrorUnexpected = ChelErrorGenerator('ChelErrorUnexpected'); +export const ChelErrorKeyAlreadyExists = ChelErrorGenerator('ChelErrorKeyAlreadyExists'); +export const ChelErrorUnrecoverable = ChelErrorGenerator('ChelErrorUnrecoverable'); +export const ChelErrorForkedChain = ChelErrorGenerator('ChelErrorForkedChain'); +export const ChelErrorDecryptionError = ChelErrorGenerator('ChelErrorDecryptionError'); +export const ChelErrorDecryptionKeyNotFound = ChelErrorGenerator('ChelErrorDecryptionKeyNotFound', ChelErrorDecryptionError); +export const ChelErrorSignatureError = ChelErrorGenerator('ChelErrorSignatureError'); +export const ChelErrorSignatureKeyUnauthorized = ChelErrorGenerator('ChelErrorSignatureKeyUnauthorized', ChelErrorSignatureError); +export const ChelErrorSignatureKeyNotFound = ChelErrorGenerator('ChelErrorSignatureKeyNotFound', ChelErrorSignatureError); +export const ChelErrorFetchServerTimeFailed = ChelErrorGenerator('ChelErrorFetchServerTimeFailed'); +export const ChelErrorUnexpectedHttpResponseCode = ChelErrorGenerator('ChelErrorUnexpectedHttpResponseCode'); +export const ChelErrorResourceGone = ChelErrorGenerator('ChelErrorResourceGone', ChelErrorUnexpectedHttpResponseCode); diff --git a/dist/esm/events.d.mts b/dist/esm/events.d.mts new file mode 100644 index 0000000..909146b --- /dev/null +++ b/dist/esm/events.d.mts @@ -0,0 +1,15 @@ +export declare const CHELONIA_RESET = "chelonia-reset"; +export declare const CONTRACT_IS_SYNCING = "contract-is-syncing"; +export declare const CONTRACTS_MODIFIED = "contracts-modified"; +export declare const CONTRACTS_MODIFIED_READY = "contracts-modified-ready"; +export declare const EVENT_HANDLED = "event-handled"; +export declare const 
EVENT_PUBLISHED = "event-published"; +export declare const EVENT_PUBLISHING_ERROR = "event-publishing-error"; +export declare const EVENT_HANDLED_READY = "event-handled-ready"; +export declare const CONTRACT_REGISTERED = "contract-registered"; +export declare const CONTRACT_UNREGISTERED = "contract-unregistered"; +export declare const CONTRACT_IS_PENDING_KEY_REQUESTS = "contract-is-pending-key-requests"; +export declare const CONTRACT_HAS_RECEIVED_KEYS = "contract-has-received-keys"; +export declare const PERSISTENT_ACTION_FAILURE = "persistent-action-failure"; +export declare const PERSISTENT_ACTION_SUCCESS = "persistent-action-success"; +export declare const PERSISTENT_ACTION_TOTAL_FAILURE = "persistent-action-total_failure"; diff --git a/dist/esm/events.mjs b/dist/esm/events.mjs new file mode 100644 index 0000000..af64a53 --- /dev/null +++ b/dist/esm/events.mjs @@ -0,0 +1,15 @@ +export const CHELONIA_RESET = 'chelonia-reset'; +export const CONTRACT_IS_SYNCING = 'contract-is-syncing'; +export const CONTRACTS_MODIFIED = 'contracts-modified'; +export const CONTRACTS_MODIFIED_READY = 'contracts-modified-ready'; +export const EVENT_HANDLED = 'event-handled'; +export const EVENT_PUBLISHED = 'event-published'; +export const EVENT_PUBLISHING_ERROR = 'event-publishing-error'; +export const EVENT_HANDLED_READY = 'event-handled-ready'; +export const CONTRACT_REGISTERED = 'contract-registered'; +export const CONTRACT_UNREGISTERED = 'contract-unregistered'; +export const CONTRACT_IS_PENDING_KEY_REQUESTS = 'contract-is-pending-key-requests'; +export const CONTRACT_HAS_RECEIVED_KEYS = 'contract-has-received-keys'; +export const PERSISTENT_ACTION_FAILURE = 'persistent-action-failure'; +export const PERSISTENT_ACTION_SUCCESS = 'persistent-action-success'; +export const PERSISTENT_ACTION_TOTAL_FAILURE = 'persistent-action-total_failure'; diff --git a/dist/esm/files.d.mts b/dist/esm/files.d.mts new file mode 100644 index 0000000..42e8ad7 --- /dev/null +++ b/dist/esm/files.d.mts @@ 
-0,0 +1,31 @@ +import { CheloniaContext, ChelFileManifest } from './types.mjs'; +export declare const aes256gcmHandlers: { + upload: (_chelonia: CheloniaContext, manifestOptions: ChelFileManifest) => { + cipherParams: { + keyId: string; + }; + streamHandler: (stream: ReadableStream) => Promise>; + downloadParams: { + IKM: string; + rs: number; + }; + }; + download: (chelonia: CheloniaContext, downloadParams: { + IKM?: string; + rs?: number; + }, manifest: ChelFileManifest) => { + payloadHandler: () => Promise; + }; +}; +export declare const noneHandlers: { + upload: () => { + cipherParams: undefined; + streamHandler: (stream: ReadableStream) => ReadableStream; + downloadParams: undefined; + }; + download: (chelonia: CheloniaContext, _downloadParams: object, manifest: ChelFileManifest) => { + payloadHandler: () => Promise; + }; +}; +declare const _default: string[]; +export default _default; diff --git a/dist/esm/files.mjs b/dist/esm/files.mjs new file mode 100644 index 0000000..d33edc9 --- /dev/null +++ b/dist/esm/files.mjs @@ -0,0 +1,387 @@ +import encodeMultipartMessage from '@apeleghq/multipart-parser/encodeMultipartMessage'; +import decrypt from '@apeleghq/rfc8188/decrypt'; +import { aes256gcm } from '@apeleghq/rfc8188/encodings'; +import encrypt from '@apeleghq/rfc8188/encrypt'; +import { generateSalt } from '@chelonia/crypto'; +import { coerce } from '@chelonia/multiformats/bytes'; +import sbp from '@sbp/sbp'; +import { has } from 'turtledash'; +import { blake32Hash, createCID, createCIDfromStream, multicodes } from './functions.mjs'; +import { buildShelterAuthorizationHeader } from './utils.mjs'; +// Snippet from +// Node.js supports request streams, but also this check isn't meant for Node.js +// This part only checks for client-side support. Later, when we try uploading +// a file for the first time, we'll check if requests work, as streams are not +// supported in HTTP/1.1 and lower versions. 
+let supportsRequestStreams = typeof window !== 'object' || (() => { + let duplexAccessed = false; + const hasContentType = new Request('', { + body: new ReadableStream(), + method: 'POST', + get duplex() { + duplexAccessed = true; + return 'half'; + } + }).headers.has('content-type'); + return duplexAccessed && !hasContentType; +})(); +const streamToUint8Array = async (s) => { + const reader = s.getReader(); + const chunks = []; + let length = 0; + for (;;) { + const result = await reader.read(); + if (result.done) + break; + chunks.push(coerce(result.value)); + length += result.value.byteLength; + } + const body = new Uint8Array(length); + chunks.reduce((offset, chunk) => { + body.set(chunk, offset); + return offset + chunk.byteLength; + }, 0); + return body; +}; +// Check for streaming support, as of today (Feb 2024) only Blink- +// based browsers support this (i.e., Firefox and Safari don't). +const ArrayBufferToUint8ArrayStream = async function (connectionURL, s) { + // Even if the browser supports streams, some browsers (e.g., Chrome) also + // require that the server support HTTP/2 + if (supportsRequestStreams === true) { + await this.config.fetch(`${connectionURL}/streams-test`, { + method: 'POST', + body: new ReadableStream({ start(c) { c.enqueue(Buffer.from('ok')); c.close(); } }), + duplex: 'half' + }).then((r) => { + if (!r.ok) + throw new Error('Unexpected response'); + // supportsRequestStreams is tri-state + supportsRequestStreams = 2; + }).catch(() => { + console.info('files: Disabling streams support because the streams test failed'); + supportsRequestStreams = false; + }); + } + if (!supportsRequestStreams) { + return await streamToUint8Array(s); + } + return s.pipeThrough( + // eslint-disable-next-line no-undef + new TransformStream({ + transform(chunk, controller) { + controller.enqueue(coerce(chunk)); + } + })); +}; +const computeChunkDescriptors = (inStream) => { + let length = 0; + const [lengthStream, cidStream] = inStream.tee(); + const 
lengthPromise = new Promise((resolve, reject) => { + lengthStream.pipeTo(new WritableStream({ + write(chunk) { + length += chunk.byteLength; + }, + close() { + resolve(length); + }, + abort(reason) { + reject(reason); + } + })); + }); + const cidPromise = createCIDfromStream(cidStream, multicodes.SHELTER_FILE_CHUNK); + return Promise.all([lengthPromise, cidPromise]); +}; +const fileStream = (chelonia, manifest) => { + const dataGenerator = async function* () { + let readSize = 0; + for (const chunk of manifest.chunks) { + if (!Array.isArray(chunk) || + typeof chunk[0] !== 'number' || + typeof chunk[1] !== 'string') { + throw new Error('Invalid chunk descriptor'); + } + const chunkResponse = await chelonia.config.fetch(`${chelonia.config.connectionURL}/file/${chunk[1]}`, { + method: 'GET', + signal: chelonia.abortController.signal + }); + if (!chunkResponse.ok) { + throw new Error('Unable to retrieve manifest'); + } + // TODO: We're reading the chunks in their entirety instead of using the + // stream interface. In the future, this can be changed to get a stream + // instead. Ensure then that the following checks are replaced with a + // streaming version (length and CID) + const chunkBinary = await chunkResponse.arrayBuffer(); + if (chunkBinary.byteLength !== chunk[0]) + throw new Error('mismatched chunk size'); + readSize += chunkBinary.byteLength; + if (readSize > manifest.size) + throw new Error('read size exceeds declared size'); + if (createCID(coerce(chunkBinary), multicodes.SHELTER_FILE_CHUNK) !== chunk[1]) + throw new Error('mismatched chunk hash'); + yield chunkBinary; + } + // Now that we're done, we check to see if we read the correct size + // If all went well, we should have and this would never throw. However, + // if the payload was tampered with, we could have read a different size + // than expected. This will throw at the end, after all chunks are processed + // and after some or all of the data have already been consumed. 
+ // If integrity of the entire payload is important, consumers must buffer + // the stream and wait until the end before any processing. + if (readSize !== manifest.size) + throw new Error('mismatched size'); + }; + const dataIterator = dataGenerator(); + return new ReadableStream({ + async pull(controller) { + try { + const chunk = await dataIterator.next(); + if (chunk.done) { + controller.close(); + return; + } + controller.enqueue(chunk.value); + } + catch (e) { + controller.error(e); + } + } + }); +}; +export const aes256gcmHandlers = { + upload: (_chelonia, manifestOptions) => { + // IKM stands for Input Keying Material, and is a random value used to + // derive the encryption used in the chunks. See RFC 8188 for how the + // actual encryption key gets derived from the IKM. + const params = manifestOptions['cipher-params']; + let IKM = params?.IKM; + const recordSize = (params?.rs ?? 1 << 16); + if (!IKM) { + IKM = new Uint8Array(33); + self.crypto.getRandomValues(IKM); + } + // The keyId is only used as a sanity check but otherwise it is not needed + // Because the keyId is computed from the IKM, which is a secret, it is + // truncated to just eight characters so that it doesn't disclose too much + // information about the IKM (in theory, since it's a random string 33 bytes + // long, a full hash shouldn't disclose too much information anyhow). + // The reason the keyId is not _needed_ is that the IKM is part of the + // downloadParams, so anyone downloading a file should have the required + // context, and there is exactly one valid IKM for any downloadParams. + // By truncating the keyId, the only way to fully verify whether a given + // IKM decrypts a file is by attempting decryption. + // A side-effect of truncating the keyId is that, if the IKM were shared + // some other way (e.g., using the OP_KEY_SHARE mechanism), because of + // collisions it may not always be possible to look up the correct IKM. 
+ // Therefore, a handler that uses a different strategy than the one used + // here (namely, including the IKM in the downloadParams) may need to use + // longer key IDs, possibly a full hash. + const keyId = blake32Hash('aes256gcm-keyId' + blake32Hash(IKM)).slice(-8); + const binaryKeyId = Buffer.from(keyId); + return { + cipherParams: { + keyId + }, + streamHandler: async (stream) => { + return await encrypt(aes256gcm, stream, recordSize, binaryKeyId, IKM); + }, + downloadParams: { + IKM: Buffer.from(IKM).toString('base64'), + rs: recordSize + } + }; + }, + download: (chelonia, downloadParams, manifest) => { + const IKMb64 = downloadParams.IKM; + if (!IKMb64) { + throw new Error('Missing IKM in downloadParams'); + } + const IKM = Buffer.from(IKMb64, 'base64'); + const keyId = blake32Hash('aes256gcm-keyId' + blake32Hash(IKM)).slice(-8); + if (!manifest['cipher-params'] || !manifest['cipher-params'].keyId) { + throw new Error('Missing cipher-params'); + } + if (keyId !== manifest['cipher-params'].keyId) { + throw new Error('Key ID mismatch'); + } + const maxRecordSize = downloadParams.rs ?? 
1 << 27; // 128 MiB + return { + payloadHandler: async () => { + const bytes = await streamToUint8Array(decrypt(aes256gcm, fileStream(chelonia, manifest), (actualKeyId) => { + if (Buffer.from(actualKeyId).toString() !== keyId) { + throw new Error('Invalid key ID'); + } + return IKM; + }, maxRecordSize)); + return new Blob([bytes], { type: manifest.type || 'application/octet-stream' }); + } + }; + } +}; +export const noneHandlers = { + upload: () => { + return { + cipherParams: undefined, + streamHandler: (stream) => { + return stream; + }, + downloadParams: undefined + }; + }, + download: (chelonia, _downloadParams, manifest) => { + return { + payloadHandler: async () => { + const bytes = await streamToUint8Array(fileStream(chelonia, manifest)); + return new Blob([bytes], { type: manifest.type || 'application/octet-stream' }); + } + }; + } +}; +// TODO: Move into Chelonia config +const cipherHandlers = { + aes256gcm: aes256gcmHandlers, + none: noneHandlers +}; +export default sbp('sbp/selectors/register', { + 'chelonia/fileUpload': async function (chunks, manifestOptions, { billableContractID } = {}) { + if (!Array.isArray(chunks)) + chunks = [chunks]; + const chunkDescriptors = []; + const cipherHandler = await cipherHandlers[manifestOptions.cipher]?.upload?.(this, manifestOptions); + if (!cipherHandler) + throw new Error('Unsupported cipher'); + const cipherParams = cipherHandler.cipherParams; + const transferParts = await Promise.all(chunks.map(async (chunk, i) => { + const stream = chunk.stream(); + const encryptedStream = await cipherHandler.streamHandler(stream); + const [body, s] = encryptedStream.tee(); + chunkDescriptors.push(computeChunkDescriptors(s)); + return { + headers: new Headers([ + ['content-disposition', `form-data; name="${i}"; filename="${i}"`], + ['content-type', 'application/octet-stream'] + ]), + body + }; + })); + transferParts.push({ + headers: new Headers([ + ['content-disposition', 'form-data; name="manifest"; 
filename="manifest.json"'], + ['content-type', 'application/vnd.shelter.filemanifest'] + ]), + body: new ReadableStream({ + async start(controller) { + const chunks = await Promise.all(chunkDescriptors); + const manifest = { + version: '1.0.0', + // ?? undefined coerces null and undefined to undefined + // This ensures that null or undefined values don't make it to the + // JSON (otherwise, null values _would_ be stringified as 'null') + type: manifestOptions.type ?? undefined, + meta: manifestOptions.meta ?? undefined, + cipher: manifestOptions.cipher, + 'cipher-params': cipherParams, + size: chunks.reduce((acc, [cv]) => acc + cv, 0), + chunks, + 'name-map': manifestOptions['name-map'] ?? undefined, + alternatives: manifestOptions.alternatives ?? undefined + }; + controller.enqueue(Buffer.from(JSON.stringify(manifest))); + controller.close(); + } + }) + }); + // TODO: Using `self.crypto.randomUUID` breaks the tests. Maybe upgrading + // Cypress would fix this. + const boundary = typeof self.crypto?.randomUUID === 'function' + ? self.crypto.randomUUID() + // If randomUUID not available, we instead compute a random boundary + // The indirect call to Math.random (`(0, Math.random)`) is to explicitly + // mark that we intend on using Math.random, even though it's not a + // CSPRNG, so that it's not reported as a bug in by static analysis tools. + : new Array(36).fill('').map(() => 'abcdefghijklmnopqrstuvwxyz'[(0, Math.random)() * 26 | 0]).join(''); + const stream = encodeMultipartMessage(boundary, transferParts); + const deletionToken = 'deletionToken' + generateSalt(); + const deletionTokenHash = blake32Hash(deletionToken); + const uploadResponse = await this.config.fetch(`${this.config.connectionURL}/file`, { + method: 'POST', + signal: this.abortController.signal, + body: await ArrayBufferToUint8ArrayStream.call(this, this.config.connectionURL, stream), + headers: new Headers([ + ...(billableContractID ? 
[['authorization', buildShelterAuthorizationHeader.call(this, billableContractID)]] : []), + ['content-type', `multipart/form-data; boundary=${boundary}`], + ['shelter-deletion-token-digest', deletionTokenHash] + ]), + duplex: 'half' + }); + if (!uploadResponse.ok) + throw new Error('Error uploading file'); + return { + download: { + manifestCid: await uploadResponse.text(), + downloadParams: cipherHandler.downloadParams + }, + delete: deletionToken + }; + }, + 'chelonia/fileDownload': async function (downloadOptions, manifestChecker) { + // Using a function to prevent accidental logging + const { manifestCid, downloadParams } = downloadOptions.valueOf(); + const manifestResponse = await this.config.fetch(`${this.config.connectionURL}/file/${manifestCid}`, { + method: 'GET', + signal: this.abortController.signal + }); + if (!manifestResponse.ok) { + throw new Error('Unable to retrieve manifest'); + } + const manifestBinary = await manifestResponse.arrayBuffer(); + if (createCID(coerce(manifestBinary), multicodes.SHELTER_FILE_MANIFEST) !== manifestCid) + throw new Error('mismatched manifest hash'); + const manifest = JSON.parse(Buffer.from(manifestBinary).toString()); + if (typeof manifest !== 'object') + throw new Error('manifest format is invalid'); + if (manifest.version !== '1.0.0') + throw new Error('unsupported manifest version'); + if (!Array.isArray(manifest.chunks)) + throw new Error('missing required field: chunks'); + if (manifestChecker) { + const proceed = await manifestChecker?.(manifest); + if (!proceed) + return false; + } + const cipherHandler = await cipherHandlers[manifest.cipher]?.download?.(this, downloadParams, manifest); + if (!cipherHandler) + throw new Error('Unsupported cipher'); + return cipherHandler.payloadHandler(); + }, + 'chelonia/fileDelete': async function (manifestCid, credentials = {}) { + if (!manifestCid) { + throw new TypeError('A manifest CID must be provided'); + } + if (!Array.isArray(manifestCid)) + manifestCid = 
[manifestCid]; + return await Promise.allSettled(manifestCid.map(async (cid) => { + const hasCredential = has(credentials, cid); + const hasToken = has(credentials[cid], 'token') && credentials[cid].token; + const hasBillableContractID = has(credentials[cid], 'billableContractID') && credentials[cid].billableContractID; + if (!hasCredential || hasToken === hasBillableContractID) { + throw new TypeError(`Either a token or a billable contract ID must be provided for ${cid}`); + } + const response = await this.config.fetch(`${this.config.connectionURL}/deleteFile/${cid}`, { + method: 'POST', + signal: this.abortController.signal, + headers: new Headers([ + ['authorization', + hasToken + ? `bearer ${credentials[cid].token.valueOf()}` + : buildShelterAuthorizationHeader.call(this, credentials[cid].billableContractID)] + ]) + }); + if (!response.ok) { + throw new Error(`Unable to delete file ${cid}`); + } + })); + } +}); diff --git a/dist/esm/functions.d.mts b/dist/esm/functions.d.mts new file mode 100644 index 0000000..4933e96 --- /dev/null +++ b/dist/esm/functions.d.mts @@ -0,0 +1,15 @@ +import { CID } from '@chelonia/multiformats/cid'; +import { Buffer } from 'buffer'; +export declare const multicodes: Record; +export declare const parseCID: (cid: string) => CID; +export declare const maybeParseCID: (cid: string) => CID | null; +export declare function createCIDfromStream(data: string | Uint8Array | ReadableStream, multicode?: number): Promise; +export declare function createCID(data: string | Uint8Array, multicode?: number): string; +export declare function blake32Hash(data: string | Uint8Array): string; +export declare const b64ToBuf: (b64: string) => Buffer; +export declare const b64ToStr: (b64: string) => string; +export declare const bufToB64: (buf: Buffer) => string; +export declare const strToBuf: (str: string) => Buffer; +export declare const strToB64: (str: string) => string; +export declare const bytesToB64: (ary: Uint8Array) => string; +export declare const 
getSubscriptionId: (subscriptionInfo: ReturnType) => Promise; diff --git a/dist/esm/functions.mjs b/dist/esm/functions.mjs new file mode 100644 index 0000000..80b9525 --- /dev/null +++ b/dist/esm/functions.mjs @@ -0,0 +1,114 @@ +import { base58btc } from '@chelonia/multiformats/bases/base58'; +import { blake2b256 } from '@chelonia/multiformats/blake2b'; +import { blake2b256stream } from '@chelonia/multiformats/blake2bstream'; +import { CID } from '@chelonia/multiformats/cid'; +// Use 'buffer' instead of 'node:buffer' to polyfill in the browser +import { Buffer } from 'buffer'; +import { has } from 'turtledash'; +// Values from https://github.com/multiformats/multicodec/blob/master/table.csv +export const multicodes = { + RAW: 0x00, + JSON: 0x0200, + SHELTER_CONTRACT_MANIFEST: 0x511e00, + SHELTER_CONTRACT_TEXT: 0x511e01, + SHELTER_CONTRACT_DATA: 0x511e02, + SHELTER_FILE_MANIFEST: 0x511e03, + SHELTER_FILE_CHUNK: 0x511e04 +}; +export const parseCID = (cid) => { + if (!cid || cid.length < 52 || cid.length > 64) { + throw new RangeError('CID length too short or too long'); + } + const parsed = CID.parse(cid, base58btc); + if (parsed.version !== 1 || + parsed.multihash.code !== blake2b256.code || + !Object.values(multicodes).includes(parsed.code)) { + throw new Error('Invalid CID'); + } + return parsed; +}; +export const maybeParseCID = (cid) => { + try { + return parseCID(cid); + } + catch { + // Ignore errors if the CID couldn't be parsed + return null; + } +}; +// Makes the `Buffer` global available in the browser if needed. +if (typeof globalThis === 'object' && !has(globalThis, 'Buffer')) { + globalThis.Buffer = Buffer; +} +export async function createCIDfromStream(data, multicode = multicodes.RAW) { + const uint8array = typeof data === 'string' ? 
new TextEncoder().encode(data) : data; + const digest = await blake2b256stream.digest(uint8array); + return CID.create(1, multicode, digest).toString(base58btc); +} +// TODO: implement a streaming hashing function for large files. +// Note: in fact this returns a serialized CID, not a CID object. +export function createCID(data, multicode = multicodes.RAW) { + const uint8array = typeof data === 'string' ? new TextEncoder().encode(data) : data; + const digest = blake2b256.digest(uint8array); + return CID.create(1, multicode, digest).toString(base58btc); +} +export function blake32Hash(data) { + const uint8array = typeof data === 'string' ? new TextEncoder().encode(data) : data; + const digest = blake2b256.digest(uint8array); + // While `digest.digest` is only 32 bytes long in this case, + // `digest.bytes` is 36 bytes because it includes a multiformat prefix. + return base58btc.encode(digest.bytes); +} +// NOTE: to preserve consistency across browser and node, we use the Buffer +// class. We could use btoa and atob in web browsers (functions that +// are unavailable on Node.js), but they do not support Unicode, +// and you have to jump through some hoops to get it to work: +// https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/btoa#Unicode_strings +// These hoops might result in inconsistencies between Node.js and the frontend. 
+export const b64ToBuf = (b64) => Buffer.from(b64, 'base64'); +export const b64ToStr = (b64) => b64ToBuf(b64).toString('utf8'); +export const bufToB64 = (buf) => Buffer.from(buf).toString('base64'); +export const strToBuf = (str) => Buffer.from(str, 'utf8'); +export const strToB64 = (str) => strToBuf(str).toString('base64'); +export const bytesToB64 = (ary) => Buffer.from(ary).toString('base64'); +// Generate an UUID from a `PushSubscription' +export const getSubscriptionId = async (subscriptionInfo) => { + const textEncoder = new TextEncoder(); + // + const endpoint = textEncoder.encode(subscriptionInfo.endpoint); + // + const p256dh = textEncoder.encode(subscriptionInfo.keys.p256dh); + const auth = textEncoder.encode(subscriptionInfo.keys.auth); + const canonicalForm = new ArrayBuffer(8 + + (4 + endpoint.byteLength) + (2 + p256dh.byteLength) + + (2 + auth.byteLength)); + const canonicalFormU8 = new Uint8Array(canonicalForm); + const canonicalFormDV = new DataView(canonicalForm); + let offset = 0; + canonicalFormDV.setFloat64(offset, subscriptionInfo.expirationTime == null + ? 
NaN + : subscriptionInfo.expirationTime, false); + offset += 8; + canonicalFormDV.setUint32(offset, endpoint.byteLength, false); + offset += 4; + canonicalFormU8.set(endpoint, offset); + offset += endpoint.byteLength; + canonicalFormDV.setUint16(offset, p256dh.byteLength, false); + offset += 2; + canonicalFormU8.set(p256dh, offset); + offset += p256dh.byteLength; + canonicalFormDV.setUint16(offset, auth.byteLength, false); + offset += 2; + canonicalFormU8.set(auth, offset); + const digest = await crypto.subtle.digest('SHA-384', canonicalForm); + const id = Buffer.from(digest.slice(0, 16)); + id[6] = 0x80 | (id[6] & 0x0F); + id[8] = 0x80 | (id[8] & 0x3F); + return [ + id.slice(0, 4), + id.slice(4, 6), + id.slice(6, 8), + id.slice(8, 10), + id.slice(10, 16) + ].map((p) => p.toString('hex')).join('-'); +}; diff --git a/dist/esm/index.d.mts b/dist/esm/index.d.mts new file mode 100644 index 0000000..36cecd5 --- /dev/null +++ b/dist/esm/index.d.mts @@ -0,0 +1,17 @@ +export * from './SPMessage.mjs'; +export * from './Secret.mjs'; +export * from './chelonia.mjs'; +export * from './constants.mjs'; +export * from './db.mjs'; +export * from './encryptedData.mjs'; +export * from './errors.mjs'; +export * from './events.mjs'; +export * from './files.mjs'; +export * from './persistent-actions.mjs'; +export * from './presets.mjs'; +export * from './pubsub/index.mjs'; +export * from './signedData.mjs'; +export * from './types.mjs'; +export * from './utils.mjs'; +declare const _default: string[]; +export default _default; diff --git a/dist/esm/index.mjs b/dist/esm/index.mjs new file mode 100644 index 0000000..d615161 --- /dev/null +++ b/dist/esm/index.mjs @@ -0,0 +1,20 @@ +import chelonia from './chelonia.mjs'; +import db from './db.mjs'; +import files from './files.mjs'; +import persistentActions from './persistent-actions.mjs'; +export * from './SPMessage.mjs'; +export * from './Secret.mjs'; +export * from './chelonia.mjs'; +export * from './constants.mjs'; +export * from 
'./db.mjs'; +export * from './encryptedData.mjs'; +export * from './errors.mjs'; +export * from './events.mjs'; +export * from './files.mjs'; +export * from './persistent-actions.mjs'; +export * from './presets.mjs'; +export * from './pubsub/index.mjs'; +export * from './signedData.mjs'; +export * from './types.mjs'; +export * from './utils.mjs'; +export default [...chelonia, ...db, ...files, ...persistentActions]; diff --git a/dist/esm/index.test.d.mts b/dist/esm/index.test.d.mts new file mode 100644 index 0000000..8927737 --- /dev/null +++ b/dist/esm/index.test.d.mts @@ -0,0 +1,3 @@ +import './encryptedData.test.mjs'; +import './persistent-actions.test.mjs'; +import './pubsub/index.test.mjs'; diff --git a/dist/esm/index.test.mjs b/dist/esm/index.test.mjs new file mode 100644 index 0000000..8927737 --- /dev/null +++ b/dist/esm/index.test.mjs @@ -0,0 +1,3 @@ +import './encryptedData.test.mjs'; +import './persistent-actions.test.mjs'; +import './pubsub/index.test.mjs'; diff --git a/dist/esm/internals.d.mts b/dist/esm/internals.d.mts new file mode 100644 index 0000000..12108a4 --- /dev/null +++ b/dist/esm/internals.d.mts @@ -0,0 +1,3 @@ +import './db.mjs'; +declare const _default: string[]; +export default _default; diff --git a/dist/esm/internals.mjs b/dist/esm/internals.mjs new file mode 100644 index 0000000..9f13d4f --- /dev/null +++ b/dist/esm/internals.mjs @@ -0,0 +1,2206 @@ +import sbp, { domainFromSelector } from '@sbp/sbp'; +import { multicodes } from './functions.mjs'; +import { cloneDeep, debounce, delay, has, pick, randomIntFromRange } from 'turtledash'; +import { SPMessage } from './SPMessage.mjs'; +import { Secret } from './Secret.mjs'; +import { INVITE_STATUS } from './constants.mjs'; +import { deserializeKey, keyId, verifySignature } from '@chelonia/crypto'; +import './db.mjs'; +import { encryptedIncomingData, encryptedOutgoingData } from './encryptedData.mjs'; +import { ChelErrorKeyAlreadyExists, ChelErrorResourceGone, ChelErrorUnrecoverable, 
ChelErrorWarning, ChelErrorDBBadPreviousHEAD, ChelErrorAlreadyProcessed, ChelErrorFetchServerTimeFailed, ChelErrorForkedChain } from './errors.mjs'; +import { CONTRACTS_MODIFIED, CONTRACT_HAS_RECEIVED_KEYS, CONTRACT_IS_SYNCING, EVENT_HANDLED, EVENT_PUBLISHED, EVENT_PUBLISHING_ERROR } from './events.mjs'; +import { buildShelterAuthorizationHeader, findKeyIdByName, findSuitablePublicKeyIds, findSuitableSecretKeyId, getContractIDfromKeyId, handleFetchResult, keyAdditionProcessor, logEvtError, recreateEvent, validateKeyPermissions, validateKeyAddPermissions, validateKeyDelPermissions, validateKeyUpdatePermissions } from './utils.mjs'; +import { isSignedData, signedIncomingData } from './signedData.mjs'; +// import 'ses' +// Used for temporarily storing the missing decryption key IDs in a given +// message +const missingDecryptionKeyIdsMap = new WeakMap(); +const getMsgMeta = function (message, contractID, state, index) { + const signingKeyId = message.signingKeyId(); + let innerSigningKeyId = null; + const config = this.config; + const result = { + signingKeyId, + get signingContractID() { + return getContractIDfromKeyId(contractID, signingKeyId, state); + }, + get innerSigningKeyId() { + if (innerSigningKeyId === null) { + const value = message.message(); + const data = config.unwrapMaybeEncryptedData(value); + if (data?.data && isSignedData(data.data)) { + innerSigningKeyId = data.data.signingKeyId; + } + else { + innerSigningKeyId = undefined; + } + return innerSigningKeyId; + } + }, + get innerSigningContractID() { + return getContractIDfromKeyId(contractID, result.innerSigningKeyId, state); + }, + index + }; + return result; +}; +const keysToMap = function (keys_, height, authorizedKeys) { + // Using cloneDeep to ensure that the returned object is serializable + // Keys in a SPMessage may not be serializable (i.e., supported by the + // structured clone algorithm) when they contain encryptedIncomingData + const keys = keys_.map((key) => { + const data = 
this.config.unwrapMaybeEncryptedData(key); + if (!data) + return undefined; + if (data.encryptionKeyId) { + data.data._private = data.encryptionKeyId; + } + return data.data; + // eslint-disable-next-line no-use-before-define + }).filter(Boolean); + const keysCopy = cloneDeep(keys); + return Object.fromEntries(keysCopy.map((key) => { + key._notBeforeHeight = height; + if (authorizedKeys?.[key.id]) { + if (authorizedKeys[key.id]._notAfterHeight == null) { + throw new ChelErrorKeyAlreadyExists(`Cannot set existing unrevoked key: ${key.id}`); + } + // If the key was get previously, preserve its _notBeforeHeight + // NOTE: (SECURITY) This may allow keys for periods for which it wasn't + // supposed to be active. This is a trade-off for simplicity, instead of + // considering discrete periods, which is the correct solution + // Discrete ranges *MUST* be implemented because they impact permissions + key._notBeforeHeight = Math.min(height, authorizedKeys[key.id]._notBeforeHeight ?? 0); + } + else { + key._notBeforeHeight = height; + } + delete key._notAfterHeight; + return [key.id, key]; + })); +}; +const keyRotationHelper = (contractID, state, config, updatedKeysMap, requiredPermissions, outputSelector, outputMapper, internalSideEffectStack) => { + if (!internalSideEffectStack || !Array.isArray(state._volatile?.watch)) + return; + const rootState = sbp(config.stateSelector); + const watchMap = Object.create(null); + state._volatile.watch.forEach(([name, cID]) => { + if (!updatedKeysMap[name] || watchMap[cID] === null) { + return; + } + if (!watchMap[cID]) { + if (!rootState.contracts[cID]?.type || !findSuitableSecretKeyId(rootState[cID], [SPMessage.OP_KEY_UPDATE], ['sig'])) { + watchMap[cID] = null; + return; + } + watchMap[cID] = []; + } + watchMap[cID].push(name); + }); + Object.entries(watchMap).forEach(([cID, names]) => { + if (!Array.isArray(names) || !names.length) + return; + const [keyNamesToUpdate, signingKeyId] = names.map((name) => { + const foreignContractKey 
= rootState[cID]?._vm?.authorizedKeys?.[updatedKeysMap[name].oldKeyId]; + if (!foreignContractKey) + return undefined; + const signingKeyId = findSuitableSecretKeyId(rootState[cID], requiredPermissions, ['sig'], foreignContractKey.ringLevel); + if (signingKeyId) { + return [[name, foreignContractKey.name], signingKeyId, rootState[cID]._vm.authorizedKeys[signingKeyId].ringLevel]; + } + return undefined; + // eslint-disable-next-line no-use-before-define + }).filter(Boolean) + .reduce((acc, [name, signingKeyId, ringLevel]) => { + acc[0].push(name); + return ringLevel < acc[2] ? [acc[0], signingKeyId, ringLevel] : acc; + }, [[], undefined, Number.POSITIVE_INFINITY]); + if (!signingKeyId) + return; + // Send output based on keyNamesToUpdate, signingKeyId + const contractName = rootState.contracts[cID]?.type; + internalSideEffectStack?.push(() => { + // We can't await because it'll block on a different contract, which + // is possibly waiting on this current contract. + sbp(outputSelector, { + contractID: cID, + contractName, + data: keyNamesToUpdate.map(outputMapper).map((v) => { + return v; + }), + signingKeyId + }).catch((e) => { + console.warn(`Error mirroring key operation (${outputSelector}) from ${contractID} to ${cID}: ${e?.message || e}`); + }); + }); + }); +}; +// export const FERAL_FUNCTION = Function +export default sbp('sbp/selectors/register', { + // DO NOT CALL ANY OF THESE YOURSELF! 
+ 'chelonia/private/state': function () { + return this.state; + }, + 'chelonia/private/invoke': function (instance, invocation) { + // If this._instance !== instance (i.e., chelonia/reset was called) + if (this._instance !== instance) { + console.info('[\'chelonia/private/invoke] Not proceeding with invocation as Chelonia was restarted', { invocation }); + return; + } + if (Array.isArray(invocation)) { + return sbp(...invocation); + } + else if (typeof invocation === 'function') { + return invocation(); + } + else { + throw new TypeError(`[chelonia/private/invoke] Expected invocation to be an array or a function. Saw ${typeof invocation} instead.`); + } + }, + 'chelonia/private/queueEvent': function (queueName, invocation) { + return sbp('okTurtles.eventQueue/queueEvent', queueName, ['chelonia/private/invoke', this._instance, invocation]); + }, + 'chelonia/private/verifyManifestSignature': function (contractName, manifestHash, manifest) { + // We check that the manifest contains a 'signature' field with the correct + // shape + if (!has(manifest, 'signature') || typeof manifest.signature.keyId !== 'string' || typeof manifest.signature.value !== 'string') { + throw new Error(`Invalid or missing signature field for manifest ${manifestHash} (named ${contractName})`); + } + // Now, start the signature verification process + const rootState = sbp(this.config.stateSelector); + if (!has(rootState, 'contractSigningKeys')) { + this.config.reactiveSet(rootState, 'contractSigningKeys', Object.create(null)); + } + // Because `contractName` comes from potentially unsafe sources (for + // instance, from `processMessage`), the key isn't used directly because + // it could overlap with current or future 'special' key names in JavaScript, + // such as `prototype`, `__proto__`, etc. We also can't guarantee that the + // `contractSigningKeys` always has a null prototype, and, because of the + // way we manage state, neither can we use `Map`. 
So, we use prefix for the + // lookup key that's unlikely to ever be part of a special JS name. + const contractNameLookupKey = `name:${contractName}`; + // If the contract name has been seen before, validate its signature now + let signatureValidated = false; + if (process.env.UNSAFE_TRUST_ALL_MANIFEST_SIGNING_KEYS !== 'true' && has(rootState.contractSigningKeys, contractNameLookupKey)) { + console.info(`[chelonia] verifying signature for ${manifestHash} with an existing key`); + if (!has(rootState.contractSigningKeys[contractNameLookupKey], manifest.signature.keyId)) { + console.error(`The manifest with ${manifestHash} (named ${contractName}) claims to be signed with a key with ID ${manifest.signature.keyId}, which is not trusted. The trusted key IDs for this name are:`, Object.keys(rootState.contractSigningKeys[contractNameLookupKey])); + throw new Error(`Invalid or missing signature in manifest ${manifestHash} (named ${contractName}). It claims to be signed with a key with ID ${manifest.signature.keyId}, which has not been authorized for this contract before.`); + } + const signingKey = rootState.contractSigningKeys[contractNameLookupKey][manifest.signature.keyId]; + verifySignature(signingKey, manifest.body + manifest.head, manifest.signature.value); + console.info(`[chelonia] successful signature verification for ${manifestHash} (named ${contractName}) using the already-trusted key ${manifest.signature.keyId}.`); + signatureValidated = true; + } + // Otherwise, when this is a yet-unseen contract, we parse the body to + // see its allowed signers to trust on first-use (TOFU) + const body = JSON.parse(manifest.body); + // If we don't have a list of authorized signatures yet, verify this + // contract's signature and set the auhorized signing keys + if (!signatureValidated) { + console.info(`[chelonia] verifying signature for ${manifestHash} (named ${contractName}) for the first time`); + if (!has(body, 'signingKeys') || !Array.isArray(body.signingKeys)) { + 
throw new Error(`Invalid manifest file ${manifestHash} (named ${contractName}). Its body doesn't contain a 'signingKeys' list'`); + } + let contractSigningKeys; + try { + contractSigningKeys = Object.fromEntries(body.signingKeys.map((serializedKey) => { + return [ + keyId(serializedKey), + serializedKey + ]; + })); + } + catch (e) { + console.error(`[chelonia] Error parsing the public keys list for ${manifestHash} (named ${contractName})`, e); + throw e; + } + if (!has(contractSigningKeys, manifest.signature.keyId)) { + throw new Error(`Invalid or missing signature in manifest ${manifestHash} (named ${contractName}). It claims to be signed with a key with ID ${manifest.signature.keyId}, which is not listed in its 'signingKeys' field.`); + } + verifySignature(contractSigningKeys[manifest.signature.keyId], manifest.body + manifest.head, manifest.signature.value); + console.info(`[chelonia] successful signature verification for ${manifestHash} (named ${contractName}) using ${manifest.signature.keyId}. 
The following key IDs will now be trusted for this contract name`, Object.keys(contractSigningKeys)); + signatureValidated = true; + rootState.contractSigningKeys[contractNameLookupKey] = contractSigningKeys; + } + // If verification was successful, return the parsed body to make the newly- + // loaded contract available + return body; + }, + 'chelonia/private/loadManifest': async function (contractName, manifestHash) { + if (!contractName || typeof contractName !== 'string') { + throw new Error('Invalid or missing contract name'); + } + if (this.manifestToContract[manifestHash]) { + console.warn('[chelonia]: already loaded manifest', manifestHash); + return; + } + const manifestSource = await sbp('chelonia/out/fetchResource', manifestHash, { code: multicodes.SHELTER_CONTRACT_MANIFEST }); + const manifest = JSON.parse(manifestSource); + const body = sbp('chelonia/private/verifyManifestSignature', contractName, manifestHash, manifest); + if (body.name !== contractName) { + throw new Error(`Mismatched contract name. 
Expected ${contractName} but got ${body.name}`); + } + const contractInfo = (this.config.contracts.defaults.preferSlim && body.contractSlim) || body.contract; + console.info(`[chelonia] loading contract '${contractInfo.file}'@'${body.version}' from manifest: ${manifestHash}`); + const source = await sbp('chelonia/out/fetchResource', contractInfo.hash, { code: multicodes.SHELTER_CONTRACT_TEXT }); + const reduceAllow = (acc, v) => { acc[v] = true; return acc; }; + const allowedSels = ['okTurtles.events/on', 'chelonia/defineContract', 'chelonia/out/keyRequest'] + .concat(this.config.contracts.defaults.allowedSelectors) + .reduce(reduceAllow, {}); + const allowedDoms = this.config.contracts.defaults.allowedDomains + .reduce(reduceAllow, {}); + const contractSBP = (selector, ...args) => { + const domain = domainFromSelector(selector); + if (selector.startsWith(contractName + '/')) { + selector = `${manifestHash}/${selector}`; + } + if (allowedSels[selector] || allowedDoms[domain]) { + return sbp(selector, ...args); + } + else { + console.error('[chelonia] selector not on allowlist', { selector, allowedSels, allowedDoms }); + throw new Error(`[chelonia] selector not on allowlist: '${selector}'`); + } + }; + // const saferEval: Function = new FERAL_FUNCTION(` + // eslint-disable-next-line no-new-func + const saferEval = new Function(` + return function (globals) { + // almost a real sandbox + // stops (() => this)().fetch + // needs additional step of locking down Function constructor to stop: + // new (()=>{}).constructor("console.log(typeof this.fetch)")() + globals.self = globals + globals.globalThis = globals + with (new Proxy(globals, { + get (o, p) { return o[p] }, + has (o, p) { /* console.log('has', p); */ return true } + })) { + (function () { + 'use strict' + ${source} + })() + } + } + `)(); + // TODO: lock down Function constructor! could just use SES lockdown() + // or do our own version of it. 
+ // https://github.com/endojs/endo/blob/master/packages/ses/src/tame-function-constructors.js + this.defContractSBP = contractSBP; + this.defContractManifest = manifestHash; + // contracts will also be signed, so even if sandbox breaks we still have protection + saferEval({ + // pass in globals that we want access to by default in the sandbox + // note: you can undefine these by setting them to undefined in exposedGlobals + crypto: { + getRandomValues: (v) => globalThis.crypto.getRandomValues(v) + }, + ...(typeof window === 'object' && window && { + alert: window.alert.bind(window), + confirm: window.confirm.bind(window), + prompt: window.prompt.bind(window) + }), + isNaN, + console, + Object, + Error, + TypeError, + RangeError, + Math, + Symbol, + Date, + Array, + BigInt, + Boolean, + Buffer, + String, + Number, + Int8Array, + Int16Array, + Int32Array, + Uint8Array, + Uint16Array, + Uint32Array, + Float32Array, + Float64Array, + ArrayBuffer, + JSON, + RegExp, + parseFloat, + parseInt, + Promise, + Function, + Map, + WeakMap, + ...this.config.contracts.defaults.exposedGlobals, + require: (dep) => { + return dep === '@sbp/sbp' + ? contractSBP + : this.config.contracts.defaults.modules[dep]; + }, + sbp: contractSBP, + fetchServerTime: async (fallback = true) => { + // If contracts need the current timestamp (for example, for metadata 'createdDate') + // they must call this function so that clients are kept synchronized to the server's + // clock, for consistency, so that if one client's clock is off, it doesn't conflict + // with other client's clocks. 
+ // See: https://github.com/okTurtles/group-income/issues/531 + try { + const response = await this.config.fetch(`${this.config.connectionURL}/time`, { signal: this.abortController.signal }); + return handleFetchResult('text')(response); + } + catch (e) { + console.warn('[fetchServerTime] Error', e); + if (fallback) { + return new Date(sbp('chelonia/time')).toISOString(); + } + throw new ChelErrorFetchServerTimeFailed('Can not fetch server time. Please check your internet connection.'); + } + } + }); + if (contractName !== this.defContract.name) { + throw new Error(`Invalid contract name for manifest ${manifestHash}. Expected ${contractName} but got ${this.defContract.name}`); + } + this.defContractSelectors.forEach(s => { allowedSels[s] = true; }); + this.manifestToContract[manifestHash] = { + slim: contractInfo === body.contractSlim, + info: contractInfo, + contract: this.defContract + }; + }, + // Warning: avoid using this unless you know what you're doing. Prefer using /remove. 
+ 'chelonia/private/removeImmediately': function (contractID, params) { + const state = sbp(this.config.stateSelector); + const contractName = state.contracts[contractID]?.type; + if (!contractName) { + console.error('[chelonia/private/removeImmediately] Missing contract name for contract', { contractID }); + return; + } + const manifestHash = this.config.contracts.manifests[contractName]; + if (manifestHash) { + const destructor = `${manifestHash}/${contractName}/_cleanup`; + // Check if a destructor is defined + if (sbp('sbp/selectors/fn', destructor)) { + // And call it + try { + sbp(destructor, { contractID, resync: !!params?.resync, state: state[contractID] }); + } + catch (e) { + console.error(`[chelonia/private/removeImmediately] Error at destructor for ${contractID}`, e); + } + } + } + if (params?.resync) { + // If re-syncing, keep the reference count + Object.keys(state.contracts[contractID]) + .filter((k) => k !== 'references') + .forEach((k) => this.config.reactiveDel(state.contracts[contractID], k)); + // If re-syncing, keep state._volatile.watch + Object.keys(state[contractID]) + .filter((k) => k !== '_volatile') + .forEach((k) => this.config.reactiveDel(state[contractID], k)); + if (state[contractID]._volatile) { + Object.keys(state[contractID]._volatile) + .filter((k) => k !== 'watch') + .forEach((k) => this.config.reactiveDel(state[contractID]._volatile, k)); + } + } + else { + delete this.ephemeralReferenceCount[contractID]; + if (params?.permanent) { + // Keep a 'null' state to remember permanently-deleted contracts + // (e.g., when they've been removed from the server) + this.config.reactiveSet(state.contracts, contractID, null); + } + else { + this.config.reactiveDel(state.contracts, contractID); + } + this.config.reactiveDel(state, contractID); + } + this.subscriptionSet.delete(contractID); + // calling this will make pubsub unsubscribe for events on `contractID` + sbp('okTurtles.events/emit', CONTRACTS_MODIFIED, 
Array.from(this.subscriptionSet), { + added: [], + removed: [contractID], + permanent: params?.permanent, + resync: params?.resync + }); + }, + // used by, e.g. 'chelonia/contract/wait' + 'chelonia/private/noop': function () { }, + 'chelonia/private/out/sync': function (contractIDs, params) { + const listOfIds = typeof contractIDs === 'string' ? [contractIDs] : contractIDs; + const forcedSync = !!params?.force; + return Promise.all(listOfIds.map(contractID => { + // If this isn't a forced sync and we're already subscribed to the contract, + // only wait on the event queue (as events should come over the subscription) + if (!forcedSync && this.subscriptionSet.has(contractID)) { + const rootState = sbp(this.config.stateSelector); + // However, if the contract has been marked as dirty (meaning its state + // could be wrong due to newly received encryption keys), sync it anyhow + // (i.e., disregard the force flag and proceed to sync the contract) + if (!rootState[contractID]?._volatile?.dirty) { + return sbp('chelonia/private/queueEvent', contractID, ['chelonia/private/noop']); + } + } + // enqueue this invocation in a serial queue to ensure + // handleEvent does not get called on contractID while it's syncing, + // but after it's finished. This is used in tandem with + // queuing the 'chelonia/private/in/handleEvent' selector, defined below. + // This prevents handleEvent getting called with the wrong previousHEAD for an event. 
+ return sbp('chelonia/private/queueEvent', contractID, [ + 'chelonia/private/in/syncContract', contractID, params + ]).catch((err) => { + console.error(`[chelonia] failed to sync ${contractID}:`, err); + throw err; // re-throw the error + }); + })); + }, + 'chelonia/private/out/publishEvent': function (entry, { maxAttempts = 5, headers, billableContractID, bearer } = {}, hooks) { + const contractID = entry.contractID(); + const originalEntry = entry; + return sbp('chelonia/private/queueEvent', `publish:${contractID}`, async () => { + let attempt = 1; + let lastAttemptedHeight; + // prepublish is asynchronous to allow for cleanly sending messages to + // different contracts + await hooks?.prepublish?.(entry); + const onreceivedHandler = (_contractID, message) => { + if (entry.hash() === message.hash()) { + sbp('okTurtles.events/off', EVENT_HANDLED, onreceivedHandler); + hooks.onprocessed(entry); + } + }; + if (typeof hooks?.onprocessed === 'function') { + sbp('okTurtles.events/on', EVENT_HANDLED, onreceivedHandler); + } + // auto resend after short random delay + // https://github.com/okTurtles/group-income/issues/608 + while (true) { + // Queued event to ensure that we send the event with whatever the + // 'latest' state may be for that contract (in case we were receiving + // something over the web socket) + // This also ensures that the state doesn't change while reading it + lastAttemptedHeight = entry.height(); + const newEntry = await sbp('chelonia/private/queueEvent', contractID, async () => { + const rootState = sbp(this.config.stateSelector); + const state = rootState[contractID]; + const isFirstMessage = entry.isFirstMessage(); + if (!state && !isFirstMessage) { + console.info(`[chelonia] Not sending message as contract state has been removed: ${entry.description()}`); + return; + } + if (hooks?.preSendCheck) { + if (!await hooks.preSendCheck(entry, state)) { + console.info(`[chelonia] Not sending message as preSendCheck hook returned 
non-truish value: ${entry.description()}`); + return; + } + } + // Process message to ensure that it is valid. Should this throw, + // we propagate the error. Calling `processMessage` will perform + // validation by checking signatures, well-formedness and, in the case + // of actions, by also calling both the `validate` method (which + // doesn't mutate the state) and the `process` method (which could + // mutate the state). + // `SPMessage` objects have an implicit `direction` field that's set + // based on how the object was constructed. For messages that will be + // sent to the server (this case), `direction` is set to `outgoing`. + // This `direction` affects how certain errors are reported during + // processing, and is also exposed to contracts (which could then + // alter their behavior based on this) to support some features (such + // as showing users that a certain message is 'pending'). + // Validation ensures that we don't write messages known to be invalid. + // Although those invalid messages will be ignored if sent anyhow, + // sending them is wasteful. + // The only way to know for sure if a message is valid or not is using + // the same logic that would be used if the message was received, + // hence the call to `processMessage`. Validation requires having the + // state and all mutations that would be applied. For example, when + // joining a chatroom, this is usually done by sending an OP_ATOMIC + // that contains OP_KEY_ADD and OP_ACTION_ENCRYPTED. Correctly + // validating this operation requires applying the OP_KEY_ADD to the + // state in order to know whether OP_ACTION_ENCRYPTED has a valid + // signature or not. + // We also rely on this logic to keep different contracts in sync + // when there are side-effects. For example, the side-effect in a + // group for someone joining a chatroom can call the `join` action + // on the chatroom unconditionally, since validation will prevent + // the message from being sent. 
+ // Because of this, 'chelonia/private/in/processMessage' SHOULD NOT + // change the global Chelonia state and it MUST NOT call any + // side-effects or change the global state in a way that affects + // the meaning of any future messages or successive invocations. + // Note: mutations to the contract state, if any, are immediately + // discarded (see the temporary object created using `cloneDeep`). + await sbp('chelonia/private/in/processMessage', entry, cloneDeep(state || {})); + // if this isn't the first event (i.e., OP_CONTRACT), recreate and + // resend message + // This is mainly to set height and previousHEAD. For the first event, + // this doesn't need to be done because previousHEAD is always undefined + // and height is always 0. + // We always call recreateEvent because we may have received new events + // in the web socket + if (!isFirstMessage) { + return recreateEvent(entry, state, rootState.contracts[contractID]); + } + return entry; + }); + // If there is no event to send, return + if (!newEntry) + return; + await hooks?.beforeRequest?.(newEntry, entry); + entry = newEntry; + const r = await this.config.fetch(`${this.config.connectionURL}/event`, { + method: 'POST', + body: entry.serialize(), + headers: { + ...headers, + ...bearer && { + Authorization: `Bearer ${bearer}` + }, + ...billableContractID && { + Authorization: buildShelterAuthorizationHeader.call(this, billableContractID) + }, + 'Content-Type': 'text/plain' + }, + signal: this.abortController.signal + }); + if (r.ok) { + await hooks?.postpublish?.(entry); + return entry; + } + try { + if (r.status === 409) { + if (attempt + 1 > maxAttempts) { + console.error(`[chelonia] failed to publish ${entry.description()} after ${attempt} attempts`, entry); + throw new Error(`publishEvent: ${r.status} - ${r.statusText}. attempt ${attempt}`); + } + // create new entry + const randDelay = randomIntFromRange(0, 1500); + console.warn(`[chelonia] publish attempt ${attempt} of ${maxAttempts} failed. 
Waiting ${randDelay} msec before resending ${entry.description()}`); + attempt += 1; + await delay(randDelay); // wait randDelay ms before sending it again + // TODO: The [pubsub] code seems to miss events that happened between + // a call to sync and the subscription time. This is a temporary measure + // to handle this until [pubsub] is updated. + if (!entry.isFirstMessage() && entry.height() === lastAttemptedHeight) { + await sbp('chelonia/private/out/sync', contractID, { force: true }); + } + } + else { + const message = (await r.json())?.message; + console.error(`[chelonia] ERROR: failed to publish ${entry.description()}: ${r.status} - ${r.statusText}: ${message}`, entry); + throw new Error(`publishEvent: ${r.status} - ${r.statusText}: ${message}`); + } + } + catch (e) { + sbp('okTurtles.events/off', EVENT_HANDLED, onreceivedHandler); + throw e; + } + } + }).then((entry) => { + sbp('okTurtles.events/emit', EVENT_PUBLISHED, { contractID, message: entry, originalMessage: originalEntry }); + return entry; + }).catch((e) => { + sbp('okTurtles.events/emit', EVENT_PUBLISHING_ERROR, { contractID, message: entry, originalMessage: originalEntry, error: e }); + throw e; + }); + }, + 'chelonia/private/out/latestHEADinfo': function (contractID) { + return this.config.fetch(`${this.config.connectionURL}/latestHEADinfo/${contractID}`, { + cache: 'no-store', + signal: this.abortController.signal + }).then(handleFetchResult('json')); + }, + 'chelonia/private/postKeyShare': function (contractID, previousVolatileState, signingKey) { + const cheloniaState = sbp(this.config.stateSelector); + const targetState = cheloniaState[contractID]; + if (!targetState) + return; + if (previousVolatileState && has(previousVolatileState, 'watch')) { + if (!targetState._volatile) + this.config.reactiveSet(targetState, '_volatile', Object.create(null)); + if (!targetState._volatile.watch) { + this.config.reactiveSet(targetState._volatile, 'watch', previousVolatileState.watch); + } + else if 
(targetState._volatile.watch !== previousVolatileState.watch) { + previousVolatileState.watch.forEach((pWatch) => { + if (!targetState._volatile.watch.some((tWatch) => { + return (tWatch[0] === pWatch[0]) && (tWatch[1] === pWatch[1]); + })) { + targetState._volatile.watch.push(pWatch); + } + }); + } + } + if (!Array.isArray(targetState._volatile?.pendingKeyRequests)) + return; + this.config.reactiveSet(targetState._volatile, 'pendingKeyRequests', targetState._volatile.pendingKeyRequests.filter((pkr) => pkr?.name !== signingKey.name)); + }, + 'chelonia/private/in/processMessage': async function (message, state, internalSideEffectStack, contractName) { + const [opT, opV] = message.op(); + const hash = message.hash(); + const height = message.height(); + const contractID = message.contractID(); + const manifestHash = message.manifest(); + const signingKeyId = message.signingKeyId(); + const direction = message.direction(); + const config = this.config; + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const opName = Object.entries(SPMessage).find(([, y]) => y === opT)?.[0]; + console.debug('PROCESSING OPCODE:', opName, 'to', contractID); + if (state?._volatile?.dirty) { + console.debug('IGNORING OPCODE BECAUSE CONTRACT STATE IS MARKED AS DIRTY.', 'OPCODE:', opName, 'CONTRACT:', contractID); + return; + } + if (!state._vm) + state._vm = Object.create(null); + const opFns = { + /* + There are two types of "errors" that we need to consider: + 1. "Ignoring" errors + 2. "Failure" errors + Example: OP_KEY_ADD + 1. IGNORING: an error is thrown because we wanted to add a key but the key we wanted to add is already there. This is not a hard error, it's an ignoring error. We don't care that the operation failed in this case because the intent was accomplished. + 2. FAILURE: an error is thrown while attempting to add a key that doesn't exist. + Example: OP_ACTION_ENCRYPTED + 1. 
IGNORING: An error is thrown because we don't have the key to decrypt the action. We ignore it. + 2. FAILURE: An error is thrown by the process function during processing. + Handling these in OP_ATOMIC + • ALL errors of class "IGNORING" should be ignored. They should not impact our ability to process the rest of the operations in the OP_ATOMIC. No matter how many of these are thrown, it doesn't affect the rest of the operations. + • ANY error of class "FAILURE" will call the rest of the operations to fail and the state to be reverted to prior to the OP_ATOMIC. No side-effects should be run. Because an intention failed. + */ + async [SPMessage.OP_ATOMIC](v) { + for (let i = 0; i < v.length; i++) { + const u = v[i]; + try { + if (u[0] === SPMessage.OP_ATOMIC) + throw new Error('Cannot nest OP_ATOMIC'); + if (!validateKeyPermissions(message, config, state, signingKeyId, u[0], u[1])) { + throw new Error('Inside OP_ATOMIC: no matching signing key was defined'); + } + await opFns[u[0]](u[1]); + } + catch (e_) { + const e = e_; + if (e && typeof e === 'object') { + if (e.name === 'ChelErrorDecryptionKeyNotFound') { + console.warn(`[chelonia] [OP_ATOMIC] WARN '${e.name}' in processMessage for ${message.description()}: ${e.message}`, e, message.serialize()); + if (e.cause) { + const missingDecryptionKeyIds = missingDecryptionKeyIdsMap.get(message); + if (missingDecryptionKeyIds) { + missingDecryptionKeyIds.add(e.cause); + } + else { + missingDecryptionKeyIdsMap.set(message, new Set([e.cause])); + } + } + continue; + } + else { + logEvtError(message, `[chelonia] [OP_ATOMIC] ERROR '${e.name}' in processMessage for ${message.description()}: ${e.message || e}`, e, message.serialize()); + } + console.warn(`[chelonia] [OP_ATOMIC] Error processing ${message.description()}: ${message.serialize()}. 
Any side effects will be skipped!`); + if (config.strictProcessing) { + throw e; + } + config.hooks.processError?.(e, message, getMsgMeta.call(self, message, contractID, state)); + if (e.name === 'ChelErrorWarning') + continue; + } + else { + logEvtError(message, 'Inside OP_ATOMIC: Non-object or null error thrown', contractID, message, i, e); + } + throw e; + } + } + }, + [SPMessage.OP_CONTRACT](v) { + state._vm.type = v.type; + const keys = keysToMap.call(self, v.keys, height); + state._vm.authorizedKeys = keys; + // Loop through the keys in the contract and try to decrypt all of the private keys + // Example: in the identity contract you have the IEK, IPK, CSK, and CEK. + // When you login you have the IEK which is derived from your password, and you + // will use it to decrypt the rest of the keys which are encrypted with that. + // Specifically, the IEK is used to decrypt the CSKs and the CEKs, which are + // the encrypted versions of the CSK and CEK. + keyAdditionProcessor.call(self, message, hash, v.keys, state, contractID, signingKey, internalSideEffectStack); + }, + [SPMessage.OP_ACTION_ENCRYPTED](v) { + if (config.skipActionProcessing) { + if (!config.skipDecryptionAttempts) { + console.log('OP_ACTION_ENCRYPTED: skipped action processing'); + } + return; + } + return opFns[SPMessage.OP_ACTION_UNENCRYPTED](v.valueOf()); + }, + async [SPMessage.OP_ACTION_UNENCRYPTED](v) { + if (!config.skipActionProcessing) { + let innerSigningKeyId; + if (isSignedData(v)) { + innerSigningKeyId = v.signingKeyId; + v = v.valueOf(); + } + const { data, meta, action } = v; + if (!config.whitelisted(action)) { + throw new Error(`chelonia: action not whitelisted: '${action}'`); + } + await sbp(`${manifestHash}/${action}/process`, { + data, + meta, + hash, + height, + contractID, + direction: message.direction(), + signingKeyId, + get signingContractID() { + return getContractIDfromKeyId(contractID, signingKeyId, state); + }, + innerSigningKeyId, + get innerSigningContractID() { + 
return getContractIDfromKeyId(contractID, innerSigningKeyId, state); + } + }, state); + } + }, + [SPMessage.OP_KEY_SHARE](wv) { + // TODO: Prompt to user if contract not in pending + const data = config.unwrapMaybeEncryptedData(wv); + if (!data) + return; + const v = data.data; + for (const key of v.keys) { + if (key.id && key.meta?.private?.content) { + if (!has(state._vm, 'sharedKeyIds')) + state._vm.sharedKeyIds = []; + if (!state._vm.sharedKeyIds.some((sK) => sK.id === key.id)) + state._vm.sharedKeyIds.push({ id: key.id, contractID: v.contractID, height, keyRequestHash: v.keyRequestHash, keyRequestHeight: v.keyRequestHeight }); + } + } + // If this is a response to an OP_KEY_REQUEST (marked by the + // presence of the keyRequestHash attribute), then we'll mark the + // key request as completed + // TODO: Verify that the keyRequestHash is what we expect (on the + // other contact's state, we should have a matching structure in + // state._volatile.pendingKeyRequests = [ + // { contractID: "this", name: "name of this signingKeyId", reference: "this reference", hash: "KA" }, ..., but we don't + // have a copy of the keyRequestHash (this would need a new + // message to ourselves in the KR process), so for now we trust + // that if it has keyRequestHash, it's a response to a request + // we sent. + // For similar reasons, we can't check pendingKeyRequests, because + // depending on how and in which order events are processed, it may + // not be available. 
+ // ] + if (has(v, 'keyRequestHash') && state._vm.authorizedKeys[signingKeyId].meta?.keyRequest) { + state._vm.authorizedKeys[signingKeyId].meta.keyRequest.responded = hash; + } + internalSideEffectStack?.push(async () => { + delete self.postSyncOperations[contractID]?.['pending-keys-for-' + v.contractID]; + const cheloniaState = sbp(self.config.stateSelector); + const targetState = cheloniaState[v.contractID]; + const missingDecryptionKeyIds = cheloniaState.contracts[v.contractID]?.missingDecryptionKeyIds; + let newestEncryptionKeyHeight = Number.POSITIVE_INFINITY; + for (const key of v.keys) { + if (key.id && key.meta?.private?.content) { + // Outgoing messages' keys are always transient + const transient = direction === 'outgoing' || key.meta.private.transient; + if (!sbp('chelonia/haveSecretKey', key.id, !transient)) { + try { + const decrypted = key.meta.private.content.valueOf(); + sbp('chelonia/storeSecretKeys', new Secret([{ + key: deserializeKey(decrypted), + transient + }])); + // If we've just received a known missing key (i.e., a key + // that previously resulted in a decryption error), we know + // our state is outdated and we need to re-sync the contract + if (missingDecryptionKeyIds?.includes(key.id)) { + newestEncryptionKeyHeight = Number.NEGATIVE_INFINITY; + } + else if ( + // Otherwise, we make an educated guess on whether a re-sync + // is needed based on the height. 
+ targetState?._vm?.authorizedKeys?.[key.id]?._notBeforeHeight != null && + Array.isArray(targetState._vm.authorizedKeys[key.id].purpose) && + targetState._vm.authorizedKeys[key.id].purpose.includes('enc')) { + newestEncryptionKeyHeight = Math.min(newestEncryptionKeyHeight, targetState._vm.authorizedKeys[key.id]._notBeforeHeight); + } + } + catch (e_) { + const e = e_; + if (e?.name === 'ChelErrorDecryptionKeyNotFound') { + console.warn(`OP_KEY_SHARE (${hash} of ${contractID}) missing secret key: ${e.message}`, e); + } + else { + // Using console.error instead of logEvtError because this + // is a side-effect and not relevant for outgoing messages + console.error(`OP_KEY_SHARE (${hash} of ${contractID}) error '${e.message || e}':`, e); + } + } + } + } + } + // If an encryption key has been shared with _notBefore lower than the + // current height, then the contract must be resynced. + const mustResync = !!(newestEncryptionKeyHeight < cheloniaState.contracts[v.contractID]?.height); + if (mustResync) { + if (!has(targetState, '_volatile')) + config.reactiveSet(targetState, '_volatile', Object.create(null)); + config.reactiveSet(targetState._volatile, 'dirty', true); + if (!Object.keys(targetState).some((k) => k !== '_volatile')) { + // If the contract only has _volatile state, we don't force sync it + return; + } + // Mark contracts that have foreign keys that have been received + // as dirty + // First, we group watched keys by key and contracts + const keyDict = Object.create(null); + targetState._volatile?.watch?.forEach(([keyName, contractID]) => { + if (!keyDict[keyName]) { + keyDict[keyName] = [contractID]; + return; + } + keyDict[keyName].push(contractID); + }); + // Then, see which of those contracts need to be updated + const contractIdsToUpdate = Array.from(new Set(Object.entries(keyDict).flatMap(([keyName, contractIDs]) => { + const keyId = findKeyIdByName(targetState, keyName); + if ( + // Does the key exist? 
(i.e., is it a current key) + keyId && + // Is it an encryption key? (signing keys don't build up a + // potentially invalid state because the private key isn't + // required for validation; however, missing encryption keys + // prevent message processing) + targetState._vm.authorizedKeys[keyId].purpose.includes('enc') && + // Is this a newly set key? (avoid re-syncing contracts that + // haven't been affected by the `OP_KEY_SHARE`) + targetState._vm.authorizedKeys[keyId]._notBeforeHeight >= newestEncryptionKeyHeight) { + return contractIDs; + } + return []; + }))); + // Mark these contracts as dirty + contractIdsToUpdate.forEach((contractID) => { + const targetState = cheloniaState[contractID]; + if (!targetState) + return; + if (!has(targetState, '_volatile')) + config.reactiveSet(targetState, '_volatile', Object.create(null)); + config.reactiveSet(targetState._volatile, 'dirty', true); + }); + // Since we have received new keys, the current contract state might be wrong, so we need to remove the contract and resync + // Note: The following may be problematic when several tabs are open + // sharing the same state. 
This is more of a general issue in this + // situation, not limited to the following sequence of events + if (self.subscriptionSet.has(v.contractID)) { + const resync = sbp('chelonia/private/queueEvent', v.contractID, [ + 'chelonia/private/in/syncContract', v.contractID + ]).then(() => { + // Now, if we're subscribed to any of the contracts that were + // marked as dirty, re-sync them + sbp('chelonia/private/out/sync', contractIdsToUpdate.filter((contractID) => { + return self.subscriptionSet.has(contractID); + }), { force: true, resync: true }).catch((e) => { + // Using console.error instead of logEvtError because this + // is a side-effect and not relevant for outgoing messages + console.error('[chelonia] Error resyncing contracts with foreign key references after key rotation', e); + }); + }).catch((e) => { + // Using console.error instead of logEvtError because this + // is a side-effect and not relevant for outgoing messages + console.error(`[chelonia] Error during sync for ${v.contractID} during OP_KEY_SHARE for ${contractID}`); + if (v.contractID === contractID) { + throw e; + } + }); + // If the keys received were for the current contract, we can't + // use queueEvent as we're already on that same queue + if (v.contractID !== contractID) { + await resync; + } + } + } + const previousVolatileState = targetState?._volatile; + sbp('chelonia/private/queueEvent', v.contractID, ['chelonia/private/postKeyShare', v.contractID, mustResync ? 
previousVolatileState : null, signingKey]) + .then(() => { + // The CONTRACT_HAS_RECEIVED_KEYS event is placed on the queue for + // the current contract so that calling + // 'chelonia/contract/waitingForKeyShareTo' will give correct results + // (i.e., the event is processed after the state is written) + sbp('chelonia/private/queueEvent', contractID, () => { + sbp('okTurtles.events/emit', CONTRACT_HAS_RECEIVED_KEYS, { contractID: v.contractID, sharedWithContractID: contractID, signingKeyId, get signingKeyName() { return state._vm?.authorizedKeys?.[signingKeyId]?.name; } }); + }).catch((e) => { + // Using console.error instead of logEvtError because this + // is a side-effect and not relevant for outgoing messages + console.error(`[chelonia] Error while emitting the CONTRACT_HAS_RECEIVED_KEYS event for ${contractID}`, e); + }); + }); + }); + }, + [SPMessage.OP_KEY_REQUEST](wv) { + const data = config.unwrapMaybeEncryptedData(wv); + // If we're unable to decrypt the OP_KEY_REQUEST, then still + // proceed to do accounting of invites + const v = data?.data || { contractID: '(private)', replyWith: { context: undefined }, request: '*' }; + const originatingContractID = v.contractID; + if (state._vm?.invites?.[signingKeyId]?.quantity != null) { + if (state._vm.invites[signingKeyId].quantity > 0) { + if ((--state._vm.invites[signingKeyId].quantity) <= 0) { + state._vm.invites[signingKeyId].status = INVITE_STATUS.USED; + } + } + else { + logEvtError(message, 'Ignoring OP_KEY_REQUEST because it exceeds allowed quantity: ' + originatingContractID); + return; + } + } + if (state._vm?.invites?.[signingKeyId]?.expires != null) { + if (state._vm.invites[signingKeyId].expires < Date.now()) { + logEvtError(message, 'Ignoring OP_KEY_REQUEST because it expired at ' + state._vm.invites[signingKeyId].expires + ': ' + originatingContractID); + return; + } + } + // If skipping porocessing or if the message is outgoing, there isn't + // anything else to do + if 
(config.skipActionProcessing || direction === 'outgoing') { + return; + } + // Outgoing messages don't have a context attribute + if (!has(v.replyWith, 'context')) { + logEvtError(message, 'Ignoring OP_KEY_REQUEST because it is missing the context attribute'); + return; + } + const context = v.replyWith.context; + if (data && (!Array.isArray(context) || context[0] !== originatingContractID)) { + logEvtError(message, 'Ignoring OP_KEY_REQUEST because it is signed by the wrong contract'); + return; + } + if (v.request !== '*') { + logEvtError(message, 'Ignoring OP_KEY_REQUEST because it has an unsupported request attribute', v.request); + return; + } + if (!state._vm.pendingKeyshares) + state._vm.pendingKeyshares = Object.create(null); + state._vm.pendingKeyshares[message.hash()] = context + ? [ + // Full-encryption (i.e., KRS encryption) requires that this request + // was encrypted and that the invite is marked as private + !!data?.encryptionKeyId, + message.height(), + signingKeyId, + context + ] + : [ + !!data?.encryptionKeyId, + message.height(), + signingKeyId + ]; + // Call 'chelonia/private/respondToAllKeyRequests' after sync + if (data) { + internalSideEffectStack?.push(() => { + self.setPostSyncOp(contractID, 'respondToAllKeyRequests-' + message.contractID(), ['chelonia/private/respondToAllKeyRequests', contractID]); + }); + } + }, + [SPMessage.OP_KEY_REQUEST_SEEN](wv) { + if (config.skipActionProcessing) { + return; + } + // TODO: Handle boolean (success) value + const data = config.unwrapMaybeEncryptedData(wv); + if (!data) + return; + const v = data.data; + if (state._vm.pendingKeyshares && v.keyRequestHash in state._vm.pendingKeyshares) { + const hash = v.keyRequestHash; + const pending = state._vm.pendingKeyshares[hash]; + delete state._vm.pendingKeyshares[hash]; + if (pending.length !== 4) + return; + // If we were able to respond, clean up responders + const keyId = pending[2]; + const originatingContractID = pending[3][0]; + if 
(Array.isArray(state._vm?.invites?.[keyId]?.responses)) { + state._vm?.invites?.[keyId]?.responses.push(originatingContractID); + } + if (!has(state._vm, 'keyshares')) + state._vm.keyshares = Object.create(null); + const success = v.success; + state._vm.keyshares[hash] = { + contractID: originatingContractID, + height, + success, + ...(success && { + hash: v.keyShareHash + }) + }; + } + }, + [SPMessage.OP_PROP_DEL]: notImplemented, + [SPMessage.OP_PROP_SET](v) { + if (!state._vm.props) + state._vm.props = {}; + state._vm.props[v.key] = v.value; + }, + [SPMessage.OP_KEY_ADD](v) { + const keys = keysToMap.call(self, v, height, state._vm.authorizedKeys); + const keysArray = Object.values(v); + keysArray.forEach((k) => { + if (has(state._vm.authorizedKeys, k.id) && state._vm.authorizedKeys[k.id]._notAfterHeight == null) { + throw new ChelErrorWarning('Cannot use OP_KEY_ADD on existing keys. Key ID: ' + k.id); + } + }); + validateKeyAddPermissions.call(self, contractID, signingKey, state, v); + state._vm.authorizedKeys = { ...state._vm.authorizedKeys, ...keys }; + keyAdditionProcessor.call(self, message, hash, v, state, contractID, signingKey, internalSideEffectStack); + }, + [SPMessage.OP_KEY_DEL](v) { + if (!state._vm.authorizedKeys) + state._vm.authorizedKeys = Object.create(null); + if (!state._volatile) + state._volatile = Object.create(null); + if (!state._volatile.pendingKeyRevocations) + state._volatile.pendingKeyRevocations = Object.create(null); + validateKeyDelPermissions.call(self, contractID, signingKey, state, v); + const keyIds = v.map((k) => { + const data = config.unwrapMaybeEncryptedData(k); + if (!data) + return undefined; + return data.data; + }).filter((keyId) => { + if (!keyId || typeof keyId !== 'string') + return false; + if (!has(state._vm.authorizedKeys, keyId) || state._vm.authorizedKeys[keyId]._notAfterHeight != null) { + console.warn('Attempted to delete non-existent key from contract', { contractID, keyId }); + return false; + } + return 
true; + }); + keyIds.forEach((keyId) => { + const key = state._vm.authorizedKeys[keyId]; + state._vm.authorizedKeys[keyId]._notAfterHeight = height; + if (has(state._volatile.pendingKeyRevocations, keyId)) { + delete state._volatile.pendingKeyRevocations[keyId]; + } + // Are we deleting a foreign key? If so, we also need to remove + // the operation from (1) _volatile.watch (on the other contract) + // and (2) pendingWatch + if (key.foreignKey) { + const fkUrl = new URL(key.foreignKey); + const foreignContract = fkUrl.pathname; + const foreignKeyName = fkUrl.searchParams.get('keyName'); + if (!foreignContract || !foreignKeyName) + throw new Error('Invalid foreign key: missing contract or key name'); + internalSideEffectStack?.push(() => { + sbp('chelonia/private/queueEvent', foreignContract, () => { + const rootState = sbp(config.stateSelector); + if (Array.isArray(rootState[foreignContract]?._volatile?.watch)) { + // Stop watching events for this key + const oldWatch = rootState[foreignContract]._volatile.watch; + rootState[foreignContract]._volatile.watch = oldWatch.filter(([name, cID]) => name !== foreignKeyName || cID !== contractID); + if (oldWatch.length !== rootState[foreignContract]._volatile.watch.length) { + // If the number of foreign keys changed, maybe there's no + // reason to remain subscribed to this contract. In this + // case, attempt to release it. 
+ sbp('chelonia/contract/release', foreignContract, { try: true }).catch((e) => { + // Using console.error instead of logEvtError because this + // is a side-effect and not relevant for outgoing messages + console.error(`[chelonia] Error at OP_KEY_DEL internalSideEffectStack while attempting to release foreign contract ${foreignContract}`, e); + }); + } + } + }).catch((e) => { + // Using console.error instead of logEvtError because this + // is a side-effect and not relevant for outgoing messages + console.error('Error stopping watching events after removing key', { contractID, foreignContract, foreignKeyName, fkUrl }, e); + }); + }); + const pendingWatch = state._vm.pendingWatch?.[foreignContract]; + if (pendingWatch) { + state._vm.pendingWatch[foreignContract] = pendingWatch.filter(([, kId]) => kId !== keyId); + } + } + // Set the status to revoked for invite keys + if (key.name.startsWith('#inviteKey-') && state._vm.invites[key.id]) { + state._vm.invites[key.id].status = INVITE_STATUS.REVOKED; + } + }); + // Check state._volatile.watch for contracts that should be + // mirroring this operation + if (Array.isArray(state._volatile?.watch)) { + const updatedKeysMap = Object.create(null); + keyIds.forEach((keyId) => { + updatedKeysMap[state._vm.authorizedKeys[keyId].name] = { + name: state._vm.authorizedKeys[keyId].name, + oldKeyId: keyId + }; + }); + keyRotationHelper(contractID, state, config, updatedKeysMap, [SPMessage.OP_KEY_DEL], 'chelonia/out/keyDel', (name) => updatedKeysMap[name[0]].oldKeyId, internalSideEffectStack); + } + }, + [SPMessage.OP_KEY_UPDATE](v) { + if (!state._volatile) + state._volatile = Object.create(null); + if (!state._volatile.pendingKeyRevocations) + state._volatile.pendingKeyRevocations = Object.create(null); + const [updatedKeys, updatedMap] = validateKeyUpdatePermissions.call(self, contractID, signingKey, state, v); + const keysToDelete = Object.values(updatedMap); + for (const keyId of keysToDelete) { + if 
(has(state._volatile.pendingKeyRevocations, keyId)) { + delete state._volatile.pendingKeyRevocations[keyId]; + } + state._vm.authorizedKeys[keyId]._notAfterHeight = height; + } + for (const key of updatedKeys) { + if (!has(state._vm.authorizedKeys, key.id)) { + key._notBeforeHeight = height; + state._vm.authorizedKeys[key.id] = cloneDeep(key); + } + } + keyAdditionProcessor.call(self, message, hash, updatedKeys, state, contractID, signingKey, internalSideEffectStack); + // Check state._volatile.watch for contracts that should be + // mirroring this operation + if (Array.isArray(state._volatile?.watch)) { + const updatedKeysMap = Object.create(null); + updatedKeys.forEach((key) => { + if (key.data) { + updatedKeysMap[key.name] = cloneDeep(key); + updatedKeysMap[key.name].oldKeyId = updatedMap[key.id]; + } + }); + keyRotationHelper(contractID, state, config, updatedKeysMap, [SPMessage.OP_KEY_UPDATE], 'chelonia/out/keyUpdate', (name) => ({ + name: name[1], + oldKeyId: updatedKeysMap[name[0]].oldKeyId, + id: updatedKeysMap[name[0]].id, + data: updatedKeysMap[name[0]].data + }), internalSideEffectStack); + } + }, + [SPMessage.OP_PROTOCOL_UPGRADE]: notImplemented + }; + if (!this.config.skipActionProcessing && !this.manifestToContract[manifestHash]) { + const rootState = sbp(this.config.stateSelector); + // Having rootState.contracts[contractID] is not enough to determine we + // have previously synced this contract, as reference counts are also + // stored there. Hence, we check for the presence of 'type' + if (!contractName) { + contractName = has(rootState.contracts, contractID) && rootState.contracts[contractID] && has(rootState.contracts[contractID], 'type') + ? rootState.contracts[contractID].type + : opT === SPMessage.OP_CONTRACT + ? 
opV.type + : ''; + } + if (!contractName) { + throw new Error(`Unable to determine the name for a contract and refusing to load it (contract ID was ${contractID} and its manifest hash was ${manifestHash})`); + } + await sbp('chelonia/private/loadManifest', contractName, manifestHash); + } + let processOp = true; + if (config.preOp) { + processOp = config.preOp(message, state) !== false && processOp; + } + let signingKey; + // Signature verification + { + // This sync code has potential issues + // The first issue is that it can deadlock if there are circular references + // The second issue is that it doesn't handle key rotation. If the key used for signing is invalidated / removed from the originating contract, we won't have it in the state + // Both of these issues can be resolved by introducing a parameter with the message ID the state is based on. This requires implementing a separate, ephemeral, state container for operations that refer to a different contract. + // The difficulty of this is how to securely determine the message ID to use. + // The server can assist with this. + const stateForValidation = opT === SPMessage.OP_CONTRACT && !state?._vm?.authorizedKeys + ? 
{ + _vm: { + authorizedKeys: keysToMap.call(this, opV.keys, height) + } + } + : state; + // Verify that the signing key is found, has the correct purpose and is + // allowed to sign this particular operation + if (!validateKeyPermissions(message, config, stateForValidation, signingKeyId, opT, opV)) { + throw new Error('No matching signing key was defined'); + } + signingKey = stateForValidation._vm.authorizedKeys[signingKeyId]; + } + if (config[`preOp_${opT}`]) { + processOp = config[`preOp_${opT}`](message, state) !== false && processOp; + } + if (processOp) { + await opFns[opT](opV); + config.postOp?.(message, state); + config[`postOp_${opT}`]?.(message, state); // hack to fix syntax highlighting ` + } + }, + 'chelonia/private/in/enqueueHandleEvent': function (contractID, event) { + // make sure handleEvent is called AFTER any currently-running invocations + // to 'chelonia/private/out/sync', to prevent gi.db from throwing + // "bad previousHEAD" errors + return sbp('chelonia/private/queueEvent', contractID, async () => { + await sbp('chelonia/private/in/handleEvent', contractID, event); + // Before the next operation is enqueued, enqueue post sync ops. 
This + // makes calling `/wait` more reliable + sbp('chelonia/private/enqueuePostSyncOps', contractID); + }); + }, + 'chelonia/private/in/syncContract': async function (contractID, params) { + const state = sbp(this.config.stateSelector); + if (state.contracts[contractID] === null) { + throw new ChelErrorResourceGone('Cannot sync permanently deleted contract ' + contractID); + } + try { + this.currentSyncs[contractID] = { firstSync: !state.contracts[contractID]?.type }; + sbp('okTurtles.events/emit', CONTRACT_IS_SYNCING, contractID, true); + const currentVolatileState = state[contractID]?._volatile || Object.create(null); + // If the dirty flag is set (indicating that new encryption keys were received), + // we remove the current state before syncing (this has the effect of syncing + // from the beginning, recreating the entire state). When this is the case, + // the _volatile state is preserved + if (currentVolatileState?.dirty || params?.resync) { + delete currentVolatileState.dirty; + currentVolatileState.resyncing = true; + sbp('chelonia/private/removeImmediately', contractID, { resync: true }); + this.config.reactiveSet(state, contractID, Object.create(null)); + this.config.reactiveSet(state[contractID], '_volatile', currentVolatileState); + } + const { HEAD: latestHEAD } = await sbp('chelonia/out/latestHEADInfo', contractID); + console.debug(`[chelonia] syncContract: ${contractID} latestHash is: ${latestHEAD}`); + // there is a chance two users are logged in to the same machine and must check their contracts before syncing + const { HEAD: recentHEAD, height: recentHeight } = state.contracts[contractID] || {}; + const isSubscribed = this.subscriptionSet.has(contractID); + if (!isSubscribed) { + const entry = this.pending.find((entry) => entry?.contractID === contractID); + // we're syncing a contract for the first time, make sure to add to pending + // so that handleEvents knows to expect events from this contract + if (!entry) { + this.pending.push({ 
contractID }); + } + } + this.postSyncOperations[contractID] = this.postSyncOperations[contractID] ?? Object.create(null); + if (latestHEAD !== recentHEAD) { + console.debug(`[chelonia] Synchronizing Contract ${contractID}: our recent was ${recentHEAD || 'undefined'} but the latest is ${latestHEAD}`); + // TODO: fetch events from localStorage instead of server if we have them + const eventsStream = sbp('chelonia/out/eventsAfter', contractID, recentHeight ?? 0, undefined, recentHEAD ?? contractID); + // Sanity check: verify event with latest hash exists in list of events + // TODO: using findLastIndex, it will be more clean but it needs Cypress 9.7+ which has bad performance + // https://docs.cypress.io/guides/references/changelog#9-7-0 + // https://github.com/cypress-io/cypress/issues/22868 + let latestHashFound = false; + const eventReader = eventsStream.getReader(); + // remove the first element in cases where we are not getting the contract for the first time + for (let skip = has(state.contracts, contractID) && has(state.contracts[contractID], 'HEAD');; skip = false) { + const { done, value: event } = await eventReader.read(); + if (done) { + if (!latestHashFound) { + throw new ChelErrorForkedChain(`expected hash ${latestHEAD} in list of events for contract ${contractID}`); + } + break; + } + if (!latestHashFound) { + latestHashFound = SPMessage.deserializeHEAD(event).hash === latestHEAD; + } + if (skip) + continue; + // this must be called directly, instead of via enqueueHandleEvent + await sbp('chelonia/private/in/handleEvent', contractID, event); + } + } + else if (!isSubscribed) { + this.subscriptionSet.add(contractID); + sbp('okTurtles.events/emit', CONTRACTS_MODIFIED, Array.from(this.subscriptionSet), { added: [contractID], removed: [] }); + const entryIndex = this.pending.findIndex((entry) => entry?.contractID === contractID); + if (entryIndex !== -1) { + this.pending.splice(entryIndex, 1); + } + console.debug(`[chelonia] added 
already synchronized ${contractID} to subscription set`); + } + else { + console.debug(`[chelonia] contract ${contractID} was already synchronized`); + } + // Do not await here as the post-sync ops might themselves might be + // waiting on the same queue, causing a deadlock + sbp('chelonia/private/enqueuePostSyncOps', contractID); + } + catch (e) { + console.error(`[chelonia] syncContract error: ${e.message || e}`, e); + this.config.hooks.syncContractError?.(e, contractID); + throw e; + } + finally { + if (state[contractID]?._volatile?.resyncing) { + this.config.reactiveDel(state[contractID]._volatile, 'resyncing'); + } + delete this.currentSyncs[contractID]; + sbp('okTurtles.events/emit', CONTRACT_IS_SYNCING, contractID, false); + } + }, + 'chelonia/private/enqueuePostSyncOps': function (contractID) { + if (!has(this.postSyncOperations, contractID)) + return; + // Iterate over each post-sync operation associated with the given contractID. + Object.entries(this.postSyncOperations[contractID]).forEach(([key, op]) => { + // Remove the operation which is about to be handled so that subsequent + // calls to this selector don't result in repeat calls to the post-sync op + delete this.postSyncOperations[contractID][key]; + // Queue the current operation for execution. + // Note that we do _not_ await because it could be unsafe to do so. + // If the operation fails for some reason, just log the error. 
+ sbp('chelonia/private/queueEvent', contractID, op).catch((e) => { + console.error(`Post-sync operation for ${contractID} failed`, { contractID, op, error: e }); + }); + }); + }, + 'chelonia/private/watchForeignKeys': function (externalContractID) { + const state = sbp(this.config.stateSelector); + const externalContractState = state[externalContractID]; + const pendingWatch = externalContractState?._vm?.pendingWatch; + if (!pendingWatch || !Object.keys(pendingWatch).length) + return; + const signingKey = findSuitableSecretKeyId(externalContractState, [SPMessage.OP_KEY_DEL], ['sig']); + const canMirrorOperations = !!signingKey; + // Only sync contract if we are actually able to mirror key operations + // This avoids exponentially growing the number of contracts that we need + // to be subscribed to. + // Otherwise, every time there is a foreign key, we would subscribe to that + // contract, plus the contracts referenced by the foreign keys of that + // contract, plus those contracts referenced by the foreign keys of those + // other contracts and so on. 
+ if (!canMirrorOperations) { + console.info('[chelonia/private/watchForeignKeys]: Returning as operations cannot be mirrored', { externalContractID }); + return; + } + // For each pending watch operation, queue a synchronization event in the + // respective contract queue + Object.entries(pendingWatch).forEach(([contractID, keys]) => { + if (!Array.isArray(keys) || + // Check that the keys exist and haven't been revoked + !keys.reduce((acc, [, id]) => { + return acc || has(externalContractState._vm.authorizedKeys, id); + }, false)) { + console.info('[chelonia/private/watchForeignKeys]: Skipping as none of the keys to watch exist', { + externalContractID, + contractID + }); + return; + } + sbp('chelonia/private/queueEvent', contractID, ['chelonia/private/in/syncContractAndWatchKeys', contractID, externalContractID]).catch((e) => { + console.error(`Error at syncContractAndWatchKeys for contractID ${contractID} and externalContractID ${externalContractID}`, e); + }); + }); + }, + 'chelonia/private/in/syncContractAndWatchKeys': async function (contractID, externalContractID) { + const rootState = sbp(this.config.stateSelector); + const externalContractState = rootState[externalContractID]; + const pendingWatch = externalContractState?._vm?.pendingWatch?.[contractID]?.splice(0); + // We duplicate the check in 'chelonia/private/watchForeignKeys' because + // new events may have been received in the meantime. 
This avoids + // unnecessarily subscribing to the contract + if (!Array.isArray(pendingWatch) || + // Check that the keys exist and haven't been revoked + !pendingWatch.reduce((acc, [, id]) => { + return acc || (has(externalContractState._vm.authorizedKeys, id) && + findKeyIdByName(externalContractState, externalContractState._vm.authorizedKeys[id].name) != null); + }, false)) { + console.info('[chelonia/private/syncContractAndWatchKeys]: Skipping as none of the keys to watch exist', { + externalContractID, + contractID + }); + return; + } + // We check this.subscriptionSet to see if we're already + // subscribed to the contract; if not, we call sync. + if (!this.subscriptionSet.has(contractID)) { + await sbp('chelonia/private/in/syncContract', contractID); + } + const contractState = rootState[contractID]; + const keysToDelete = []; + const keysToUpdate = []; + pendingWatch.forEach(([keyName, externalId]) => { + // Does the key exist? If not, it has probably been removed and instead + // of waiting, we need to remove it ourselves + const keyId = findKeyIdByName(contractState, keyName); + if (!keyId) { + keysToDelete.push(externalId); + return; + } + else if (keyId !== externalId) { + // Or, the key has been updated and we need to update it in the external + // contract as well + keysToUpdate.push(externalId); + } + // Add keys to watchlist as another contract is waiting on these + // operations + if (!contractState._volatile) { + this.config.reactiveSet(contractState, '_volatile', Object.create(null, { watch: { value: [[keyName, externalContractID]], configurable: true, enumerable: true, writable: true } })); + } + else { + if (!contractState._volatile.watch) + this.config.reactiveSet(contractState._volatile, 'watch', [[keyName, externalContractID]]); + if (Array.isArray(contractState._volatile.watch) && !contractState._volatile.watch.find((v) => v[0] === keyName && v[1] === externalContractID)) + contractState._volatile.watch.push([keyName, externalContractID]); 
+ } + }); + // If there are keys that need to be revoked, queue an event to handle the + // deletion + if (keysToDelete.length || keysToUpdate.length) { + if (!externalContractState._volatile) { + this.config.reactiveSet(externalContractState, '_volatile', Object.create(null)); + } + if (!externalContractState._volatile.pendingKeyRevocations) { + this.config.reactiveSet(externalContractState._volatile, 'pendingKeyRevocations', Object.create(null)); + } + keysToDelete.forEach((id) => this.config.reactiveSet(externalContractState._volatile.pendingKeyRevocations, id, 'del')); + keysToUpdate.forEach((id) => this.config.reactiveSet(externalContractState._volatile.pendingKeyRevocations, id, true)); + sbp('chelonia/private/queueEvent', externalContractID, ['chelonia/private/deleteOrRotateRevokedKeys', externalContractID]).catch((e) => { + console.error(`Error at deleteOrRotateRevokedKeys for contractID ${contractID} and externalContractID ${externalContractID}`, e); + }); + } + }, + // The following function gets called when we start watching a contract for + // foreign keys for the first time, and it ensures that, at the point the + // watching starts, keys are in sync between the two contracts (later on, + // this will be handled automatically for incoming OP_KEY_DEL and + // OP_KEY_UPDATE). + // For any given foreign key, there are three possible states: + // 1. The key is in sync with the foreign contract. In this case, there's + // nothing left to do. + // 2. The key has been rotated in the foreign contract (replaced by another + // key of the same name). We need to mirror this operation manually + // since watching only affects new messages we receive. + // 3. The key has been removed in the foreign contract. We also need to + // mirror the operation. 
+ 'chelonia/private/deleteOrRotateRevokedKeys': function (contractID) { + const rootState = sbp(this.config.stateSelector); + const contractState = rootState[contractID]; + const pendingKeyRevocations = contractState?._volatile?.pendingKeyRevocations; + if (!pendingKeyRevocations || Object.keys(pendingKeyRevocations).length === 0) + return; + // First, we handle keys that have been rotated + const keysToUpdate = Object.entries(pendingKeyRevocations).filter(([, v]) => v === true).map(([id]) => id); + // Aggregate the keys that we can update to send them in a single operation + const [, keyUpdateSigningKeyId, keyUpdateArgs] = keysToUpdate.reduce((acc, keyId) => { + const key = contractState._vm?.authorizedKeys?.[keyId]; + if (!key || !key.foreignKey) + return acc; + const foreignKey = String(key.foreignKey); + const fkUrl = new URL(foreignKey); + const foreignContractID = fkUrl.pathname; + const foreignKeyName = fkUrl.searchParams.get('keyName'); + if (!foreignKeyName) + throw new Error('Missing foreign key name'); + const foreignState = rootState[foreignContractID]; + if (!foreignState) + return acc; + const fKeyId = findKeyIdByName(foreignState, foreignKeyName); + if (!fKeyId) { + // Key was deleted; mark it for deletion + if (pendingKeyRevocations[keyId] === true) { + this.config.reactiveSet(pendingKeyRevocations, keyId, 'del'); + } + return acc; + } + const [currentRingLevel, currentSigningKeyId, currentKeyArgs] = acc; + const ringLevel = Math.min(currentRingLevel, key.ringLevel ?? 
Number.POSITIVE_INFINITY); + if (ringLevel >= currentRingLevel) { + currentKeyArgs.push({ + name: key.name, + oldKeyId: keyId, + id: fKeyId, + data: foreignState._vm.authorizedKeys[fKeyId].data + }); + return [currentRingLevel, currentSigningKeyId, currentKeyArgs]; + } + else if (Number.isFinite(ringLevel)) { + const signingKeyId = findSuitableSecretKeyId(contractState, [SPMessage.OP_KEY_UPDATE], ['sig'], ringLevel); + if (signingKeyId) { + currentKeyArgs.push({ + name: key.name, + oldKeyId: keyId, + id: fKeyId, + data: foreignState._vm.authorizedKeys[fKeyId].data + }); + return [ringLevel, signingKeyId, currentKeyArgs]; + } + } + return acc; + }, [Number.POSITIVE_INFINITY, '', []]); + if (keyUpdateArgs.length !== 0) { + const contractName = contractState._vm.type; + // This is safe to do without await because it's sending an operation + // Using await could deadlock when retrying to send the message + sbp('chelonia/out/keyUpdate', { contractID, contractName, data: keyUpdateArgs, signingKeyId: keyUpdateSigningKeyId }).catch((e) => { + console.error(`[chelonia/private/deleteOrRotateRevokedKeys] Error sending OP_KEY_UPDATE for ${contractID}`, e.message); + }); + } + // And then, we handle keys that have been deleted + const keysToDelete = Object.entries(pendingKeyRevocations).filter(([, v]) => v === 'del').map(([id]) => id); + // Aggregate the keys that we can delete to send them in a single operation + const [, keyDelSigningKeyId, keyIdsToDelete] = keysToDelete.reduce((acc, keyId) => { + const [currentRingLevel, currentSigningKeyId, currentKeyIds] = acc; + const ringLevel = Math.min(currentRingLevel, contractState._vm?.authorizedKeys?.[keyId]?.ringLevel ?? 
Number.POSITIVE_INFINITY); + if (ringLevel >= currentRingLevel) { + currentKeyIds.push(keyId); + return [currentRingLevel, currentSigningKeyId, currentKeyIds]; + } + else if (Number.isFinite(ringLevel)) { + const signingKeyId = findSuitableSecretKeyId(contractState, [SPMessage.OP_KEY_DEL], ['sig'], ringLevel); + if (signingKeyId) { + currentKeyIds.push(keyId); + return [ringLevel, signingKeyId, currentKeyIds]; + } + } + return acc; + }, [Number.POSITIVE_INFINITY, '', []]); + if (keyIdsToDelete.length !== 0) { + const contractName = contractState._vm.type; + // This is safe to do without await because it's sending an operation + // Using await could deadlock when retrying to send the message + sbp('chelonia/out/keyDel', { contractID, contractName, data: keyIdsToDelete, signingKeyId: keyDelSigningKeyId }).catch((e) => { + console.error(`[chelonia/private/deleteRevokedKeys] Error sending OP_KEY_DEL for ${contractID}`, e.message); + }); + } + }, + 'chelonia/private/respondToAllKeyRequests': function (contractID) { + const state = sbp(this.config.stateSelector); + const contractState = state[contractID] ?? 
{}; + const pending = contractState?._vm?.pendingKeyshares; + if (!pending) + return; + const signingKeyId = findSuitableSecretKeyId(contractState, [SPMessage.OP_ATOMIC, SPMessage.OP_KEY_REQUEST_SEEN, SPMessage.OP_KEY_SHARE], ['sig']); + if (!signingKeyId) { + console.log('Unable to respond to key request because there is no suitable secret key with OP_KEY_REQUEST_SEEN permission'); + return; + } + Object.entries(pending).map(([hash, entry]) => { + if (!Array.isArray(entry) || entry.length !== 4) { + return undefined; + } + const [, , , [originatingContractID]] = entry; + return sbp('chelonia/private/queueEvent', originatingContractID, ['chelonia/private/respondToKeyRequest', contractID, signingKeyId, hash]).catch((e) => { + console.error(`respondToAllKeyRequests: Error responding to key request ${hash} from ${originatingContractID} to ${contractID}`, e); + }); + }); + }, + 'chelonia/private/respondToKeyRequest': async function (contractID, signingKeyId, hash) { + const state = sbp(this.config.stateSelector); + const contractState = state[contractID]; + const entry = contractState?._vm?.pendingKeyshares?.[hash]; + const instance = this._instance; + if (!Array.isArray(entry) || entry.length !== 4) { + return; + } + const [keyShareEncryption, height, , [originatingContractID, rv, originatingContractHeight, headJSON]] = entry; + entry.pop(); + const krsEncryption = !!contractState._vm.authorizedKeys?.[signingKeyId]?._private; + // 1. Sync (originating) identity contract + await sbp('chelonia/private/in/syncContract', originatingContractID); + if (instance !== this._instance) + return; + const originatingState = state[originatingContractID]; + const contractName = state.contracts[contractID].type; + const originatingContractName = originatingState._vm.type; + const v = signedIncomingData(originatingContractID, originatingState, rv, originatingContractHeight, headJSON).valueOf(); + // 2. 
Verify 'data' + const { encryptionKeyId } = v; + const responseKey = encryptedIncomingData(contractID, contractState, v.responseKey, height, this.transientSecretKeys, headJSON).valueOf(); + const deserializedResponseKey = deserializeKey(responseKey); + const responseKeyId = keyId(deserializedResponseKey); + // This is safe to do without await because it's sending actions + // If we had await it could deadlock when retrying to send the event + Promise.resolve().then(() => { + if (instance !== this._instance) + return; + if (!has(originatingState._vm.authorizedKeys, responseKeyId) || originatingState._vm.authorizedKeys[responseKeyId]._notAfterHeight != null) { + throw new Error(`Unable to respond to key request for ${originatingContractID}. Key ${responseKeyId} is not valid.`); + } + // We don't need to worry about persistence (if it was an outgoing + // message) here as this is done from an internal side-effect. + sbp('chelonia/storeSecretKeys', new Secret([ + { key: deserializedResponseKey } + ])); + const keys = pick(state.secretKeys, Object.entries(contractState._vm.authorizedKeys) + .filter(([, key]) => !!key.meta?.private?.shareable) + .map(([kId]) => kId)); + if (!keys || Object.keys(keys).length === 0) { + console.info('respondToAllKeyRequests: no keys to share', { contractID, originatingContractID }); + return; + } + const keySharePayload = { + contractID, + keys: Object.entries(keys).map(([keyId, key]) => ({ + id: keyId, + meta: { + private: { + content: encryptedOutgoingData(originatingContractID, encryptionKeyId, key), + shareable: true + } + } + })), + keyRequestHash: hash, + keyRequestHeight: height + }; + // 3. 
Send OP_KEY_SHARE to identity contract + if (!contractState?._vm?.pendingKeyshares?.[hash]) { + // While we were getting ready, another client may have shared the keys + return; + } + return keySharePayload; + }).then((keySharePayload) => { + if (instance !== this._instance || !keySharePayload) + return; + return sbp('chelonia/out/keyShare', { + contractID: originatingContractID, + contractName: originatingContractName, + data: keyShareEncryption + ? encryptedOutgoingData(originatingContractID, findSuitablePublicKeyIds(originatingState, [SPMessage.OP_KEY_SHARE], ['enc'])?.[0] || '', keySharePayload) + : keySharePayload, + signingKeyId: responseKeyId + }).then((msg) => { + if (instance !== this._instance) + return; + // 4(i). Remove originating contract and update current contract with information + const payload = { keyRequestHash: hash, keyShareHash: msg.hash(), success: true }; + const connectionKeyPayload = { + contractID: originatingContractID, + keys: [ + { + id: responseKeyId, + meta: { + private: { + content: encryptedOutgoingData(contractID, findSuitablePublicKeyIds(contractState, [SPMessage.OP_KEY_REQUEST_SEEN], ['enc'])?.[0] || '', responseKey), + shareable: true + } + } + } + ] + }; + // This is safe to do without await because it's sending an action + // If we had await it could deadlock when retrying to send the event + sbp('chelonia/out/atomic', { + contractID, + contractName, + signingKeyId, + data: [ + [ + 'chelonia/out/keyRequestResponse', + { + data: krsEncryption + ? encryptedOutgoingData(contractID, findSuitablePublicKeyIds(contractState, [SPMessage.OP_KEY_REQUEST_SEEN], ['enc'])?.[0] || '', payload) + : payload + } + ], + [ + // Upon successful key share, we want to share deserializedResponseKey + // with ourselves + 'chelonia/out/keyShare', + { + data: keyShareEncryption + ? 
encryptedOutgoingData(contractID, findSuitablePublicKeyIds(contractState, [SPMessage.OP_KEY_SHARE], ['enc'])?.[0] || '', connectionKeyPayload) + : connectionKeyPayload + } + ] + ] + }).catch((e) => { + console.error('Error at respondToKeyRequest while sending keyRequestResponse', e); + }); + }); + }).catch((e) => { + console.error('Error at respondToKeyRequest', e); + const payload = { keyRequestHash: hash, success: false }; + // 4(ii). Remove originating contract and update current contract with information + if (!contractState?._vm?.pendingKeyshares?.[hash]) { + // While we were getting ready, another client may have shared the keys + return; + } + // This is safe to do without await because it's sending an action + // If we had await it could deadlock when retrying to send the event + sbp('chelonia/out/keyRequestResponse', { + contractID, + contractName, + signingKeyId, + data: krsEncryption + ? encryptedOutgoingData(contractID, findSuitablePublicKeyIds(contractState, [SPMessage.OP_KEY_REQUEST_SEEN], ['enc'])?.[0] || '', payload) + : payload + }).catch((e) => { + console.error('Error at respondToKeyRequest while sending keyRequestResponse in error handler', e); + }); + }); + }, + 'chelonia/private/in/handleEvent': async function (contractID, rawMessage) { + const state = sbp(this.config.stateSelector); + const { preHandleEvent, postHandleEvent, handleEventError } = this.config.hooks; + let processingErrored = false; + let message; + // Errors in mutations result in ignored messages + // Errors in side effects result in dropped messages to be reprocessed + try { + // verify we're expecting to hear from this contract + if (!this.config.acceptAllMessages && !this.pending.some((entry) => entry?.contractID === contractID) && !this.subscriptionSet.has(contractID)) { + console.warn(`[chelonia] WARN: ignoring unexpected event for ${contractID}:`, rawMessage); + return; + } + // contractStateCopy has a copy of the current contract state, or an empty + // object if the 
state doesn't exist. This copy will be used to apply + // any changes from processing the current event as well as when calling + // side-effects and, once everything is processed, it will be applied + // to the global state. Important note: because the state change is + // applied to the Vuex state only if process is successful (and after both + // process and the sideEffect finish), any sideEffects that need to + // access the state should do so only through the state that is passed in + // to the call to the sideEffect, or through a call through queueInvocation + // (so that the side effect runs after the changes are applied) + const contractStateCopy = state[contractID] ? cloneDeep(state[contractID]) : Object.create(null); + // Now, deserialize the message + // The message is deserialized *here* and not earlier because deserialize + // constructs objects of signedIncomingData and encryptedIncomingData + // which are bound to the state. For some opcodes (such as OP_ATOMIC), the + // state could change in ways that are significant for further processing, + // so those objects need to be bound to the state copy (which is mutated) + // as opposed to the root state (which is mutated only after + // processing is done). + // For instance, let's say the message contains an OP_ATOMIC comprising + // two operations: OP_KEY_ADD (adding a signing key) and OP_ACTION_ENCRYPTED + // (with an inner signature using this key in OP_KEY_ADD). If the state + // is bound to the copy (as below), then by the time OP_ACTION_ENCRYPTED + // is processed, the result of OP_KEY_ADD has been applied to the state + // copy. If we didn't specify a state or instead grabbed it from the root + // state, then we wouldn't be able to process OP_ACTION_ENCRYPTED correctly, + // as we wouldn't know that the key is valid from that state, and the + // state copy (contractStateCopy) is only written to the root state after + // all processing has completed. 
+ message = SPMessage.deserialize(rawMessage, this.transientSecretKeys, contractStateCopy, this.config.unwrapMaybeEncryptedData); + if (message.contractID() !== contractID) { + throw new Error(`[chelonia] Wrong contract ID. Expected ${contractID} but got ${message.contractID()}`); + } + if (!message.isFirstMessage() && (!has(state.contracts, contractID) || !has(state, contractID))) { + throw new ChelErrorUnrecoverable('The event is not for a first message but the contract state is missing'); + } + preHandleEvent?.(message); + // the order the following actions are done is critically important! + // first we make sure we can save this message to the db + // if an exception is thrown here we do not need to revert the state + // because nothing has been processed yet + const proceed = handleEvent.checkMessageOrdering.call(this, message); + if (proceed === false) + return; + // If the contract was marked as dirty, we stop processing + // The 'dirty' flag is set, possibly *by another contract*, indicating + // that a previously unknown encryption key has been received. This means + // that the current state is invalid (because it could changed based on + // this new information) and we must re-sync the contract. When this + // happens, we stop processing because the state will be regenerated. + if (state[contractID]?._volatile?.dirty) { + console.info(`[chelonia] Ignoring message ${message.description()} as the contract is marked as dirty`); + return; + } + const internalSideEffectStack = !this.config.skipSideEffects ? [] : undefined; + // process the mutation on the state + // IMPORTANT: even though we 'await' processMutation, everything in your + // contract's 'process' function must be synchronous! 
The only + // reason we 'await' here is to dynamically load any new contract + // source / definitions specified by the SPMessage + missingDecryptionKeyIdsMap.delete(message); + try { + await handleEvent.processMutation.call(this, message, contractStateCopy, internalSideEffectStack); + } + catch (e_) { + const e = e_; + if (e?.name === 'ChelErrorDecryptionKeyNotFound') { + console.warn(`[chelonia] WARN '${e.name}' in processMutation for ${message.description()}: ${e.message}`, e, message.serialize()); + if (e.cause) { + const missingDecryptionKeyIds = missingDecryptionKeyIdsMap.get(message); + if (missingDecryptionKeyIds) { + missingDecryptionKeyIds.add(e.cause); + } + else { + missingDecryptionKeyIdsMap.set(message, new Set([e.cause])); + } + } + } + else { + console.error(`[chelonia] ERROR '${e.name}' in processMutation for ${message.description()}: ${e.message || e}`, e, message.serialize()); + } + // we revert any changes to the contract state that occurred, ignoring this mutation + console.warn(`[chelonia] Error processing ${message.description()}: ${message.serialize()}. Any side effects will be skipped!`); + if (this.config.strictProcessing) { + throw e; + } + processingErrored = e?.name !== 'ChelErrorWarning'; + this.config.hooks.processError?.(e, message, getMsgMeta.call(this, message, contractID, contractStateCopy)); + // special error that prevents the head from being updated, effectively killing the contract + if (e.name === 'ChelErrorUnrecoverable' || + e.name === 'ChelErrorForkedChain' || + message.isFirstMessage()) { + throw e; + } + } + // process any side-effects (these must never result in any mutation to the contract state!) 
+ if (!processingErrored) { + // Gets run when skipSideEffects is false + if (Array.isArray(internalSideEffectStack) && internalSideEffectStack.length > 0) { + await Promise.all(internalSideEffectStack.map(fn => Promise.resolve(fn({ state: contractStateCopy, message: message })).catch((e_) => { + const e = e_; + console.error(`[chelonia] ERROR '${e.name}' in internal side effect for ${message.description()}: ${e.message}`, e, { message: message.serialize() }); + }))); + } + if (!this.config.skipActionProcessing && !this.config.skipSideEffects) { + await handleEvent.processSideEffects.call(this, message, contractStateCopy)?.catch((e_) => { + const e = e_; + console.error(`[chelonia] ERROR '${e.name}' in sideEffect for ${message.description()}: ${e.message}`, e, { message: message.serialize() }); + // We used to revert the state and rethrow the error here, but we no longer do that + // see this issue for why: https://github.com/okTurtles/group-income/issues/1544 + this.config.hooks.sideEffectError?.(e, message); + }); + } + } + // We keep changes to the contract state and state.contracts as close as + // possible in the code to reduce the chances of still ending up with + // an inconsistent state if a sudden failure happens while this code + // is executing. In particular, everything in between should be synchronous. + // This block will apply all the changes related to modifying the state + // after an event has been processed: + // 1. Adding the message to the DB + // 2. Applying changes to the contract state + // 3. 
Applying changes to rootState.contracts + try { + const state = sbp(this.config.stateSelector); + await handleEvent.applyProcessResult.call(this, { message, state, contractState: contractStateCopy, processingErrored, postHandleEvent }); + } + catch (e_) { + const e = e_; + console.error(`[chelonia] ERROR '${e.name}' for ${message.description()} marking the event as processed: ${e.message}`, e, { message: message.serialize() }); + } + } + catch (e_) { + const e = e_; + console.error(`[chelonia] ERROR in handleEvent: ${e.message || e}`, e); + try { + handleEventError?.(e, message); + } + catch (e2) { + console.error('[chelonia] Ignoring user error in handleEventError hook:', e2); + } + throw e; + } + finally { + if (message) { + missingDecryptionKeyIdsMap.delete(message); + } + } + } +}); +const eventsToReingest = []; +const reprocessDebounced = debounce((contractID) => sbp('chelonia/private/out/sync', contractID, { force: true }).catch((e) => { + console.error(`[chelonia] Error at reprocessDebounced for ${contractID}`, e); +}), 1000); +const handleEvent = { + checkMessageOrdering(message) { + const contractID = message.contractID(); + const hash = message.hash(); + const height = message.height(); + const state = sbp(this.config.stateSelector); + // The latest height we want to use is the one from `state.contracts` and + // not the one from the DB. The height in the state reflects the latest + // message that's been processed, which is desired here. On the other hand, + // the DB function includes the latest known message for that contract, + // which can be ahead of the latest message processed. 
+ const latestProcessedHeight = state.contracts[contractID]?.height; + if (!Number.isSafeInteger(height)) { + throw new ChelErrorDBBadPreviousHEAD(`Message ${hash} in contract ${contractID} has an invalid height.`); + } + // Avoid re-processing already processed messages + if (message.isFirstMessage() + // If this is the first message, the height is expected not to exist + ? latestProcessedHeight != null + // If this isn't the first message, the height must not be lower than the + // current message's height. The check is negated to handle NaN values + : !(latestProcessedHeight < height)) { + // The web client may sometimes get repeated messages. If strict ordering + // isn't enabled, instead of throwing we return false. + // On the other hand, the server must enforce strict ordering. + if (!this.config.strictOrdering) { + return false; + } + throw new ChelErrorAlreadyProcessed(`Message ${hash} with height ${height} in contract ${contractID} has already been processed. Current height: ${latestProcessedHeight}.`); + } + // If the message is from the future, add it to eventsToReingest + if ((latestProcessedHeight + 1) < height) { + if (this.config.strictOrdering) { + throw new ChelErrorDBBadPreviousHEAD(`Unexpected message ${hash} with height ${height} in contract ${contractID}: height is too high. Current height: ${latestProcessedHeight}.`); + } + // sometimes we simply miss messages, it's not clear why, but it happens + // in rare cases. 
So we attempt to re-sync this contract once + if (eventsToReingest.length > 100) { + throw new ChelErrorUnrecoverable('more than 100 different bad previousHEAD errors'); + } + if (!eventsToReingest.includes(hash)) { + console.warn(`[chelonia] WARN bad previousHEAD for ${message.description()}, will attempt to re-sync contract to reingest message`); + eventsToReingest.push(hash); + reprocessDebounced(contractID); + return false; // ignore the error for now + } + else { + console.error(`[chelonia] ERROR already attempted to reingest ${message.description()}, will not attempt again!`); + throw new ChelErrorDBBadPreviousHEAD(`Already attempted to reingest ${hash}`); + } + } + const reprocessIdx = eventsToReingest.indexOf(hash); + if (reprocessIdx !== -1) { + console.warn(`[chelonia] WARN: successfully reingested ${message.description()}`); + eventsToReingest.splice(reprocessIdx, 1); + } + }, + async processMutation(message, state, internalSideEffectStack) { + const contractID = message.contractID(); + if (message.isFirstMessage()) { + // Allow having _volatile but nothing else if this is the first message, + // as we should be starting off with a clean state + if (Object.keys(state).some(k => k !== '_volatile')) { + throw new ChelErrorUnrecoverable(`state for ${contractID} is already set`); + } + } + await sbp('chelonia/private/in/processMessage', message, state, internalSideEffectStack); + }, + processSideEffects(message, state) { + const opT = message.opType(); + if (![SPMessage.OP_ATOMIC, SPMessage.OP_ACTION_ENCRYPTED, SPMessage.OP_ACTION_UNENCRYPTED].includes(opT)) { + return; + } + const contractID = message.contractID(); + const manifestHash = message.manifest(); + const hash = message.hash(); + const height = message.height(); + const signingKeyId = message.signingKeyId(); + const callSideEffect = async (field) => { + const wv = this.config.unwrapMaybeEncryptedData(field); + if (!wv) + return; + let v = wv.data; + let innerSigningKeyId; + if (isSignedData(v)) { 
+ innerSigningKeyId = v.signingKeyId; + v = v.valueOf(); + } + const { action, data, meta } = v; + const mutation = { + data, + meta, + hash, + height, + contractID, + description: message.description(), + direction: message.direction(), + signingKeyId, + get signingContractID() { + return getContractIDfromKeyId(contractID, signingKeyId, state); + }, + innerSigningKeyId, + get innerSigningContractID() { + return getContractIDfromKeyId(contractID, innerSigningKeyId, state); + } + }; + return await sbp(`${manifestHash}/${action}/sideEffect`, mutation, state); + }; + const msg = Object(message.message()); + if (opT !== SPMessage.OP_ATOMIC) { + return callSideEffect(msg); + } + const reducer = (acc, [opT, opV]) => { + if ([SPMessage.OP_ACTION_ENCRYPTED, SPMessage.OP_ACTION_UNENCRYPTED].includes(opT)) { + acc.push(Object(opV)); + } + return acc; + }; + const actionsOpV = msg.reduce(reducer, []); + return Promise.allSettled(actionsOpV.map((action) => callSideEffect(action))).then((results) => { + const errors = results.filter((r) => r.status === 'rejected').map((r) => r.reason); + if (errors.length > 0) { + console.error('Side-effect errors', contractID, errors); + throw new AggregateError(errors, `Error at side effects for ${contractID}`); + } + }); + }, + async applyProcessResult({ message, state, contractState, processingErrored, postHandleEvent }) { + const contractID = message.contractID(); + const hash = message.hash(); + const height = message.height(); + await sbp('chelonia/db/addEntry', message); + if (!processingErrored) { + // Once side-effects are called, we apply changes to the state. + // This means, as mentioned above, that retrieving the contract state + // via the global state will yield incorrect results. Doing things in + // this order ensures that incomplete processing of events (i.e., process + // + side-effects), e.g., due to sudden failures (like power outages, + // Internet being disconnected, etc.) aren't persisted. 
This allows + // us to recover by re-processing the event when these sudden failures + // happen + this.config.reactiveSet(state, contractID, contractState); + try { + postHandleEvent?.(message); + } + catch (e) { + console.error(`[chelonia] ERROR '${e.name}' for ${message.description()} in event post-handling: ${e.message}`, e, { message: message.serialize() }); + } + } + // whether or not there was an exception, we proceed ahead with updating the head + // you can prevent this by throwing an exception in the processError hook + if (message.isFirstMessage()) { + const { type } = message.opValue(); + if (!has(state.contracts, contractID)) { + this.config.reactiveSet(state.contracts, contractID, Object.create(null)); + } + this.config.reactiveSet(state.contracts[contractID], 'type', type); + console.debug(`contract ${type} registered for ${contractID}`); + } + if (message.isKeyOp()) { + this.config.reactiveSet(state.contracts[contractID], 'previousKeyOp', hash); + } + this.config.reactiveSet(state.contracts[contractID], 'HEAD', hash); + this.config.reactiveSet(state.contracts[contractID], 'height', height); + // If there were decryption errors due to missing encryption keys, we store + // those key IDs. If those key IDs are later shared with us, we can re-sync + // the contract. Without this information, we can only guess whether a + // re-sync is needed or not. + // We do it here because the property is stored under `.contracts` instead + // of in the contract state itself, and this is where `.contracts` gets + // updated after handling a message. 
+ const missingDecryptionKeyIdsForMessage = missingDecryptionKeyIdsMap.get(message); + if (missingDecryptionKeyIdsForMessage) { + let missingDecryptionKeyIds = state.contracts[contractID].missingDecryptionKeyIds; + if (!missingDecryptionKeyIds) { + missingDecryptionKeyIds = []; + this.config.reactiveSet(state.contracts[contractID], 'missingDecryptionKeyIds', missingDecryptionKeyIds); + } + missingDecryptionKeyIdsForMessage.forEach(keyId => { + if (missingDecryptionKeyIds.includes(keyId)) + return; + missingDecryptionKeyIds.push(keyId); + }); + } + if (!this.subscriptionSet.has(contractID)) { + const entry = this.pending.find((entry) => entry?.contractID === contractID); + // we've successfully received it back, so remove it from expectation pending + if (entry) { + const index = this.pending.indexOf(entry); + if (index !== -1) { + this.pending.splice(index, 1); + } + } + this.subscriptionSet.add(contractID); + sbp('okTurtles.events/emit', CONTRACTS_MODIFIED, Array.from(this.subscriptionSet), { added: [contractID], removed: [] }); + } + if (!processingErrored) { + sbp('okTurtles.events/emit', hash, contractID, message); + sbp('okTurtles.events/emit', EVENT_HANDLED, contractID, message); + } + } +}; +const notImplemented = (v) => { + throw new Error(`chelonia: action not implemented to handle: ${JSON.stringify(v)}.`); +}; +// The code below represents different ways to dynamically load code at runtime, +// and the SES example shows how to sandbox runtime loaded code (although it doesn't +// work, see https://github.com/endojs/endo/issues/1207 for details). It's also not +// super important since we're loading signed contracts. 
+/* +// https://2ality.com/2019/10/eval-via-import.html +// Example: await import(esm`${source}`) +// const esm = ({ raw }, ...vals) => { +// return URL.createObjectURL(new Blob([String.raw({ raw }, ...vals)], { type: 'text/javascript' })) +// } + +// await loadScript.call(this, contractInfo.file, source, contractInfo.hash) +// .then(x => { +// console.debug(`loaded ${contractInfo.file}`) +// return x +// }) +// eslint-disable-next-line no-unused-vars +function loadScript (file, source, hash) { + return new Promise((resolve, reject) => { + const script = document.createElement('script') + // script.type = 'application/javascript' + script.type = 'module' + // problem with this is that scripts will step on each other's feet + script.text = source + // NOTE: this will work if the file route adds .header('Content-Type', 'application/javascript') + // script.src = `${this.config.connectionURL}/file/${hash}` + // this results in: "SyntaxError: import declarations may only appear at top level of a module" + // script.text = `(function () { + // ${source} + // })()` + script.onload = () => resolve(script) + script.onerror = (err) => reject(new Error(`${err || 'Error'} trying to load: ${file}`)) + document.getElementsByTagName('head')[0].appendChild(script) + }) +} + +// This code is cobbled together based on: +// https://github.com/endojs/endo/blob/master/packages/ses/test/test-import-cjs.js +// https://github.com/endojs/endo/blob/master/packages/ses/test/test-import.js +// const vm = await sesImportVM.call(this, `${this.config.connectionURL}/file/${contractInfo.hash}`) +// eslint-disable-next-line no-unused-vars +function sesImportVM (url): Promise { + // eslint-disable-next-line no-undef + const vm = new Compartment( + { + ...this.config.contracts.defaults.exposedGlobals, + console + }, + {}, // module map + { + resolveHook (spec, referrer) { + console.debug('resolveHook', { spec, referrer }) + return spec + }, + // 
eslint-disable-next-line require-await + async importHook (moduleSpecifier: string, ...args) { + const source = await this.config.fetch(moduleSpecifier).then(handleFetchResult('text')) + console.debug('importHook', { fetch: moduleSpecifier, args, source }) + const execute = (moduleExports, compartment, resolvedImports) => { + console.debug('execute called with:', { moduleExports, resolvedImports }) + const functor = compartment.evaluate( + `(function (require, exports, module, __filename, __dirname) { ${source} })` + // this doesn't seem to help with: https://github.com/endojs/endo/issues/1207 + // { __evadeHtmlCommentTest__: false, __rejectSomeDirectEvalExpressions__: false } + ) + const require_ = (importSpecifier) => { + console.debug('in-source require called with:', importSpecifier, 'keying:', resolvedImports) + const namespace = compartment.importNow(resolvedImports[importSpecifier]) + console.debug('got namespace:', namespace) + return namespace.default === undefined ? 
namespace : namespace.default + } + const module_ = { + get exports () { + return moduleExports + }, + set exports (newModuleExports) { + moduleExports.default = newModuleExports + } + } + functor(require_, moduleExports, module_, moduleSpecifier) + } + if (moduleSpecifier === '@common/common.mjs') { + return { + imports: [], + exports: ['Vue', 'L'], + execute + } + } else { + return { + imports: ['@common/common.mjs'], + exports: [], + execute + } + } + } + } + ) + // vm.evaluate(source) + return vm.import(url) +} +*/ diff --git a/dist/esm/local-selectors/index.d.mts b/dist/esm/local-selectors/index.d.mts new file mode 100644 index 0000000..d451d2b --- /dev/null +++ b/dist/esm/local-selectors/index.d.mts @@ -0,0 +1,2 @@ +declare const _default: string[]; +export default _default; diff --git a/dist/esm/local-selectors/index.mjs b/dist/esm/local-selectors/index.mjs new file mode 100644 index 0000000..5b21a80 --- /dev/null +++ b/dist/esm/local-selectors/index.mjs @@ -0,0 +1,119 @@ +// This file provides utility functions that are local regardless of whether +// Chelonia is running in a different context and calls are being forwarded +// using `chelonia/*` +import sbp from '@sbp/sbp'; +import { cloneDeep } from 'turtledash'; +import { CONTRACTS_MODIFIED, CONTRACTS_MODIFIED_READY, EVENT_HANDLED, EVENT_HANDLED_READY } from '../events.mjs'; +export default sbp('sbp/selectors/register', { + // This selector sets up event listeners on EVENT_HANDLED and CONTRACTS_MODIFIED + // to keep Chelonia state in sync with some external state (e.g., Vuex). + // This needs to be called from the context that owns this external state + // (e.g., the tab in which the app is running) and because 'full' Chelonia may + // be available in this context, we cannot use `chelonia/configure`. 
+ // _If there is no external state to be kept in sync with Chelonia, this selector doesn't need to be called_ + // + // For example, **if Chelonia is running on a service worker**, the following + // would be done. + // 1. The service worker calls `chelonia/configure` and forwards EVENT_HANDLED + // and CONTRACTS_MODIFIED events to all clients (tabs) + // Note: `chelonia/configure` is called by the context running Chelonia + // 2. Each tab uses `chelonia/*` to forward calls to Chelonia to the SW. + // Note: Except selectors defined in this file + // 3. Each tab calls this selector once to set up event listeners on EVENT_HANDLED + // and CONTRACTS_MODIFIED, which will keep each tab's state updated every + // time Chelonia handles an event. + 'chelonia/externalStateSetup': function ({ stateSelector, reactiveSet = Reflect.set.bind(Reflect), reactiveDel = Reflect.deleteProperty.bind(Reflect) }) { + this.stateSelector = stateSelector; + sbp('okTurtles.events/on', EVENT_HANDLED, (contractID, message) => { + // The purpose of putting things immediately into a queue is to have + // state mutations happen in a well-defined order. This is done for two + // purposes: + // 1. It avoids race conditions + // 2. It allows the app to use the EVENT_HANDLED queue to ensure that + // the SW state has been copied over to the local state. This is + // useful in the same sense that `chelonia/contract/wait` is useful + // (i.e., set up a barrier / sync checkpoint). 
+ sbp('okTurtles.eventQueue/queueEvent', EVENT_HANDLED, async () => { + const { contractState, cheloniaState } = await sbp('chelonia/contract/fullState', contractID); + const externalState = sbp(stateSelector); + if (cheloniaState) { + if (!externalState.contracts) { + reactiveSet(externalState, 'contracts', Object.create(null)); + } + reactiveSet(externalState.contracts, contractID, cloneDeep(cheloniaState)); + } + else if (externalState.contracts) { + reactiveDel(externalState.contracts, contractID); + } + if (contractState) { + reactiveSet(externalState, contractID, cloneDeep(contractState)); + } + else { + reactiveDel(externalState, contractID); + } + // This EVENT_HANDLED_READY event lets the current context (e.g., tab) + // know that an event has been processed _and_ committed to the state + // (as opposed to EVENT_HANDLED, which means the event was processed by + // _Chelonia_ but state changes may not be reflected in the current tab + // yet). + sbp('okTurtles.events/emit', EVENT_HANDLED_READY, contractID, message); + }); + }); + sbp('okTurtles.events/on', CONTRACTS_MODIFIED, (subscriptionSet, { added, removed, permanent }) => { + sbp('okTurtles.eventQueue/queueEvent', EVENT_HANDLED, async () => { + const states = added.length + ? 
await sbp('chelonia/contract/fullState', added) + : {}; + const vuexState = sbp('state/vuex/state'); + if (!vuexState.contracts) { + reactiveSet(vuexState, 'contracts', Object.create(null)); + } + removed.forEach((contractID) => { + if (permanent) { + reactiveSet(vuexState.contracts, contractID, null); + } + else { + reactiveDel(vuexState.contracts, contractID); + } + reactiveDel(vuexState, contractID); + }); + for (const contractID of added) { + const { contractState, cheloniaState } = states[contractID]; + if (cheloniaState) { + reactiveSet(vuexState.contracts, contractID, cloneDeep(cheloniaState)); + } + if (contractState) { + reactiveSet(vuexState, contractID, cloneDeep(contractState)); + } + } + sbp('okTurtles.events/emit', CONTRACTS_MODIFIED_READY, subscriptionSet, { added, removed }); + }); + }); + }, + // This function is similar in purpose to `chelonia/contract/wait`, except + // that it's also designed to take into account delays copying Chelonia state + // to an external state (e.g., when using `chelonia/externalStateSetup`). + 'chelonia/externalStateWait': async function (contractID) { + await sbp('chelonia/contract/wait', contractID); + const { cheloniaState } = await sbp('chelonia/contract/fullState', contractID); + const localState = sbp(this.stateSelector); + // If the current 'local' state has a height higher than or equal to the + // Chelonia height, we've processed all events and don't need to wait any + // longer. + if (!cheloniaState || cheloniaState.height <= localState.contracts[contractID]?.height) + return; + // Otherwise, listen for `EVENT_HANDLED_READY` events till we have reached + // the necessary height. 
+ return new Promise((resolve) => { + const removeListener = sbp('okTurtles.events/on', EVENT_HANDLED_READY, (cID) => { + if (cID !== contractID) + return; + const localState = sbp(this.stateSelector); + if (cheloniaState.height <= localState.contracts[contractID]?.height) { + resolve(); + removeListener(); + } + }); + }); + } +}); diff --git a/dist/esm/persistent-actions.d.mts b/dist/esm/persistent-actions.d.mts new file mode 100644 index 0000000..cd55d64 --- /dev/null +++ b/dist/esm/persistent-actions.d.mts @@ -0,0 +1,51 @@ +import '@sbp/okturtles.events'; +import sbp from '@sbp/sbp'; +declare const timer: unique symbol; +type SbpInvocation = Parameters; +export type UUIDV4 = `${string}-${string}-${string}-${string}-${string}`; +type PersistentActionOptions = { + errorInvocation?: SbpInvocation; + maxAttempts: number; + retrySeconds: number; + skipCondition?: SbpInvocation; + totalFailureInvocation?: SbpInvocation; +}; +export type PersistentActionStatus = { + attempting: boolean; + failedAttemptsSoFar: number; + lastError: string; + nextRetry: string; + resolved: boolean; +}; +export type PersistentActionError = { + id: UUIDV4; + error: Error; +}; +export type PersistentActionSuccess = { + id: UUIDV4; + result: unknown; +}; +export type PersistentActionSbpStatus = { + id: UUIDV4; + invocation: SbpInvocation; + attempting: boolean; + failedAttemptsSoFar: number; + lastError: string; + nextRetry: string; + resolved: boolean; +}; +export declare class PersistentAction { + id: UUIDV4; + invocation: SbpInvocation; + options: PersistentActionOptions; + status: PersistentActionStatus; + [timer]?: ReturnType; + constructor(invocation: SbpInvocation, options?: Partial); + attempt(): Promise; + cancel(): void; + handleError(error: Error): Promise; + handleSuccess(result: unknown): void; + trySBP(invocation: SbpInvocation | void): Promise; +} +declare const _default: string[]; +export default _default; diff --git a/dist/esm/persistent-actions.mjs 
b/dist/esm/persistent-actions.mjs new file mode 100644 index 0000000..545d073 --- /dev/null +++ b/dist/esm/persistent-actions.mjs @@ -0,0 +1,212 @@ +import '@sbp/okturtles.events'; +import sbp from '@sbp/sbp'; +import { PERSISTENT_ACTION_FAILURE, PERSISTENT_ACTION_SUCCESS, PERSISTENT_ACTION_TOTAL_FAILURE } from './events.mjs'; +// Using `Symbol` to prevent enumeration; this avoids JSON serialization. +const timer = Symbol('timer'); +const coerceToError = (arg) => { + if (arg && arg instanceof Error) + return arg; + console.warn(tag, 'Please use Error objects when throwing or rejecting'); + return new Error((typeof arg === 'string' ? arg : JSON.stringify(arg)) ?? 'undefined'); +}; +const defaultOptions = { + maxAttempts: Number.POSITIVE_INFINITY, + retrySeconds: 30 +}; +const tag = '[chelonia.persistentActions]'; +export class PersistentAction { + id; + invocation; + options; + status; + [timer]; + constructor(invocation, options = {}) { + this.id = crypto.randomUUID(); + this.invocation = invocation; + this.options = { ...defaultOptions, ...options }; + this.status = { + attempting: false, + failedAttemptsSoFar: 0, + lastError: '', + nextRetry: '', + resolved: false + }; + } + async attempt() { + // Bail out if the action is already attempting or resolved. + // TODO: should we also check whether the skipCondition call is pending? + if (this.status.attempting || this.status.resolved) + return; + if (await this.trySBP(this.options.skipCondition)) + this.cancel(); + // We need to check this again because cancel() could have been called while awaiting the trySBP call. 
+ if (this.status.resolved) + return; + try { + this.status.attempting = true; + const result = await sbp(...this.invocation); + this.status.attempting = false; + this.handleSuccess(result); + } + catch (error) { + this.status.attempting = false; + await this.handleError(coerceToError(error)); + } + } + cancel() { + if (this[timer]) + clearTimeout(this[timer]); + this.status.nextRetry = ''; + this.status.resolved = true; + } + async handleError(error) { + const { id, options, status } = this; + // Update relevant status fields before calling any optional code. + status.failedAttemptsSoFar++; + status.lastError = error.message; + const anyAttemptLeft = options.maxAttempts > status.failedAttemptsSoFar; + if (!anyAttemptLeft) + status.resolved = true; + status.nextRetry = anyAttemptLeft && !status.resolved + ? new Date(Date.now() + options.retrySeconds * 1e3).toISOString() + : ''; + // Perform any optional SBP invocation. + // The event has to be fired first for the action to be immediately removed from the list. + sbp('okTurtles.events/emit', PERSISTENT_ACTION_FAILURE, { error, id }); + await this.trySBP(options.errorInvocation); + if (!anyAttemptLeft) { + sbp('okTurtles.events/emit', PERSISTENT_ACTION_TOTAL_FAILURE, { error, id }); + await this.trySBP(options.totalFailureInvocation); + } + // Schedule a retry if appropriate. + if (status.nextRetry) { + // Note: there should be no older active timeout to clear. + this[timer] = setTimeout(() => { + this.attempt().catch((e) => { + console.error('Error attempting persistent action', id, e); + }); + }, this.options.retrySeconds * 1e3); + } + } + handleSuccess(result) { + const { id, status } = this; + status.lastError = ''; + status.nextRetry = ''; + status.resolved = true; + sbp('okTurtles.events/emit', PERSISTENT_ACTION_SUCCESS, { id, result }); + } + async trySBP(invocation) { + try { + return invocation ? 
await sbp(...invocation) : undefined; + } + catch (error) { + console.error(tag, coerceToError(error).message); + } + } +} +export default sbp('sbp/selectors/register', { + 'chelonia.persistentActions/_init'() { + this.actionsByID = Object.create(null); + this.checkDatabaseKey = () => { + if (!this.databaseKey) + throw new TypeError(`${tag} No database key configured`); + }; + sbp('okTurtles.events/on', PERSISTENT_ACTION_SUCCESS, ({ id }) => { + sbp('chelonia.persistentActions/cancel', id); + }); + sbp('okTurtles.events/on', PERSISTENT_ACTION_TOTAL_FAILURE, ({ id }) => { + sbp('chelonia.persistentActions/cancel', id); + }); + }, + // Cancels a specific action by its ID. + // The action won't be retried again, but an async action cannot be aborted if its promise is stil attempting. + async 'chelonia.persistentActions/cancel'(id) { + if (id in this.actionsByID) { + this.actionsByID[id].cancel(); + // Note: this renders the `.status` update in `.cancel()` meainingless, as + // the action will be immediately removed. TODO: Implement as periodic + // prune action so that actions are removed some time after completion. + // This way, one could implement action status reporting to clients. + delete this.actionsByID[id]; + return await sbp('chelonia.persistentActions/save'); + } + }, + // TODO: validation + 'chelonia.persistentActions/configure'({ databaseKey, options = {} }) { + this.databaseKey = databaseKey; + for (const key in options) { + if (key in defaultOptions) { + defaultOptions[key] = options[key]; + } + else { + throw new TypeError(`${tag} Unknown option: ${key}`); + } + } + }, + 'chelonia.persistentActions/enqueue'(...args) { + const ids = []; + for (const arg of args) { + const action = Array.isArray(arg) + ? 
new PersistentAction(arg) + : new PersistentAction(arg.invocation, arg); + this.actionsByID[action.id] = action; + ids.push(action.id); + } + sbp('chelonia.persistentActions/save').catch((e) => { + console.error('Error saving persistent actions', e); + }); + for (const id of ids) { + this.actionsByID[id].attempt().catch((e) => { + console.error('Error attempting persistent action', id, e); + }); + } + return ids; + }, + // Forces retrying a given persisted action immediately, rather than waiting for the scheduled retry. + // - 'status.failedAttemptsSoFar' will still be increased upon failure. + // - Does nothing if a retry is already running. + // - Does nothing if the action has already been resolved, rejected or cancelled. + 'chelonia.persistentActions/forceRetry'(id) { + if (id in this.actionsByID) { + return this.actionsByID[id].attempt(); + } + }, + // Loads and tries every stored persistent action under the configured database key. + async 'chelonia.persistentActions/load'() { + this.checkDatabaseKey(); + const storedActions = JSON.parse((await sbp('chelonia.db/get', this.databaseKey)) ?? '[]'); + for (const { id, invocation, options } of storedActions) { + this.actionsByID[id] = new PersistentAction(invocation, options); + // Use the stored ID instead of the autogenerated one. + // TODO: find a cleaner alternative. + this.actionsByID[id].id = id; + } + return sbp('chelonia.persistentActions/retryAll'); + }, + // Retry all existing persisted actions. + // TODO: add some delay between actions so as not to spam the server, + // or have a way to issue them all at once in a single network call. + 'chelonia.persistentActions/retryAll'() { + return Promise.allSettled(Object.keys(this.actionsByID).map(id => sbp('chelonia.persistentActions/forceRetry', id))); + }, + // Updates the database version of the attempting action list. 
+ 'chelonia.persistentActions/save'() { + this.checkDatabaseKey(); + return sbp('chelonia.db/set', this.databaseKey, JSON.stringify(Object.values(this.actionsByID))); + }, + 'chelonia.persistentActions/status'() { + return Object.values(this.actionsByID) + .map((action) => ({ id: action.id, invocation: action.invocation, ...action.status })); + }, + // Pauses every currently loaded action, and removes them from memory. + // Note: persistent storage is not affected, so that these actions can be later loaded again and retried. + 'chelonia.persistentActions/unload'() { + for (const id in this.actionsByID) { + // Clear the action's timeout, but don't cancel it so that it can later resumed. + if (this.actionsByID[id][timer]) { + clearTimeout(this.actionsByID[id][timer]); + } + delete this.actionsByID[id]; + } + } +}); diff --git a/dist/esm/persistent-actions.test.d.mts b/dist/esm/persistent-actions.test.d.mts new file mode 100644 index 0000000..c717047 --- /dev/null +++ b/dist/esm/persistent-actions.test.d.mts @@ -0,0 +1,2 @@ +import './db.mjs'; +import './persistent-actions.mjs'; diff --git a/dist/esm/persistent-actions.test.mjs b/dist/esm/persistent-actions.test.mjs new file mode 100644 index 0000000..e822431 --- /dev/null +++ b/dist/esm/persistent-actions.test.mjs @@ -0,0 +1,206 @@ +// FIXME: `Error: unsafe must be called before registering selector` when Mocha reloads the file. +import sbp from '@sbp/sbp'; +import assert from 'node:assert'; +import { test } from 'node:test'; +import './db.mjs'; +import { PERSISTENT_ACTION_FAILURE, PERSISTENT_ACTION_SUCCESS, PERSISTENT_ACTION_TOTAL_FAILURE } from './events.mjs'; +import './persistent-actions.mjs'; +// Necessary to avoid 'JSON.stringify' errors since Node timeouts are circular objects, whereas browser timeouts are just integers. 
+setTimeout(() => { }).constructor.prototype.toJSON = () => undefined; +sbp('sbp/selectors/register', { + call(fn, ...args) { + return fn(...args); + }, + log(msg) { + console.log(msg); + }, + rejectAfter100ms(arg) { + return new Promise((resolve, reject) => { + setTimeout(() => reject(arg), 100); + }); + }, + resolveAfter100ms(arg) { + return new Promise((resolve) => { + setTimeout(() => resolve(arg), 100); + }); + }, + returnImmediately(arg) { + return arg; + }, + throwImmediately(arg) { + throw arg; + } +}); +const createRandomError = () => new Error(`Bad number: ${String(Math.random())}`); +const getActionStatus = (id) => sbp('chelonia.persistentActions/status').find((obj) => obj.id === id); +const isActionRemoved = (id) => !sbp('chelonia.persistentActions/status').find((obj) => obj.id === id); +// Custom `configure` options for tests. +// Mocha has a default 2000ms test timeout, therefore we'll use short delays. +const testOptions = { + maxAttempts: 3, + retrySeconds: 0.5 +}; +test('Test persistent actions', async (t) => { + const spies = { + returnImmediately: t.mock.fn(sbp('sbp/selectors/fn', 'returnImmediately')) + }; + await test('should configure', function () { + sbp('chelonia.persistentActions/configure', { + databaseKey: 'test-key', + options: testOptions + }); + }); + await test('should enqueue without immediately attempting', function () { + // Prepare actions to enqueue. Random numbers are used to make invocations different. + const args = [ + // Basic syntax. + ['returnImmediately', Math.random()], + // Minimal option syntax. + { + invocation: ['returnImmediately', Math.random()] + }, + // Full option syntax. 
+ { + errorInvocation: ['log', 'Action n°3 failed'], + invocation: ['returnImmediately', Math.random()], + maxAttempts: 4, + retrySeconds: 5, + skipCondition: ['test'], + totalFailureInvocation: ['log', 'Action n°3 totally failed'] + } + ]; + const ids = sbp('chelonia.persistentActions/enqueue', ...args); + assert(Array.isArray(ids)); + assert(ids.length === args.length); + // Check the actions have been correctly queued. + ids.forEach((id, index) => { + const arg = args[index]; + const status = getActionStatus(id); + assert.strictEqual(status.id, id); + assert.deepEqual(status.invocation, Array.isArray(arg) ? arg : arg.invocation); + assert.strictEqual(status.attempting, false); + assert.strictEqual(status.failedAttemptsSoFar, 0); + assert.strictEqual(status.lastError, ''); + assert.strictEqual(status.nextRetry, ''); + assert.strictEqual(status.resolved, false); + }); + // Check the actions have NOT been tried yet. + assert.strictEqual(spies.returnImmediately.mock.callCount(), 0); + }); + await test('should emit a success event and remove the action', async () => { + // Prepare actions using both sync and async invocations. + // TODO: maybe the async case is enough, which would make the code simpler. + const randomNumbers = [Math.random(), Math.random()]; + const invocations = [ + ['resolveAfter100ms', randomNumbers[0]], + ['returnImmediately', randomNumbers[1]] + ]; + const ids = sbp('chelonia.persistentActions/enqueue', ...invocations); + await Promise.all(ids.map((id, index) => new Promise((resolve, reject) => { + // Registers a success handler for each received id. + sbp('okTurtles.events/on', PERSISTENT_ACTION_SUCCESS, function handler(details) { + if (details.id !== id) + return; + try { + // Check the action has actually been called and its result is correct. + assert.strictEqual(details.result, randomNumbers[index]); + // Check the action has been correctly removed. 
+ assert(isActionRemoved(id)); + // Wait a little to make sure the action isn't going to be retried. + setTimeout(resolve, (testOptions.retrySeconds + 1) * 1e3); + } + catch (err) { + reject(err); + } + finally { + sbp('okTurtles.events/off', PERSISTENT_ACTION_SUCCESS, handler); + } + }); + }))); + }); + await test('should emit a failure event and schedule a retry', function () { + const ourError = createRandomError(); + const invocation = ['rejectAfter100ms', ourError]; + const [id] = sbp('chelonia.persistentActions/enqueue', invocation); + return new Promise((resolve, reject) => { + sbp('okTurtles.events/once', PERSISTENT_ACTION_FAILURE, (details) => { + try { + assert.strictEqual(details.id, id); + assert.strictEqual(details.error, ourError); + // Check the action status. + const status = getActionStatus(id); + assert.strictEqual(status.failedAttemptsSoFar, 1); + assert.strictEqual(status.lastError, ourError.message); + assert.strictEqual(status.resolved, false); + // Check a retry has been scheduled. 
+ assert(new Date(status.nextRetry).getTime() - Date.now() <= testOptions.retrySeconds * 1e3); + resolve(); + } + catch (err) { + reject(err); + } + }); + }); + }); + await test('should emit N failure events, then a total failure event and remove the action (sync)', () => { + const ourError = createRandomError(); + const invocation = ['throwImmediately', ourError]; + return e2eFailureTest(invocation, ourError); + }); + await test('should emit N failure events, then a total failure event and remove the action (async)', () => { + const ourError = createRandomError(); + const invocation = ['rejectAfter100ms', ourError]; + return e2eFailureTest(invocation, ourError); + }); + await test('should handle non-Error failures gracefully', () => { + const ourError = 'not a real error'; + const invocation = ['rejectAfter100ms', ourError]; + return e2eFailureTest(invocation, ourError); + }); + function e2eFailureTest(invocation, ourError) { + const errorInvocationSpy = t.mock.fn(); + const errorInvocation = ['call', errorInvocationSpy]; + const [id] = sbp('chelonia.persistentActions/enqueue', { invocation, errorInvocation }); + return new Promise((resolve, reject) => { + let failureEventCounter = 0; + sbp('okTurtles.events/on', PERSISTENT_ACTION_FAILURE, (details) => { + if (details.id !== id) + return; + failureEventCounter++; + try { + assert(failureEventCounter <= testOptions.maxAttempts, '1'); + // Check the event handler was called before the corresponding SBP invocation. + assert.strictEqual(failureEventCounter, errorInvocationSpy.mock.callCount() + 1, '2'); + assert.strictEqual(details.error.message, ourError?.message ?? 
ourError, '3'); + } + catch (err) { + reject(err); + } + }); + sbp('okTurtles.events/on', PERSISTENT_ACTION_TOTAL_FAILURE, (details) => { + if (details.id !== id) + return; + try { + assert.strictEqual(failureEventCounter, testOptions.maxAttempts, '3'); + assert.strictEqual(errorInvocationSpy.mock.callCount(), testOptions.maxAttempts, '4'); + assert.strictEqual(details.error.message, ourError?.message ?? ourError, '5'); + assert(isActionRemoved(id), '6'); + resolve(); + } + catch (err) { + reject(err); + } + }); + }); + } + await test('should cancel and remove the given action', function () { + return new Promise((resolve, reject) => { + // This action will reject the promise and fail the test if it ever gets tried. + const [id] = sbp('chelonia.persistentActions/enqueue', ['call', reject]); + sbp('chelonia.persistentActions/cancel', id); + assert(isActionRemoved(id)); + // Wait half a second to be sure the action isn't going to be tried despite being removed. + setTimeout(resolve, 500); + }); + }); +}); diff --git a/dist/esm/presets.d.mts b/dist/esm/presets.d.mts new file mode 100644 index 0000000..3a8b619 --- /dev/null +++ b/dist/esm/presets.d.mts @@ -0,0 +1,8 @@ +export declare const SERVER: { + acceptAllMessages: boolean; + skipActionProcessing: boolean; + skipSideEffects: boolean; + skipDecryptionAttempts: boolean; + strictProcessing: boolean; + strictOrdering: boolean; +}; diff --git a/dist/esm/presets.mjs b/dist/esm/presets.mjs new file mode 100644 index 0000000..695e86e --- /dev/null +++ b/dist/esm/presets.mjs @@ -0,0 +1,22 @@ +// Right now, we only have a single preset, for the server. If this remains the +// case and only the server is special regarding configuration, consider +// introducing a `server: true` key to `chelonia/confgure` instead. 
+export const SERVER = { + // We don't check the subscriptionSet in the server because we accpt new + // contract registrations, and are also not subcribed to contracts the same + // way clients are + acceptAllMessages: true, + // The server also doesn't process actions + skipActionProcessing: true, + // The previous setting implies this one, which we set to be on the safe side + skipSideEffects: true, + // Changes the behaviour of unwrapMaybeEncryptedData so that it never decrypts. + // Mostly useful for the server, to avoid filling up the logs and for faster + // execution. + skipDecryptionAttempts: true, + // If an error occurs during processing, the message is rejected rather than + // ignored + strictProcessing: true, + // The server expects events to be received in order (no past or future events) + strictOrdering: true +}; diff --git a/dist/esm/pubsub/index.d.mts b/dist/esm/pubsub/index.d.mts new file mode 100644 index 0000000..02a4972 --- /dev/null +++ b/dist/esm/pubsub/index.d.mts @@ -0,0 +1,220 @@ +import '@sbp/okturtles.events'; +import type { JSONObject, JSONType } from '../types.mjs'; +export declare const NOTIFICATION_TYPE: Readonly<{ + ENTRY: "entry"; + DELETION: "deletion"; + KV: "kv"; + KV_FILTER: "kv_filter"; + PING: "ping"; + PONG: "pong"; + PUB: "pub"; + SUB: "sub"; + UNSUB: "unsub"; + VERSION_INFO: "version_info"; +}>; +export declare const REQUEST_TYPE: Readonly<{ + PUB: "pub"; + SUB: "sub"; + UNSUB: "unsub"; + PUSH_ACTION: "push_action"; + KV_FILTER: "kv_filter"; +}>; +export declare const RESPONSE_TYPE: Readonly<{ + ERROR: "error"; + OK: "ok"; +}>; +export declare const PUSH_SERVER_ACTION_TYPE: Readonly<{ + SEND_PUBLIC_KEY: "send-public-key"; + STORE_SUBSCRIPTION: "store-subscription"; + DELETE_SUBSCRIPTION: "delete-subscription"; + SEND_PUSH_NOTIFICATION: "send-push-notification"; +}>; +export type NotificationTypeEnum = typeof NOTIFICATION_TYPE[keyof typeof NOTIFICATION_TYPE]; +export type RequestTypeEnum = typeof REQUEST_TYPE[keyof typeof 
REQUEST_TYPE]; +export type ResponseTypeEnum = typeof RESPONSE_TYPE[keyof typeof RESPONSE_TYPE]; +type TimeoutID = ReturnType; +export type Options = { + logPingMessages: boolean; + pingTimeout: number; + maxReconnectionDelay: number; + maxRetries: number; + minReconnectionDelay: number; + reconnectOnDisconnection: boolean; + reconnectOnOnline: boolean; + reconnectOnTimeout: boolean; + reconnectionDelayGrowFactor: number; + timeout: number; + manual?: boolean; + handlers?: Partial; + messageHandlers?: Partial; +}; +export type Message = { + [key: string]: JSONType; + type: string; +}; +export type PubSubClient = { + connectionTimeoutID: TimeoutID | undefined; + connectionTimeUsed?: number; + customEventHandlers: Partial; + failedConnectionAttempts: number; + isLocal: boolean; + isNew: boolean; + listeners: ClientEventHandlers; + messageHandlers: MessageHandlers; + nextConnectionAttemptDelayID: TimeoutID | undefined; + options: Options; + pendingSubscriptionSet: Set; + pendingUnsubscriptionSet: Set; + pingTimeoutID: TimeoutID | undefined; + shouldReconnect: boolean; + socket: WebSocket | null; + subscriptionSet: Set; + kvFilter: Map; + url: string; + clearAllTimers(this: PubSubClient): void; + connect(this: PubSubClient): void; + destroy(this: PubSubClient): void; + pub(this: PubSubClient, channelID: string, data: JSONType): void; + scheduleConnectionAttempt(this: PubSubClient): void; + sub(this: PubSubClient, channelID: string): void; + unsub(this: PubSubClient, channelID: string): void; + getNextRandomDelay(this: PubSubClient): number; + setKvFilter(this: PubSubClient, channelID: string, kvFilter?: string[]): void; +}; +type ClientEventHandlers = { + close(this: PubSubClient, event: CloseEvent): void; + error(this: PubSubClient, event: Event): void; + message(this: PubSubClient, event: MessageEvent): void; + offline(this: PubSubClient, event: Event): void; + online(this: PubSubClient, event: Event): void; + open(this: PubSubClient, event: Event): void; + 
'reconnection-attempt'(this: PubSubClient, event: CustomEvent): void; + 'reconnection-succeeded'(this: PubSubClient, event: CustomEvent): void; + 'reconnection-failed'(this: PubSubClient, event: CustomEvent): void; + 'reconnection-scheduled'(this: PubSubClient, event: CustomEvent): void; + 'subscription-succeeded'(this: PubSubClient, event: CustomEvent): void; +}; +type MessageHandlers = { + [NOTIFICATION_TYPE.ENTRY](this: PubSubClient, msg: { + data: JSONType; + type: string; + [x: string]: unknown; + }): void; + [NOTIFICATION_TYPE.PING](this: PubSubClient, msg: { + data: JSONType; + }): void; + [NOTIFICATION_TYPE.PUB](this: PubSubClient, msg: { + channelID: string; + data: JSONType; + }): void; + [NOTIFICATION_TYPE.KV](this: PubSubClient, msg: { + channelID: string; + key: string; + data: JSONType; + }): void; + [NOTIFICATION_TYPE.SUB](this: PubSubClient, msg: { + channelID: string; + type: string; + data: JSONType; + }): void; + [NOTIFICATION_TYPE.UNSUB](this: PubSubClient, msg: { + channelID: string; + type: string; + data: JSONType; + }): void; + [RESPONSE_TYPE.ERROR](this: PubSubClient, msg: { + data: { + type: string; + channelID: string; + data: JSONType; + reason: string; + actionType?: string; + message?: string; + }; + }): void; + [RESPONSE_TYPE.OK](this: PubSubClient, msg: { + data: { + type: string; + channelID: string; + }; + }): void; +}; +export type PubMessage = { + type: 'pub'; + channelID: string; + data: JSONType; +}; +export type SubMessage = { + [key: string]: JSONType; + type: 'sub'; + channelID: string; +} & { + kvFilter?: Array; +}; +export type UnsubMessage = { + [key: string]: JSONType; + type: 'unsub'; + channelID: string; +}; +export declare const PUBSUB_ERROR = "pubsub-error"; +export declare const PUBSUB_RECONNECTION_ATTEMPT = "pubsub-reconnection-attempt"; +export declare const PUBSUB_RECONNECTION_FAILED = "pubsub-reconnection-failed"; +export declare const PUBSUB_RECONNECTION_SCHEDULED = "pubsub-reconnection-scheduled"; +export 
declare const PUBSUB_RECONNECTION_SUCCEEDED = "pubsub-reconnection-succeeded"; +export declare const PUBSUB_SUBSCRIPTION_SUCCEEDED = "pubsub-subscription-succeeded"; +/** + * Creates a pubsub client instance. + * + * @param {string} url - A WebSocket URL to connect to. + * @param {Object?} options + * {object?} handlers - Custom handlers for WebSocket events. + * {boolean?} logPingMessages - Whether to log received pings. + * {boolean?} manual - Whether the factory should call 'connect()' automatically. + * Also named 'autoConnect' or 'startClosed' in other libraries. + * {object?} messageHandlers - Custom handlers for different message types. + * {number?} pingTimeout=45_000 - How long to wait for the server to send a ping, in milliseconds. + * {boolean?} reconnectOnDisconnection=true - Whether to reconnect after a server-side disconnection. + * {boolean?} reconnectOnOnline=true - Whether to reconnect after coming back online. + * {boolean?} reconnectOnTimeout=false - Whether to reconnect after a connection timeout. + * {number?} timeout=5_000 - Connection timeout duration in milliseconds. 
+ * @returns {PubSubClient} + */ +export declare function createClient(url: string, options?: Partial): PubSubClient; +export declare function createMessage(type: string, data: JSONType, meta?: object | null | undefined): { + type: string; + data: JSONType; + [x: string]: unknown; +}; +export declare function createKvMessage(channelID: string, key: string, data: JSONType): string; +export declare function createPubMessage(channelID: string, data: JSONType): string; +export declare function createRequest(type: RequestTypeEnum, data: JSONObject): string; +export declare const messageParser: (data: string) => Message; +declare const _default: { + NOTIFICATION_TYPE: Readonly<{ + ENTRY: "entry"; + DELETION: "deletion"; + KV: "kv"; + KV_FILTER: "kv_filter"; + PING: "ping"; + PONG: "pong"; + PUB: "pub"; + SUB: "sub"; + UNSUB: "unsub"; + VERSION_INFO: "version_info"; + }>; + REQUEST_TYPE: Readonly<{ + PUB: "pub"; + SUB: "sub"; + UNSUB: "unsub"; + PUSH_ACTION: "push_action"; + KV_FILTER: "kv_filter"; + }>; + RESPONSE_TYPE: Readonly<{ + ERROR: "error"; + OK: "ok"; + }>; + createClient: typeof createClient; + createMessage: typeof createMessage; + createRequest: typeof createRequest; +}; +export default _default; diff --git a/dist/esm/pubsub/index.mjs b/dist/esm/pubsub/index.mjs new file mode 100644 index 0000000..50e2b5a --- /dev/null +++ b/dist/esm/pubsub/index.mjs @@ -0,0 +1,629 @@ +/* eslint-disable @typescript-eslint/no-this-alias */ +import '@sbp/okturtles.events'; +import sbp from '@sbp/sbp'; +// ====== Enums ====== // +export const NOTIFICATION_TYPE = Object.freeze({ + ENTRY: 'entry', + DELETION: 'deletion', + KV: 'kv', + KV_FILTER: 'kv_filter', + PING: 'ping', + PONG: 'pong', + PUB: 'pub', + SUB: 'sub', + UNSUB: 'unsub', + VERSION_INFO: 'version_info' +}); +export const REQUEST_TYPE = Object.freeze({ + PUB: 'pub', + SUB: 'sub', + UNSUB: 'unsub', + PUSH_ACTION: 'push_action', + KV_FILTER: 'kv_filter' +}); +export const RESPONSE_TYPE = Object.freeze({ + ERROR: 'error', 
+ OK: 'ok' +}); +export const PUSH_SERVER_ACTION_TYPE = Object.freeze({ + SEND_PUBLIC_KEY: 'send-public-key', + STORE_SUBSCRIPTION: 'store-subscription', + DELETE_SUBSCRIPTION: 'delete-subscription', + SEND_PUSH_NOTIFICATION: 'send-push-notification' +}); +// TODO: verify these are good defaults +const defaultOptions = { + logPingMessages: process.env.NODE_ENV === 'development' && !process.env.CI, + pingTimeout: 45000, + maxReconnectionDelay: 60000, + maxRetries: 10, + minReconnectionDelay: 500, + reconnectOnDisconnection: true, + reconnectOnOnline: true, + // Defaults to false to avoid reconnection attempts in case the server doesn't + // respond because of a failed authentication. + reconnectOnTimeout: false, + reconnectionDelayGrowFactor: 2, + timeout: 60000 +}; +// ====== Event name constants ====== // +export const PUBSUB_ERROR = 'pubsub-error'; +export const PUBSUB_RECONNECTION_ATTEMPT = 'pubsub-reconnection-attempt'; +export const PUBSUB_RECONNECTION_FAILED = 'pubsub-reconnection-failed'; +export const PUBSUB_RECONNECTION_SCHEDULED = 'pubsub-reconnection-scheduled'; +export const PUBSUB_RECONNECTION_SUCCEEDED = 'pubsub-reconnection-succeeded'; +export const PUBSUB_SUBSCRIPTION_SUCCEEDED = 'pubsub-subscription-succeeded'; +// ====== API ====== // +/** + * Creates a pubsub client instance. + * + * @param {string} url - A WebSocket URL to connect to. + * @param {Object?} options + * {object?} handlers - Custom handlers for WebSocket events. + * {boolean?} logPingMessages - Whether to log received pings. + * {boolean?} manual - Whether the factory should call 'connect()' automatically. + * Also named 'autoConnect' or 'startClosed' in other libraries. + * {object?} messageHandlers - Custom handlers for different message types. + * {number?} pingTimeout=45_000 - How long to wait for the server to send a ping, in milliseconds. + * {boolean?} reconnectOnDisconnection=true - Whether to reconnect after a server-side disconnection. 
+ * {boolean?} reconnectOnOnline=true - Whether to reconnect after coming back online. + * {boolean?} reconnectOnTimeout=false - Whether to reconnect after a connection timeout. + * {number?} timeout=5_000 - Connection timeout duration in milliseconds. + * @returns {PubSubClient} + */ +export function createClient(url, options = {}) { + const client = { + customEventHandlers: options.handlers || {}, + // The current number of connection attempts that failed. + // Reset to 0 upon successful connection. + // Used to compute how long to wait before the next reconnection attempt. + failedConnectionAttempts: 0, + isLocal: /\/\/(localhost|127\.0\.0\.1)([:?/]|$)/.test(url), + // True if this client has never been connected yet. + isNew: true, + listeners: Object.create(null), + messageHandlers: { ...defaultMessageHandlers, ...options.messageHandlers }, + nextConnectionAttemptDelayID: undefined, + options: { ...defaultOptions, ...options }, + // Requested subscriptions for which we didn't receive a response yet. + pendingSubscriptionSet: new Set(), + pendingUnsubscriptionSet: new Set(), + pingTimeoutID: undefined, + shouldReconnect: true, + // The underlying WebSocket object. + // A new one is necessary for every connection or reconnection attempt. + socket: null, + subscriptionSet: new Set(), + kvFilter: new Map(), + connectionTimeoutID: undefined, + url: url.replace(/^http/, 'ws'), + ...publicMethods + }; + // Create and save references to reusable event listeners. + // Every time a new underlying WebSocket object will be created for this + // client instance, these event listeners will be detached from the older + // socket then attached to the new one, hereby avoiding both unnecessary + // allocations and garbage collections of a bunch of functions every time. + // Another benefit is the ability to patch the client protocol at runtime by + // updating the client's custom event handler map. 
+ for (const name of Object.keys(defaultClientEventHandlers)) { + client.listeners[name] = (event) => { + try { + // Use `.call()` to pass the client via the 'this' binding. + ; + defaultClientEventHandlers[name].call(client, event); + client.customEventHandlers[name]?.call(client, event); + } + catch (error) { + // Do not throw any error but emit an `error` event instead. + sbp('okTurtles.events/emit', PUBSUB_ERROR, client, error?.message); + } + }; + } + // Add global event listeners before the first connection. + if (typeof self === 'object' && self instanceof EventTarget) { + for (const name of globalEventNames) { + globalEventMap.set(name, client.listeners[name]); + } + } + if (!client.options.manual) { + client.connect(); + } + return client; +} +export function createMessage(type, data, meta) { + const message = { ...meta, type, data }; + let string; + const stringify = function () { + if (!string) + string = JSON.stringify(this); + return string; + }; + Object.defineProperties(message, { + [Symbol.toPrimitive]: { + value: stringify + } + }); + return message; +} +export function createKvMessage(channelID, key, data) { + return JSON.stringify({ type: NOTIFICATION_TYPE.KV, channelID, key, data }); +} +export function createPubMessage(channelID, data) { + return JSON.stringify({ type: NOTIFICATION_TYPE.PUB, channelID, data }); +} +export function createRequest(type, data) { + // Had to use Object.assign() instead of object spreading to make Flow happy. + return JSON.stringify(Object.assign({ type }, data)); +} +// These handlers receive the PubSubClient instance through the `this` binding. +const defaultClientEventHandlers = { + // Emitted when the connection is closed. + close(event) { + const client = this; + console.debug('[pubsub] Event: close', event.code, event.reason); + client.failedConnectionAttempts++; + if (client.socket) { + // Remove event listeners to avoid memory leaks. 
+ for (const name of socketEventNames) { + client.socket.removeEventListener(name, client.listeners[name]); + } + } + client.socket = null; + client.clearAllTimers(); + // This has been commented out to make the client always try to reconnect. + // See https://github.com/okTurtles/group-income/issues/1246 + /* + // See "Status Codes" https://tools.ietf.org/html/rfc6455#section-7.4 + switch (event.code) { + // TODO: verify that this list of codes is correct. + case 1000: case 1002: case 1003: case 1007: case 1008: { + client.shouldReconnect = false + break + } + default: break + } + */ + // If we should reconnect then consider our current subscriptions as pending again, + // waiting to be restored upon reconnection. + if (client.shouldReconnect) { + client.subscriptionSet.forEach((channelID) => { + // Skip contracts from which we had to unsubscribe anyway. + if (!client.pendingUnsubscriptionSet.has(channelID)) { + client.pendingSubscriptionSet.add(channelID); + } + }); + } + // We are no longer subscribed to any contracts since we are now disconnected. + client.subscriptionSet.clear(); + client.pendingUnsubscriptionSet.clear(); + if (client.shouldReconnect && client.options.reconnectOnDisconnection) { + if (client.failedConnectionAttempts > client.options.maxRetries) { + sbp('okTurtles.events/emit', PUBSUB_RECONNECTION_FAILED, client); + } + else { + // If we are definetely offline then do not try to reconnect now, + // unless the server is local. + if (!isDefinetelyOffline() || client.isLocal) { + client.scheduleConnectionAttempt(); + } + } + } + }, + // Emitted when an error has occured. + // The socket will be closed automatically by the engine if necessary. + error(event) { + const client = this; + // Not all error events should be logged with console.error, for example every + // failed connection attempt generates one such event. 
+ console.warn('[pubsub] Event: error', event); + clearTimeout(client.pingTimeoutID); + }, + // Emitted when a message is received. + // The connection will be terminated if the message is malformed or has an + // unexpected data type (e.g. binary instead of text). + message(event) { + const client = this; + const { data } = event; + if (typeof data !== 'string') { + sbp('okTurtles.events/emit', PUBSUB_ERROR, client, { + message: `Wrong data type: ${typeof data}` + }); + return client.destroy(); + } + let msg = { type: '' }; + try { + msg = messageParser(data); + } + catch (error) { + sbp('okTurtles.events/emit', PUBSUB_ERROR, client, { + message: `Malformed message: ${error?.message}` + }); + return client.destroy(); + } + const handler = client.messageHandlers[msg.type]; + if (handler) { + handler.call(client, msg); + } + else { + throw new Error(`Unhandled message type: ${msg.type}`); + } + }, + offline() { + console.info('[pubsub] Event: offline'); + const client = this; + client.clearAllTimers(); + // Reset the connection attempt counter so that we'll start a new + // reconnection loop when we are back online. + client.failedConnectionAttempts = 0; + client.socket?.close(); + }, + online() { + console.info('[pubsub] Event: online'); + const client = this; + if (client.options.reconnectOnOnline && client.shouldReconnect) { + if (!client.socket) { + client.failedConnectionAttempts = 0; + client.scheduleConnectionAttempt(); + } + } + }, + // Emitted when the connection is established. + open() { + console.debug('[pubsub] Event: open'); + const client = this; + const { options } = this; + client.connectionTimeUsed = undefined; + client.clearAllTimers(); + sbp('okTurtles.events/emit', PUBSUB_RECONNECTION_SUCCEEDED, client); + // Set it to -1 so that it becomes 0 on the next `close` event. + client.failedConnectionAttempts = -1; + client.isNew = false; + // Setup a ping timeout if required. 
+ // It will close the connection if we don't get any message from the server. + if (options.pingTimeout > 0 && options.pingTimeout < Infinity) { + client.pingTimeoutID = setTimeout(() => { + client.socket?.close(); + }, options.pingTimeout); + } + // Send any pending subscription request. + client.pendingSubscriptionSet.forEach((channelID) => { + const kvFilter = this.kvFilter.get(channelID); + client.socket?.send(createRequest(REQUEST_TYPE.SUB, kvFilter ? { channelID, kvFilter } : { channelID })); + }); + // There should be no pending unsubscription since we just got connected. + }, + 'reconnection-attempt'() { + console.info('[pubsub] Trying to reconnect...'); + }, + 'reconnection-succeeded'() { + console.info('[pubsub] Connection re-established'); + }, + 'reconnection-failed'() { + console.warn('[pubsub] Reconnection failed'); + const client = this; + client.destroy(); + }, + 'reconnection-scheduled'(event) { + const { delay, nth } = event.detail; + console.info(`[pubsub] Scheduled connection attempt ${nth} in ~${delay} ms`); + }, + 'subscription-succeeded'(event) { + const { channelID } = event.detail; + console.debug(`[pubsub] Subscribed to channel ${channelID}`); + } +}; +// These handlers receive the PubSubClient instance through the `this` binding. +const defaultMessageHandlers = { + [NOTIFICATION_TYPE.ENTRY](msg) { + console.debug('[pubsub] Received ENTRY:', msg); + }, + [NOTIFICATION_TYPE.PING]({ data }) { + const client = this; + if (client.options.logPingMessages) { + console.debug(`[pubsub] Ping received in ${Date.now() - Number(data)} ms`); + } + // Reply with a pong message using the same data. + // TODO: Type coercion to string because we actually support passing this + // object type, but the correct TypeScript type hasn't been written. + client.socket?.send(createMessage(NOTIFICATION_TYPE.PONG, data)); + // Refresh the ping timer, waiting for the next ping. 
+ clearTimeout(client.pingTimeoutID); + client.pingTimeoutID = setTimeout(() => { + client.socket?.close(); + }, client.options.pingTimeout); + }, + [NOTIFICATION_TYPE.PUB]({ channelID, data }) { + console.log(`[pubsub] Received data from channel ${channelID}:`, data); + // No need to reply. + }, + [NOTIFICATION_TYPE.KV]({ channelID, key, data }) { + console.log(`[pubsub] Received KV update from channel ${channelID} ${key}:`, data); + // No need to reply. + }, + [NOTIFICATION_TYPE.SUB](msg) { + console.debug(`[pubsub] Ignoring ${msg.type} message:`, msg.data); + }, + [NOTIFICATION_TYPE.UNSUB](msg) { + console.debug(`[pubsub] Ignoring ${msg.type} message:`, msg.data); + }, + [RESPONSE_TYPE.ERROR]({ data }) { + const { type, channelID, reason } = data; + console.warn(`[pubsub] Received ERROR response for ${type} request to ${channelID}`); + const client = this; + switch (type) { + case REQUEST_TYPE.SUB: { + console.warn(`[pubsub] Could not subscribe to ${channelID}: ${reason}`); + client.pendingSubscriptionSet.delete(channelID); + break; + } + case REQUEST_TYPE.UNSUB: { + console.warn(`[pubsub] Could not unsubscribe from ${channelID}: ${reason}`); + client.pendingUnsubscriptionSet.delete(channelID); + break; + } + case REQUEST_TYPE.PUSH_ACTION: { + const { actionType, message } = data; + console.warn(`[pubsub] Received ERROR for PUSH_ACTION request with the action type '${actionType}' and the following message: ${message}`); + break; + } + default: { + console.error(`[pubsub] Malformed response: invalid request type ${type}`); + } + } + }, + [RESPONSE_TYPE.OK]({ data: { type, channelID } }) { + const client = this; + switch (type) { + case REQUEST_TYPE.SUB: { + client.pendingSubscriptionSet.delete(channelID); + client.subscriptionSet.add(channelID); + sbp('okTurtles.events/emit', PUBSUB_SUBSCRIPTION_SUCCEEDED, client, { channelID }); + break; + } + case REQUEST_TYPE.UNSUB: { + console.debug(`[pubsub] Unsubscribed from ${channelID}`); + 
client.pendingUnsubscriptionSet.delete(channelID); + client.subscriptionSet.delete(channelID); + client.kvFilter.delete(channelID); + break; + } + case REQUEST_TYPE.KV_FILTER: { + console.debug(`[pubsub] Set KV filter for ${channelID}`); + break; + } + default: { + console.error(`[pubsub] Malformed response: invalid request type ${type}`); + } + } + } +}; +const globalEventNames = ['offline', 'online']; +const socketEventNames = ['close', 'error', 'message', 'open']; +// eslint-disable-next-line func-call-spacing +const globalEventMap = new Map(); +if (typeof self === 'object' && self instanceof EventTarget) { + // We need to do things in this roundabout way because Chrome doesn't like + // these events handlers not being top-level. + // `Event handler of 'online' event must be added on the initial evaluation of worker script.` + for (const name of globalEventNames) { + const handler = (ev) => { + const h = globalEventMap.get(name); + return h?.(ev); + }; + self.addEventListener(name, handler, false); + } +} +// `navigator.onLine` can give confusing false positives when `true`, +// so we'll define `isDefinetelyOffline()` rather than `isOnline()` or `isOffline()`. +// See https://developer.mozilla.org/en-US/docs/Web/API/Navigator/onLine +const isDefinetelyOffline = () => typeof navigator === 'object' && navigator.onLine === false; +// Parses and validates a received message. 
+export const messageParser = (data) => { + const msg = JSON.parse(data); + if (typeof msg !== 'object' || msg === null) { + throw new TypeError('Message is null or not an object'); + } + const { type } = msg; + if (typeof type !== 'string' || type === '') { + throw new TypeError('Message type must be a non-empty string'); + } + return msg; +}; +const publicMethods = { + clearAllTimers() { + const client = this; + clearTimeout(client.connectionTimeoutID); + clearTimeout(client.nextConnectionAttemptDelayID); + clearTimeout(client.pingTimeoutID); + client.connectionTimeoutID = undefined; + client.nextConnectionAttemptDelayID = undefined; + client.pingTimeoutID = undefined; + }, + // Performs a connection or reconnection attempt. + connect() { + const client = this; + if (client.socket !== null) { + throw new Error('connect() can only be called if there is no current socket.'); + } + if (client.nextConnectionAttemptDelayID) { + throw new Error('connect() must not be called during a reconnection delay.'); + } + if (!client.shouldReconnect) { + throw new Error('connect() should no longer be called on this instance.'); + } + client.socket = new WebSocket(client.url); + // Sometimes (like when using `createMessage`), we want to send objects that + // are serialized as strings. Native web sockets don't support objects, so + // we use this workaround. + client.socket.send = function (data) { + const send = WebSocket.prototype.send.bind(this); + if (typeof data === 'object' && + typeof data[Symbol.toPrimitive] === 'function') { + return send(data[Symbol.toPrimitive]()); + } + return send(data); + }; + if (client.options.timeout) { + const start = performance.now(); + client.connectionTimeoutID = setTimeout(() => { + client.connectionTimeoutID = undefined; + if (client.options.reconnectOnTimeout) { + client.connectionTimeUsed = performance.now() - start; + } + client.socket?.close(4000, 'timeout'); + }, client.options.timeout); + } + // Attach WebSocket event listeners. 
+ for (const name of socketEventNames) { + client.socket.addEventListener(name, client.listeners[name]); + } + }, + /** + * Immediately close the socket, stop listening for events and clear any cache. + * + * This method is used in unit tests. + * - In particular, no 'close' event handler will be called. + * - Any incoming or outgoing buffered data will be discarded. + * - Any pending messages will be discarded. + */ + destroy() { + const client = this; + client.clearAllTimers(); + // Update property values. + // Note: do not clear 'client.options'. + client.pendingSubscriptionSet.clear(); + client.pendingUnsubscriptionSet.clear(); + client.subscriptionSet.clear(); + // Remove global event listeners. + if (typeof self === 'object' && self instanceof EventTarget) { + for (const name of globalEventNames) { + globalEventMap.delete(name); + } + } + // Remove WebSocket event listeners. + if (client.socket) { + for (const name of socketEventNames) { + client.socket.removeEventListener(name, client.listeners[name]); + } + client.socket.close(); + } + client.listeners = Object.create(null); + client.socket = null; + client.shouldReconnect = false; + }, + getNextRandomDelay() { + const client = this; + const { maxReconnectionDelay, minReconnectionDelay, reconnectionDelayGrowFactor } = client.options; + const minDelay = minReconnectionDelay * reconnectionDelayGrowFactor ** client.failedConnectionAttempts; + const maxDelay = minDelay * reconnectionDelayGrowFactor; + const connectionTimeUsed = client.connectionTimeUsed; + client.connectionTimeUsed = undefined; + return Math.min( + // See issue #1943: Have the connection time used 'eat into' the + // reconnection time used + Math.max(minReconnectionDelay, connectionTimeUsed ? 
maxReconnectionDelay - connectionTimeUsed : maxReconnectionDelay), Math.round(minDelay + (0, Math.random)() * (maxDelay - minDelay))); + }, + // Schedules a connection attempt to happen after a delay computed according to + // a randomized exponential backoff algorithm variant. + scheduleConnectionAttempt() { + const client = this; + if (!client.shouldReconnect) { + throw new Error('Cannot call `scheduleConnectionAttempt()` when `shouldReconnect` is false.'); + } + if (client.nextConnectionAttemptDelayID) { + return console.warn('[pubsub] A reconnection attempt is already scheduled.'); + } + const delay = client.getNextRandomDelay(); + const nth = client.failedConnectionAttempts + 1; + client.nextConnectionAttemptDelayID = setTimeout(() => { + sbp('okTurtles.events/emit', PUBSUB_RECONNECTION_ATTEMPT, client); + client.nextConnectionAttemptDelayID = undefined; + client.connect(); + }, delay); + sbp('okTurtles.events/emit', PUBSUB_RECONNECTION_SCHEDULED, client, { delay, nth }); + }, + // Can be used to send ephemeral messages outside of any contract log. + // Does nothing if the socket is not in the OPEN state. + pub(channelID, data) { + if (this.socket?.readyState === WebSocket.OPEN) { + this.socket.send(createPubMessage(channelID, data)); + } + }, + /** + * Sends a SUB request to the server as soon as possible. + * + * - The given channel ID will be cached until we get a relevant server + * response, allowing us to resend the same request if necessary. + * - Any identical UNSUB request that has not been sent yet will be cancelled. + * - Calling this method again before the server has responded has no effect. + * @param channelID - The ID of the channel whose updates we want to subscribe to. 
+ */ + sub(channelID) { + const client = this; + const { socket } = this; + if (!client.pendingSubscriptionSet.has(channelID)) { + client.pendingSubscriptionSet.add(channelID); + client.pendingUnsubscriptionSet.delete(channelID); + if (socket?.readyState === WebSocket.OPEN) { + const kvFilter = client.kvFilter.get(channelID); + socket.send(createRequest(REQUEST_TYPE.SUB, kvFilter ? { channelID, kvFilter } : { channelID })); + } + } + }, + /** + * Sends a KV_FILTER request to the server as soon as possible. + */ + setKvFilter(channelID, kvFilter) { + const client = this; + const { socket } = this; + if (kvFilter) { + client.kvFilter.set(channelID, kvFilter); + } + else { + client.kvFilter.delete(channelID); + } + if (client.subscriptionSet.has(channelID)) { + if (socket?.readyState === WebSocket.OPEN) { + socket.send(createRequest(REQUEST_TYPE.KV_FILTER, kvFilter ? { channelID, kvFilter } : { channelID })); + } + } + }, + /** + * Sends an UNSUB request to the server as soon as possible. + * + * - The given channel ID will be cached until we get a relevant server + * response, allowing us to resend the same request if necessary. + * - Any identical SUB request that has not been sent yet will be cancelled. + * - Calling this method again before the server has responded has no effect. + * @param channelID - The ID of the channel whose updates we want to unsubscribe from. + */ + unsub(channelID) { + const client = this; + const { socket } = this; + if (!client.pendingUnsubscriptionSet.has(channelID)) { + client.pendingSubscriptionSet.delete(channelID); + client.pendingUnsubscriptionSet.add(channelID); + if (socket?.readyState === WebSocket.OPEN) { + socket.send(createRequest(REQUEST_TYPE.UNSUB, { channelID })); + } + } + } +}; +// Register custom SBP event listeners before the first connection. 
+for (const name of Object.keys(defaultClientEventHandlers)) { + if (name === 'error' || !socketEventNames.includes(name)) { + sbp('okTurtles.events/on', `pubsub-${name}`, (target, detail) => { + const ev = new CustomEvent(name, { detail }); + target.listeners[name].call(target, ev); + }); + } +} +export default { + NOTIFICATION_TYPE, + REQUEST_TYPE, + RESPONSE_TYPE, + createClient, + createMessage, + createRequest +}; diff --git a/dist/esm/pubsub/index.test.d.mts b/dist/esm/pubsub/index.test.d.mts new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/dist/esm/pubsub/index.test.d.mts @@ -0,0 +1 @@ +export {}; diff --git a/dist/esm/pubsub/index.test.mjs b/dist/esm/pubsub/index.test.mjs new file mode 100644 index 0000000..e0927fa --- /dev/null +++ b/dist/esm/pubsub/index.test.mjs @@ -0,0 +1,41 @@ +import * as assert from 'node:assert/strict'; +import { describe, it } from 'node:test'; +import { createClient } from './index.mjs'; +const client = createClient('ws://localhost:8080', { + manual: true, + reconnectOnDisconnection: false, + reconnectOnOnline: false, + reconnectOnTimeout: false +}); +const { maxReconnectionDelay, minReconnectionDelay } = client.options; +const createRandomDelays = (number) => { + return [...new Array(number)].map((_, i) => { + client.failedConnectionAttempts = i; + return client.getNextRandomDelay(); + }); +}; +const delays1 = createRandomDelays(10); +const delays2 = createRandomDelays(10); +describe('Test getNextRandomDelay()', function () { + it('every delay should be longer than the previous one', function () { + // In other words, the delays should be sorted in ascending numerical order. 
+ assert.deepEqual(delays1, [...delays1].sort((a, b) => a - b)); + assert.deepEqual(delays2, [...delays2].sort((a, b) => a - b)); + }); + it('no delay should be shorter than the minimal reconnection delay', function () { + delays1.forEach((delay) => { + assert.ok(delay >= minReconnectionDelay); + }); + delays2.forEach((delay) => { + assert.ok(delay >= minReconnectionDelay); + }); + }); + it('no delay should be longer than the maximal reconnection delay', function () { + delays1.forEach((delay) => { + assert.ok(delay <= maxReconnectionDelay); + }); + delays2.forEach((delay) => { + assert.ok(delay <= maxReconnectionDelay); + }); + }); +}); diff --git a/dist/esm/signedData.d.mts b/dist/esm/signedData.d.mts new file mode 100644 index 0000000..7be328f --- /dev/null +++ b/dist/esm/signedData.d.mts @@ -0,0 +1,32 @@ +import type { Key } from '@chelonia/crypto'; +import type { ChelContractState } from './types.mjs'; +export interface SignedData { + signingKeyId: string; + valueOf: () => T; + serialize: (additionalData?: string) => U & { + _signedData: [string, string, string]; + }; + context?: [string, U & { + _signedData: [string, string, string]; + }, number, string]; + toString: (additionalData?: string) => string; + recreate?: (data: T) => SignedData; + toJSON?: () => U & { + _signedData: [string, string, string]; + }; + get: (k: keyof U) => U[typeof k] | undefined; + set?: (k: keyof U, v: U[typeof k]) => void; +} +export declare const isSignedData: (o: unknown) => o is SignedData; +export declare const signedOutgoingData: (stateOrContractID: string | ChelContractState, sKeyId: string, data: T, additionalKeys?: Record) => SignedData; +export declare const signedOutgoingDataWithRawKey: (key: Key, data: T) => SignedData; +export declare const signedIncomingData: (contractID: string, state: object | null | undefined, data: U & { + _signedData: [string, string, string]; +}, height: number, additionalData: string, mapperFn?: (value: V) => T) => SignedData; +export declare 
const signedDataKeyId: (data: unknown) => string; +export declare const isRawSignedData: (data: unknown) => data is { + _signedData: [string, string, string]; +}; +export declare const rawSignedIncomingData: (data: U & { + _signedData: [string, string, string]; +}) => SignedData; diff --git a/dist/esm/signedData.mjs b/dist/esm/signedData.mjs new file mode 100644 index 0000000..df38344 --- /dev/null +++ b/dist/esm/signedData.mjs @@ -0,0 +1,270 @@ +import { deserializeKey, keyId, serializeKey, sign, verifySignature } from '@chelonia/crypto'; +import sbp from '@sbp/sbp'; +import { has } from 'turtledash'; +import { ChelErrorSignatureError, ChelErrorSignatureKeyNotFound, ChelErrorSignatureKeyUnauthorized } from './errors.mjs'; +import { blake32Hash } from './functions.mjs'; +const rootStateFn = () => sbp('chelonia/rootState'); +// `proto` & `wrapper` are utilities for `isSignedData` +const proto = Object.create(null, { + _isSignedData: { + value: true + } +}); +const wrapper = (o) => { + return Object.setPrototypeOf(o, proto); +}; +// `isSignedData` will return true for objects created by the various +// `signed*Data` functions. It's meant to implement functionality equivalent +// to `o instanceof SignedData` +export const isSignedData = (o) => { + return !!o && !!Object.getPrototypeOf(o)?._isSignedData; +}; +// TODO: Check for permissions and allowedActions; this requires passing some +// additional context +const signData = function (stateOrContractID, sKeyId, data, extraFields, additionalKeys, additionalData) { + const state = typeof stateOrContractID === 'string' ? rootStateFn()[stateOrContractID] : stateOrContractID; + if (!additionalData) { + throw new ChelErrorSignatureError('Signature additional data must be provided'); + } + // Has the key been revoked? 
If so, attempt to find an authorized key by the same name + const designatedKey = state?._vm?.authorizedKeys?.[sKeyId]; + if (!designatedKey?.purpose.includes('sig')) { + throw new ChelErrorSignatureKeyNotFound(`Signing key ID ${sKeyId} is missing or is missing signing purpose`); + } + if (designatedKey._notAfterHeight != null) { + const name = state._vm.authorizedKeys[sKeyId].name; + const newKeyId = Object.values(state._vm?.authorizedKeys).find((v) => v._notAfterHeight == null && v.name === name && v.purpose.includes('sig'))?.id; + if (!newKeyId) { + throw new ChelErrorSignatureKeyNotFound(`Signing key ID ${sKeyId} has been revoked and no new key exists by the same name (${name})`); + } + sKeyId = newKeyId; + } + const key = additionalKeys[sKeyId]; + if (!key) { + throw new ChelErrorSignatureKeyNotFound(`Missing signing key ${sKeyId}`); + } + const deserializedKey = typeof key === 'string' ? deserializeKey(key) : key; + const serializedData = JSON.stringify(data, (_, v) => { + if (v && has(v, 'serialize') && typeof v.serialize === 'function') { + if (v.serialize.length === 1) { + return v.serialize(additionalData); + } + else { + return v.serialize(); + } + } + return v; + }); + const payloadToSign = blake32Hash(`${blake32Hash(additionalData)}${blake32Hash(serializedData)}`); + return { + ...extraFields, + _signedData: [ + serializedData, + keyId(deserializedKey), + sign(deserializedKey, payloadToSign) + ] + }; +}; +// TODO: Check for permissions and allowedActions; this requires passing the +// entire SPMessage +const verifySignatureData = function (state, height, data, additionalData) { + if (!state) { + throw new ChelErrorSignatureError('Missing contract state'); + } + if (!isRawSignedData(data)) { + throw new ChelErrorSignatureError('Invalid message format'); + } + if (!Number.isSafeInteger(height) || height < 0) { + throw new ChelErrorSignatureError(`Height ${height} is invalid or out of range`); + } + const [serializedMessage, sKeyId, signature] = 
data._signedData; + const designatedKey = state._vm?.authorizedKeys?.[sKeyId]; + if (!designatedKey || (height > designatedKey._notAfterHeight) || (height < designatedKey._notBeforeHeight) || !designatedKey.purpose.includes('sig')) { + // These errors (ChelErrorSignatureKeyUnauthorized) are serious and + // indicate a bug. Make them fatal when running integration tests + // (otherwise, they get swallowed and shown as a notification) + if (process.env.CI) { + console.error(`Key ${sKeyId} is unauthorized or expired for the current contract`, { designatedKey, height, state: JSON.parse(JSON.stringify(sbp('state/vuex/state'))) }); + // An unhandled promise rejection will cause Cypress to fail + Promise.reject(new ChelErrorSignatureKeyUnauthorized(`Key ${sKeyId} is unauthorized or expired for the current contract`)); + } + throw new ChelErrorSignatureKeyUnauthorized(`Key ${sKeyId} is unauthorized or expired for the current contract`); + } + // TODO + const deserializedKey = designatedKey.data; + const payloadToSign = blake32Hash(`${blake32Hash(additionalData)}${blake32Hash(serializedMessage)}`); + try { + verifySignature(deserializedKey, payloadToSign, signature); + const message = JSON.parse(serializedMessage); + return [sKeyId, message]; + } + catch (e) { + throw new ChelErrorSignatureError(e?.message || e); + } +}; +export const signedOutgoingData = (stateOrContractID, sKeyId, data, additionalKeys) => { + if (!stateOrContractID || data === undefined || !sKeyId) + throw new TypeError('Invalid invocation'); + if (!additionalKeys) { + additionalKeys = rootStateFn().secretKeys; + } + const extraFields = Object.create(null); + const boundStringValueFn = signData.bind(null, stateOrContractID, sKeyId, data, extraFields, additionalKeys); + const serializefn = (additionalData) => boundStringValueFn(additionalData || ''); + return wrapper({ + get signingKeyId() { + return sKeyId; + }, + get serialize() { + return serializefn; + }, + get toString() { + return (additionalData) => 
JSON.stringify(this.serialize(additionalData)); + }, + get valueOf() { + return () => data; + }, + get recreate() { + return (data) => signedOutgoingData(stateOrContractID, sKeyId, data, additionalKeys); + }, + get get() { + return (k) => extraFields[k]; + }, + get set() { + return (k, v) => { + extraFields[k] = v; + }; + } + }); +}; +// Used for OP_CONTRACT as a state does not yet exist +export const signedOutgoingDataWithRawKey = (key, data) => { + const sKeyId = keyId(key); + const state = { + _vm: { + authorizedKeys: { + [sKeyId]: { + purpose: ['sig'], + data: serializeKey(key, false), + _notBeforeHeight: 0, + _notAfterHeight: undefined + } + } + } + }; + const extraFields = Object.create(null); + const boundStringValueFn = signData.bind(null, state, sKeyId, data, extraFields, { [sKeyId]: key }); + const serializefn = (additionalData) => boundStringValueFn(additionalData || ''); + return wrapper({ + get signingKeyId() { + return sKeyId; + }, + get serialize() { + return serializefn; + }, + get toString() { + return (additionalData) => JSON.stringify(this.serialize(additionalData)); + }, + get valueOf() { + return () => data; + }, + get recreate() { + return (data) => signedOutgoingDataWithRawKey(key, data); + }, + get get() { + return (k) => extraFields[k]; + }, + get set() { + return (k, v) => { + extraFields[k] = v; + }; + } + }); +}; +export const signedIncomingData = (contractID, state, data, height, additionalData, mapperFn) => { + const stringValueFn = () => data; + let verifySignedValue; + const verifySignedValueFn = () => { + if (verifySignedValue) { + return verifySignedValue[1]; + } + verifySignedValue = verifySignatureData(state || rootStateFn()[contractID], height, data, additionalData); + if (mapperFn) + verifySignedValue[1] = mapperFn(verifySignedValue[1]); + return verifySignedValue[1]; + }; + return wrapper({ + get signingKeyId() { + if (verifySignedValue) + return verifySignedValue[0]; + return signedDataKeyId(data); + }, + get serialize() { + 
return stringValueFn; + }, + get context() { + return [contractID, data, height, additionalData]; + }, + get toString() { + return () => JSON.stringify(this.serialize()); + }, + get valueOf() { + return verifySignedValueFn; + }, + get toJSON() { + return this.serialize; + }, + get get() { + return (k) => k !== '_signedData' ? data[k] : undefined; + } + }); +}; +export const signedDataKeyId = (data) => { + if (!isRawSignedData(data)) { + throw new ChelErrorSignatureError('Invalid message format'); + } + return data._signedData[1]; +}; +export const isRawSignedData = (data) => { + if (!data || typeof data !== 'object' || !has(data, '_signedData') || !Array.isArray(data._signedData) || data._signedData.length !== 3 || data._signedData.map(v => typeof v).filter(v => v !== 'string').length !== 0) { + return false; + } + return true; +}; +// WARNING: The following function (rawSignedIncomingData) will not check signatures +export const rawSignedIncomingData = (data) => { + if (!isRawSignedData(data)) { + throw new ChelErrorSignatureError('Invalid message format'); + } + const stringValueFn = () => data; + let verifySignedValue; + const verifySignedValueFn = () => { + if (verifySignedValue) { + return verifySignedValue[1]; + } + verifySignedValue = [data._signedData[1], JSON.parse(data._signedData[0])]; + return verifySignedValue[1]; + }; + return wrapper({ + get signingKeyId() { + if (verifySignedValue) + return verifySignedValue[0]; + return signedDataKeyId(data); + }, + get serialize() { + return stringValueFn; + }, + get toString() { + return () => JSON.stringify(this.serialize()); + }, + get valueOf() { + return verifySignedValueFn; + }, + get toJSON() { + return this.serialize; + }, + get get() { + return (k) => k !== '_signedData' ? 
data[k] : undefined; + } + }); +}; diff --git a/dist/esm/time-sync.d.mts b/dist/esm/time-sync.d.mts new file mode 100644 index 0000000..d451d2b --- /dev/null +++ b/dist/esm/time-sync.d.mts @@ -0,0 +1,2 @@ +declare const _default: string[]; +export default _default; diff --git a/dist/esm/time-sync.mjs b/dist/esm/time-sync.mjs new file mode 100644 index 0000000..b116f26 --- /dev/null +++ b/dist/esm/time-sync.mjs @@ -0,0 +1,123 @@ +import sbp from '@sbp/sbp'; +// `wallBase` is the base used to calculate wall time (i.e., time elapsed as one +// would get from, e.g., looking a clock hanging from a wall). +// Although optimistically +// it has a default value to local time, it'll be updated to the server's time +// once `chelonia/private/startClockSync` is called +// From Wikipedia: 'walltime is the actual time taken from the start of a +// computer program to the end. In other words, it is the difference between +// the time at which a task finishes and the time at which the task started.' +let wallBase = Date.now(); +// `monotonicBase` is the base used to calculate an offset to apply to `wallBase` +// to estimate the server's current wall time. 
+let monotonicBase = performance.now(); +// `undefined` means the sync process has been stopped, `null` that the current +// request has finished +let resyncTimeout; +let watchdog; +const syncServerTime = async function () { + // Get our current monotonic time + const startTime = performance.now(); + // Now, ask the server for the time + const time = await this.config.fetch(`${this.config.connectionURL}/time`, { signal: this.abortController.signal }); + const requestTimeElapsed = performance.now(); + if (requestTimeElapsed - startTime > 8000) { + throw new Error('Error fetching server time: request took too long'); + } + // If the request didn't succeed, report it + if (!time.ok) + throw new Error('Error fetching server time'); + const serverTime = (new Date(await time.text())).valueOf(); + // If the value could not be parsed, report that as well + if (Number.isNaN(serverTime)) + throw new Error('Unable to parse server time'); + // Adjust `wallBase` based on the elapsed request time. We can't know + // how long it took for the server to respond, but we can estimate that it's + // about half the time from the moment we made the request. 
+ const newMonotonicBase = performance.now(); + wallBase = + serverTime + + (requestTimeElapsed - startTime) / 2 + + // Also take into account the time elapsed between `requestTimeElapsed` + // and this line (which should be very little) + (newMonotonicBase - requestTimeElapsed); + monotonicBase = newMonotonicBase; +}; +export default sbp('sbp/selectors/register', { + 'chelonia/private/startClockSync': function () { + if (resyncTimeout !== undefined) { + throw new Error('chelonia/private/startClockSync has already been called'); + } + // Default re-sync every 5 minutes + const resync = (delay = 300000) => { + // If there's another time sync process in progress, don't do anything + if (resyncTimeout !== null) + return; + const timeout = setTimeout(() => { + // Get the server time + syncServerTime.call(this).then(() => { + // Mark the process as finished + if (resyncTimeout === timeout) + resyncTimeout = null; + // And then restart the listener + resync(); + }).catch(e => { + // If there was an error, log it and possibly attempt again + if (resyncTimeout === timeout) { + // In this case, it was the current task that failed + resyncTimeout = null; + console.error('Error re-syncing server time; will re-attempt in 5s', e); + // Call resync again, with a shorter delay + setTimeout(() => resync(0), 5000); + } + else { + // If there is already another attempt, just log it + console.error('Error re-syncing server time; another attempt is in progress', e); + } + }); + }, delay); + resyncTimeout = timeout; + }; + let wallLast = Date.now(); + let monotonicLast = performance.now(); + // Watchdog to ensure our time doesn't drift. 
Periodically check for + // differences between the elapsed wall time and the elapsed monotonic + // time + watchdog = setInterval(() => { + const wallNow = Date.now(); + const monotonicNow = performance.now(); + const difference = Math.abs(Math.abs((wallNow - wallLast)) - Math.abs((monotonicNow - monotonicLast))); + // Tolerate up to a 10ms difference + if (difference > 10) { + if (resyncTimeout != null) + clearTimeout(resyncTimeout); + resyncTimeout = null; + resync(0); + } + wallLast = wallNow; + monotonicLast = monotonicNow; + }, 10000); + // Start the sync process + resyncTimeout = null; + resync(0); + }, + 'chelonia/private/stopClockSync': () => { + if (resyncTimeout !== undefined) { + if (watchdog != null) + clearInterval(watchdog); + if (resyncTimeout != null) + clearTimeout(resyncTimeout); + watchdog = undefined; + resyncTimeout = undefined; + } + }, + // Get an estimate of the server's current time based on the time elapsed as + // measured locally (using a monotonic clock), which is used as an offset, and + // a previously retrieved server time. 
The time value is returned as a UNIX + // _millisecond_ timestamp (milliseconds since 1 Jan 1970 00:00:00 UTC) + 'chelonia/time': function () { + const monotonicNow = performance.now(); + const wallNow = wallBase - monotonicBase + monotonicNow; + return Math.round(wallNow); + } +}); diff --git a/dist/esm/types.d.mts b/dist/esm/types.d.mts new file mode 100644 index 0000000..8b06115 --- /dev/null +++ b/dist/esm/types.d.mts @@ -0,0 +1,324 @@ +import type { Key } from '@chelonia/crypto'; +import type sbp from '@sbp/sbp'; +import type { SPMessage, SPMsgDirection, SPOpType } from './SPMessage.mjs'; +import type { EncryptedData } from './encryptedData.mjs'; +import type { PubSubClient } from './pubsub/index.mjs'; +export type JSONType = null | string | number | boolean | JSONObject | JSONArray; +export interface JSONObject { + [x: string]: JSONType; +} +export type JSONArray = Array; +export type ResType = ResTypeErr | ResTypeOK | ResTypeAlready | ResTypeSub | ResTypeUnsub | ResTypeEntry | ResTypePub; +export type ResTypeErr = 'error'; +export type ResTypeOK = 'success'; +export type ResTypeAlready = 'already'; +export type ResTypeSub = 'sub'; +export type ResTypeUnsub = 'unsub'; +export type ResTypePub = 'pub'; +export type ResTypeEntry = 'entry'; +export type CheloniaConfig = { + [_ in `preOp_${SPOpType}`]?: (message: SPMessage, state: ChelContractState) => boolean; +} & { + [_ in `postOp_${SPOpType}`]?: (message: SPMessage, state: ChelContractState) => boolean; +} & { + connectionURL: string; + stateSelector: string; + contracts: { + defaults: { + modules: Record; + exposedGlobals: object; + allowedDomains: string[]; + allowedSelectors: string[]; + preferSlim: boolean; + }; + overrides: object; + manifests: Record; + }; + whitelisted: (action: string) => boolean; + reactiveSet: (obj: T, key: keyof T, value: T[typeof key]) => void; + fetch: typeof fetch; + reactiveDel: (obj: T, key: keyof T) => void; + acceptAllMessages: boolean; + skipActionProcessing: boolean; + 
skipSideEffects: boolean; + strictProcessing: boolean; + strictOrdering: boolean; + connectionOptions: { + maxRetries: number; + reconnectOnTimeout: boolean; + }; + preOp?: (message: SPMessage, state: ChelContractState) => boolean; + postOp?: (message: SPMessage, state: ChelContractState) => boolean; + hooks: Partial<{ + preHandleEvent: { + (message: SPMessage): Promise; + } | null; + postHandleEvent: { + (message: SPMessage): Promise; + } | null; + processError: { + (e: unknown, message: SPMessage | null | undefined, meta: object | null | undefined): void; + } | null; + sideEffectError: { + (e: unknown, message?: SPMessage): void; + } | null; + handleEventError: { + (e: unknown, message?: SPMessage): void; + } | null; + syncContractError: { + (e: unknown, contractID: string): void; + } | null; + pubsubError: { + (e: unknown, socket: PubSubClient): void; + } | null; + }>; + skipDecryptionAttempts: boolean; + unwrapMaybeEncryptedData: (data: T | EncryptedData) => { + encryptionKeyId: string | null; + data: T; + } | undefined; +}; +export type SendMessageHooks = Partial<{ + prepublish: (entry: SPMessage) => void | Promise; + onprocessed: (entry: SPMessage) => void; + preSendCheck: (entry: SPMessage, state: ChelContractState) => boolean | Promise; + beforeRequest: (newEntry: SPMessage, oldEntry: SPMessage) => void | Promise; + postpublish: (entry: SPMessage) => void | Promise; +}>; +export type ChelContractProcessMessageObject = Readonly<{ + data: object; + meta: object; + hash: string; + height: number; + contractID: string; + direction: SPMsgDirection; + signingKeyId: string; + signingContractID: string; + innerSigningKeyId?: string | null | undefined; + innerSigningContractID?: string | null | undefined; +}>; +export type ChelContractSideeffectMutationObject = Readonly<{ + data: object; + meta: object; + hash: string; + height: number; + contractID: string; + description: string; + direction: SPMsgDirection; + signingKeyId: string; + signingContractID: string; + 
innerSigningKeyId?: string | null | undefined; + innerSigningContractID?: string | null | undefined; +}>; +export type CheloniaContractCtx = { + getters: Record(state: ChelContractState, obj: T) => T[K]>; + name: string; + manifest: string; + metadata: { + create: () => object | Promise; + validate: (meta: object, { state, contractID, ...gProxy }: { + state: ChelContractState; + contractID: string; + }) => void | Promise; + }; + sbp: typeof sbp; + state: (contractID: string) => ChelContractState; + actions: Record void | Promise; + process: (message: ChelContractProcessMessageObject, { state, ...gProxy }: { + state: ChelContractState; + }) => void | Promise; + sideEffect?: (mutation: ChelContractSideeffectMutationObject, { state, ...gProxy }: { + state: ChelContractState; + }) => void | Promise; + }>; + methods: Record; +}; +export type CheloniaContext = { + config: CheloniaConfig; + _instance: object; + abortController: AbortController; + state: { + contracts: Record; + pending: string[]; + [x: string]: unknown; + }; + manifestToContract: Record; + whitelistedActions: Record; + currentSyncs: Record; + postSyncOperations: Record>>; + sideEffectStacks: Record[]>; + sideEffectStack: (contractID: string) => Array>; + setPostSyncOp: (contractID: string, key: string, op: Parameters) => void; + transientSecretKeys: Record; + ephemeralReferenceCount: Record; + subscriptionSet: Set; + pending: { + contractID: string; + }[]; + pubsub: import('./pubsub/index.mjs').PubSubClient; + contractsModifiedListener: (contracts: Set, { added, removed }: { + added: string[]; + removed: string[]; + }) => void; + defContractSelectors: string[]; + defContractManifest: string; + defContractSBP: typeof sbp; + defContract: CheloniaContractCtx; +}; +export type ChelContractManifestBody = { + name: string; + version: string; + contract: { + hash: string; + file: string; + }; + contractSlim: { + hash: string; + file: string; + }; + signingKeys: string[]; +}; +export type ChelContractManifest = { 
+ head: string; + body: string; + signature: { + keyId: string; + value: string; + }; +}; +export type ChelFileManifest = { + version: '1.0.0'; + type?: string; + meta?: unknown; + cipher: string; + 'cipher-params'?: unknown; + size: number; + chunks: [number, string][]; + 'name-map'?: Record; + alternatives?: Record; +}; +export type ChelContractKey = { + id: string; + name: string; + purpose: string[]; + ringLevel: number; + permissions: '*' | string[]; + allowedActions?: '*' | string[]; + _notBeforeHeight: number; + _notAfterHeight?: number | undefined; + _private?: string; + foreignKey?: string; + meta?: { + quantity?: number; + expires?: number; + private?: { + transient?: boolean; + content?: string; + shareable?: boolean; + oldKeys?: string; + }; + keyRequest?: { + contractID: string; + reference: string; + responded: string; + }; + }; + data: string; +}; +export type ChelContractState = { + _vm: { + authorizedKeys: Record; + invites?: Record; + type: string; + pendingWatch?: Record; + keyshares?: Record; + sharedKeyIds?: { + id: string; + contractID: string; + height: number; + keyRequestHash?: string; + keyRequestHeight?: number; + }[]; + pendingKeyshares?: Record; + props?: Record; + }; + _volatile?: { + pendingKeyRequests?: { + contractID: string; + hash: string; + name: string; + reference?: string; + }[]; + pendingKeyRevocations?: Record; + watch?: [fkName: string, fkId: string][]; + dirty?: boolean; + resyncing?: boolean; + }; +}; +export type ChelRootState = { + [x: string]: ChelContractState; +} & { + contracts: Record; +}; +export type Response = { + type: ResType; + err?: string; + data?: JSONType; +}; +export type ParsedEncryptedOrUnencryptedMessage = Readonly<{ + contractID: string; + innerSigningKeyId?: string | null | undefined; + encryptionKeyId?: string | null | undefined; + signingKeyId: string; + data: T; + signingContractID?: string | null | undefined; + innerSigningContractID?: string | null | undefined; +}>; +export type 
ChelKvOnConflictCallback = (args: { + contractID: string; + key: string; + failedData?: JSONType; + status: number; + etag: string | null | undefined; + currentData: JSONType | undefined; + currentValue: ParsedEncryptedOrUnencryptedMessage | undefined; +}) => Promise<[JSONType, string]>; diff --git a/dist/esm/types.mjs b/dist/esm/types.mjs new file mode 100644 index 0000000..689b1f4 --- /dev/null +++ b/dist/esm/types.mjs @@ -0,0 +1,2 @@ +/* eslint-disable no-use-before-define */ +export {}; diff --git a/dist/esm/utils.d.mts b/dist/esm/utils.d.mts new file mode 100644 index 0000000..12b61cb --- /dev/null +++ b/dist/esm/utils.d.mts @@ -0,0 +1,36 @@ +import type { SPKey, SPKeyPurpose, SPKeyUpdate, SPOpValue } from './SPMessage.mjs'; +import { SPMessage } from './SPMessage.mjs'; +import type { EncryptedData } from './encryptedData.mjs'; +import { ChelContractKey, ChelContractState, ChelRootState, CheloniaConfig, CheloniaContext, JSONType } from './types.mjs'; +export declare const findKeyIdByName: (state: ChelContractState, name: string) => string | null | undefined; +export declare const findForeignKeysByContractID: (state: ChelContractState, contractID: string) => string[] | undefined; +export declare const findRevokedKeyIdsByName: (state: ChelContractState, name: string) => string[]; +export declare const findSuitableSecretKeyId: (state: ChelContractState, permissions: "*" | string[], purposes: SPKeyPurpose[], ringLevel?: number, allowedActions?: "*" | string[]) => string | null | undefined; +export declare const findContractIDByForeignKeyId: (state: ChelContractState, keyId: string) => string | null | undefined; +export declare const findSuitablePublicKeyIds: (state: ChelContractState, permissions: "*" | string[], purposes: SPKeyPurpose[], ringLevel?: number) => string[] | null | undefined; +export declare const validateKeyPermissions: (msg: SPMessage, config: CheloniaConfig, state: { + _vm: { + authorizedKeys: ChelContractState["_vm"]["authorizedKeys"]; + }; +}, 
signingKeyId: string, opT: string, opV: SPOpValue) => boolean; +export declare const validateKeyAddPermissions: (this: CheloniaContext, contractID: string, signingKey: ChelContractKey, state: ChelContractState, v: (ChelContractKey | SPKey | EncryptedData)[], skipPrivateCheck?: boolean) => void; +export declare const validateKeyDelPermissions: (this: CheloniaContext, contractID: string, signingKey: ChelContractKey, state: ChelContractState, v: (string | EncryptedData)[]) => void; +export declare const validateKeyUpdatePermissions: (this: CheloniaContext, contractID: string, signingKey: ChelContractKey, state: ChelContractState, v: (SPKeyUpdate | EncryptedData)[]) => [ChelContractKey[], Record]; +export declare const keyAdditionProcessor: (this: CheloniaContext, _msg: SPMessage, hash: string, keys: (ChelContractKey | SPKey | EncryptedData)[], state: ChelContractState, contractID: string, _signingKey: ChelContractKey, internalSideEffectStack?: (({ state, message }: { + state: ChelContractState; + message: SPMessage; +}) => void)[]) => void; +export declare const subscribeToForeignKeyContracts: (this: CheloniaContext, contractID: string, state: ChelContractState) => void; +export declare const recreateEvent: (entry: SPMessage, state: ChelContractState, contractsState: ChelRootState["contracts"][string]) => undefined | SPMessage; +export declare const getContractIDfromKeyId: (contractID: string, signingKeyId: string | null | undefined, state: ChelContractState) => string | null | undefined; +export declare function eventsAfter(this: CheloniaContext, contractID: string, sinceHeight: number, limit?: number, sinceHash?: string, { stream }?: { + stream: boolean; +}): ReadableStream | Promise; +export declare function buildShelterAuthorizationHeader(this: CheloniaContext, contractID: string, state?: ChelContractState): string; +export declare function verifyShelterAuthorizationHeader(authorization: string, rootState?: object): string; +export declare const clearObject: (o: 
object) => void; +export declare const reactiveClearObject: (o: T, fn: (o: T, k: keyof T) => void) => void; +export declare const checkCanBeGarbageCollected: (this: CheloniaContext, id: string) => boolean; +export declare const collectEventStream: (s: ReadableStream) => Promise; +export declare const logEvtError: (msg: SPMessage, ...args: unknown[]) => void; +export declare const handleFetchResult: (type: "text" | "json" | "blob") => ((r: Response) => Promise); diff --git a/dist/esm/utils.mjs b/dist/esm/utils.mjs new file mode 100644 index 0000000..cde916f --- /dev/null +++ b/dist/esm/utils.mjs @@ -0,0 +1,819 @@ +import { deserializeKey, serializeKey, sign, verifySignature } from '@chelonia/crypto'; +import sbp from '@sbp/sbp'; +import { has, omit } from 'turtledash'; +import { SPMessage } from './SPMessage.mjs'; +import { Secret } from './Secret.mjs'; +import { INVITE_STATUS } from './constants.mjs'; +import { ChelErrorForkedChain, ChelErrorResourceGone, ChelErrorUnexpectedHttpResponseCode, ChelErrorWarning } from './errors.mjs'; +import { CONTRACT_IS_PENDING_KEY_REQUESTS } from './events.mjs'; +import { b64ToStr } from './functions.mjs'; +import { isSignedData } from './signedData.mjs'; +const MAX_EVENTS_AFTER = Number.parseInt(process.env.MAX_EVENTS_AFTER || '', 10) || Infinity; +export const findKeyIdByName = (state, name) => state._vm?.authorizedKeys && Object.values((state._vm.authorizedKeys)).find((k) => k.name === name && k._notAfterHeight == null)?.id; +export const findForeignKeysByContractID = (state, contractID) => state._vm?.authorizedKeys && ((Object.values((state._vm.authorizedKeys)))).filter((k) => k._notAfterHeight == null && k.foreignKey?.includes(contractID)).map(k => k.id); +export const findRevokedKeyIdsByName = (state, name) => state._vm?.authorizedKeys && ((Object.values((state._vm.authorizedKeys) || {}))).filter((k) => k.name === name && k._notAfterHeight != null).map(k => k.id); +export const findSuitableSecretKeyId = (state, permissions, 
purposes, ringLevel, allowedActions) => { + return state._vm?.authorizedKeys && + Object.values((state._vm.authorizedKeys)) + .filter((k) => { + return k._notAfterHeight == null && + (k.ringLevel <= (ringLevel ?? Number.POSITIVE_INFINITY)) && + sbp('chelonia/haveSecretKey', k.id) && + (Array.isArray(permissions) + ? permissions.reduce((acc, permission) => acc && (k.permissions === '*' || k.permissions.includes(permission)), true) + : permissions === k.permissions) && + purposes.reduce((acc, purpose) => acc && k.purpose.includes(purpose), true) && + (Array.isArray(allowedActions) + ? allowedActions.reduce((acc, action) => acc && (k.allowedActions === '*' || !!k.allowedActions?.includes(action)), true) + : allowedActions ? allowedActions === k.allowedActions : true); + }) + .sort((a, b) => b.ringLevel - a.ringLevel)[0]?.id; +}; +export const findContractIDByForeignKeyId = (state, keyId) => { + let fk; + if (!keyId || !(fk = state?._vm?.authorizedKeys?.[keyId]?.foreignKey)) + return; + try { + const fkUrl = new URL(fk); + return fkUrl.pathname; + } + catch { } +}; +// TODO: Resolve inviteKey being added (doesn't have krs permission) +export const findSuitablePublicKeyIds = (state, permissions, purposes, ringLevel) => { + return state._vm?.authorizedKeys && + Object.values((state._vm.authorizedKeys)).filter((k) => (k._notAfterHeight == null) && + (k.ringLevel <= (ringLevel ?? Number.POSITIVE_INFINITY)) && + (Array.isArray(permissions) + ? permissions.reduce((acc, permission) => acc && (k.permissions === '*' || k.permissions.includes(permission)), true) + : permissions === k.permissions) && + purposes.reduce((acc, purpose) => acc && k.purpose.includes(purpose), true)) + .sort((a, b) => b.ringLevel - a.ringLevel) + .map((k) => k.id); +}; +const validateActionPermissions = (msg, signingKey, state, opT, opV) => { + const data = isSignedData(opV) + ? 
opV.valueOf() + : opV; + if (signingKey.allowedActions !== '*' && (!Array.isArray(signingKey.allowedActions) || + !signingKey.allowedActions.includes(data.action))) { + logEvtError(msg, `Signing key ${signingKey.id} is not allowed for action ${data.action}`); + return false; + } + if (isSignedData(opV)) { + const s = opV; + const innerSigningKey = state._vm?.authorizedKeys?.[s.signingKeyId]; + // For outgoing messages, we may be using an inner signing key that isn't + // available for us to see. In this case, we ignore the missing key. + // For incoming messages, we must check permissions and a missing + // key means no permissions. + if (!innerSigningKey && msg._direction === 'outgoing') + return true; + if (!innerSigningKey || + !Array.isArray(innerSigningKey.purpose) || + !innerSigningKey.purpose.includes('sig') || + (innerSigningKey.permissions !== '*' && + (!Array.isArray(innerSigningKey.permissions) || + !innerSigningKey.permissions.includes(opT + '#inner')))) { + logEvtError(msg, `Signing key ${s.signingKeyId} is missing permissions for operation ${opT}`); + return false; + } + if (innerSigningKey.allowedActions !== '*' && (!Array.isArray(innerSigningKey.allowedActions) || + !innerSigningKey.allowedActions.includes(data.action + '#inner'))) { + logEvtError(msg, `Signing key ${innerSigningKey.id} is not allowed for action ${data.action}`); + return false; + } + } + return true; +}; +export const validateKeyPermissions = (msg, config, state, signingKeyId, opT, opV) => { + const signingKey = state._vm?.authorizedKeys?.[signingKeyId]; + if (!signingKey || + !Array.isArray(signingKey.purpose) || + !signingKey.purpose.includes('sig') || + (signingKey.permissions !== '*' && + (!Array.isArray(signingKey.permissions) || + !signingKey.permissions.includes(opT)))) { + logEvtError(msg, `Signing key ${signingKeyId} is missing permissions for operation ${opT}`); + return false; + } + if (opT === SPMessage.OP_ACTION_UNENCRYPTED && + !validateActionPermissions(msg, 
signingKey, state, opT, opV)) { + return false; + } + if (!config.skipActionProcessing && + opT === SPMessage.OP_ACTION_ENCRYPTED && + !validateActionPermissions(msg, signingKey, state, opT, opV.valueOf())) { + return false; + } + return true; +}; +export const validateKeyAddPermissions = function (contractID, signingKey, state, v, skipPrivateCheck) { + const signingKeyPermissions = Array.isArray(signingKey.permissions) ? new Set(signingKey.permissions) : signingKey.permissions; + const signingKeyAllowedActions = Array.isArray(signingKey.allowedActions) ? new Set(signingKey.allowedActions) : signingKey.allowedActions; + if (!state._vm?.authorizedKeys?.[signingKey.id]) + throw new Error('Signing key for OP_KEY_ADD or OP_KEY_UPDATE must exist in _vm.authorizedKeys. contractID=' + contractID + ' signingKeyId=' + signingKey.id); + const localSigningKey = state._vm.authorizedKeys[signingKey.id]; + v.forEach(wk => { + const data = this.config.unwrapMaybeEncryptedData(wk); + if (!data) + return; + const k = data.data; + if (!skipPrivateCheck && signingKey._private && !data.encryptionKeyId) { + throw new Error('Signing key is private but it tried adding a public key'); + } + if (!Number.isSafeInteger(k.ringLevel) || k.ringLevel < localSigningKey.ringLevel) { + throw new Error('Signing key has ringLevel ' + localSigningKey.ringLevel + ' but attempted to add or update a key with ringLevel ' + k.ringLevel); + } + if (signingKeyPermissions !== '*') { + if (!Array.isArray(k.permissions) || !k.permissions.reduce((acc, cv) => acc && signingKeyPermissions.has(cv), true)) { + throw new Error('Unable to add or update a key with more permissions than the signing key. 
signingKey permissions: ' + String(signingKey?.permissions) + '; key add permissions: ' + String(k.permissions)); + } + } + if (signingKeyAllowedActions !== '*' && k.allowedActions) { + if (!signingKeyAllowedActions || !Array.isArray(k.allowedActions) || !k.allowedActions.reduce((acc, cv) => acc && signingKeyAllowedActions.has(cv), true)) { + throw new Error('Unable to add or update a key with more allowed actions than the signing key. signingKey allowed actions: ' + String(signingKey?.allowedActions) + '; key add allowed actions: ' + String(k.allowedActions)); + } + } + }); +}; +export const validateKeyDelPermissions = function (contractID, signingKey, state, v) { + if (!state._vm?.authorizedKeys?.[signingKey.id]) + throw new Error('Signing key for OP_KEY_DEL must exist in _vm.authorizedKeys. contractID=' + contractID + ' signingKeyId=' + signingKey.id); + const localSigningKey = state._vm.authorizedKeys[signingKey.id]; + v + .forEach((wid) => { + const data = this.config.unwrapMaybeEncryptedData(wid); + if (!data) + return; + const id = data.data; + const k = state._vm.authorizedKeys[id]; + if (!k) { + throw new Error('Nonexisting key ID ' + id); + } + if (signingKey._private) { + throw new Error('Signing key is private'); + } + if (!k._private !== !data.encryptionKeyId) { + throw new Error('_private attribute must be preserved'); + } + if (!Number.isSafeInteger(k.ringLevel) || k.ringLevel < localSigningKey.ringLevel) { + throw new Error('Signing key has ringLevel ' + localSigningKey.ringLevel + ' but attempted to remove a key with ringLevel ' + k.ringLevel); + } + }); +}; +export const validateKeyUpdatePermissions = function (contractID, signingKey, state, v) { + const updatedMap = Object.create(null); + const keys = v.map((wuk) => { + const data = this.config.unwrapMaybeEncryptedData(wuk); + if (!data) + return undefined; + const uk = data.data; + const existingKey = state._vm.authorizedKeys[uk.oldKeyId]; + if (!existingKey) { + throw new 
ChelErrorWarning('Missing old key ID ' + uk.oldKeyId); + } + if (!existingKey._private !== !data.encryptionKeyId) { + throw new Error('_private attribute must be preserved'); + } + if (uk.name !== existingKey.name) { + throw new Error('Name cannot be updated'); + } + if (!uk.id !== !uk.data) { + throw new Error('Both or none of the id and data attributes must be provided. Old key ID: ' + uk.oldKeyId); + } + if (uk.data && existingKey.meta?.private && !(uk.meta?.private)) { + throw new Error('Missing private key. Old key ID: ' + uk.oldKeyId); + } + if (uk.id && uk.id !== uk.oldKeyId) { + updatedMap[uk.id] = uk.oldKeyId; + } + // Discard `_notAfterHeight` and `_notBeforeHeight`, since retaining them + // can cause issues reprocessing messages. + // An example is reprocessing old messages in a chatroom using + // `chelonia/in/processMessage`: cloning `_notAfterHeight` will break key + // rotations, since the new key will have the same expiration value as the + // old key (the new key is supposed to have no expiration height). 
+ const updatedKey = omit(existingKey, ['_notAfterHeight', '_notBeforeHeight']); + // Set the corresponding updated attributes + if (uk.permissions) { + updatedKey.permissions = uk.permissions; + } + if (uk.allowedActions) { + updatedKey.allowedActions = uk.allowedActions; + } + if (uk.purpose) { + updatedKey.purpose = uk.purpose; + } + if (uk.meta) { + updatedKey.meta = uk.meta; + } + if (uk.id) { + updatedKey.id = uk.id; + } + if (uk.data) { + updatedKey.data = uk.data; + } + return updatedKey; + // eslint-disable-next-line no-use-before-define + }).filter(Boolean); + validateKeyAddPermissions.call(this, contractID, signingKey, state, keys, true); + return [keys, updatedMap]; +}; +export const keyAdditionProcessor = function (_msg, hash, keys, state, contractID, _signingKey, internalSideEffectStack) { + const decryptedKeys = []; + const keysToPersist = []; + const storeSecretKey = (key, decryptedKey) => { + const decryptedDeserializedKey = deserializeKey(decryptedKey); + const transient = !!key.meta?.private?.transient; + sbp('chelonia/storeSecretKeys', new Secret([{ + key: decryptedDeserializedKey, + // We always set this to true because this could be done from + // an outgoing message + transient: true + }])); + if (!transient) { + keysToPersist.push({ key: decryptedDeserializedKey, transient }); + } + }; + for (const wkey of keys) { + const data = this.config.unwrapMaybeEncryptedData(wkey); + if (!data) + continue; + const key = data.data; + let decryptedKey; + // Does the key have key.meta?.private? 
If so, attempt to decrypt it + if (key.meta?.private && key.meta.private.content) { + if (key.id && + key.meta.private.content && + !sbp('chelonia/haveSecretKey', key.id, !key.meta.private.transient)) { + const decryptedKeyResult = this.config.unwrapMaybeEncryptedData(key.meta.private.content); + // Ignore data that couldn't be decrypted + if (decryptedKeyResult) { + // Data aren't encrypted + if (decryptedKeyResult.encryptionKeyId == null) { + throw new Error('Expected encrypted data but got unencrypted data for key with ID: ' + key.id); + } + decryptedKey = decryptedKeyResult.data; + decryptedKeys.push([key.id, decryptedKey]); + storeSecretKey(key, decryptedKey); + } + } + } + // Is this a #sak + if (key.name === '#sak') { + if (data.encryptionKeyId) { + throw new Error('#sak may not be encrypted'); + } + if (key.permissions && (!Array.isArray(key.permissions) || key.permissions.length !== 0)) { + throw new Error('#sak may not have permissions'); + } + if (!Array.isArray(key.purpose) || key.purpose.length !== 1 || key.purpose[0] !== 'sak') { + throw new Error("#sak must have exactly one purpose: 'sak'"); + } + if (key.ringLevel !== 0) { + throw new Error('#sak must have ringLevel 0'); + } + } + // Is this a an invite key? If so, run logic for invite keys and invitation + // accounting + if (key.name.startsWith('#inviteKey-')) { + if (!state._vm.invites) + state._vm.invites = Object.create(null); + const inviteSecret = decryptedKey || (has(this.transientSecretKeys, key.id) + ? serializeKey(this.transientSecretKeys[key.id], true) + : undefined); + state._vm.invites[key.id] = { + status: INVITE_STATUS.VALID, + initialQuantity: key.meta.quantity, + quantity: key.meta.quantity, + expires: key.meta.expires, + inviteSecret: inviteSecret, + responses: [] + }; + } + // Is this KEY operation the result of requesting keys for another contract? 
+ if (key.meta?.keyRequest?.contractID && findSuitableSecretKeyId(state, [SPMessage.OP_KEY_ADD], ['sig'])) { + const data = this.config.unwrapMaybeEncryptedData(key.meta.keyRequest.contractID); + // Are we subscribed to this contract? + // If we are not subscribed to the contract, we don't set pendingKeyRequests because we don't need that contract's state + // Setting pendingKeyRequests in these cases could result in issues + // when a corresponding OP_KEY_SHARE is received, which could trigger subscribing to this previously unsubscribed to contract + if (data && internalSideEffectStack) { + const keyRequestContractID = data.data; + const reference = this.config.unwrapMaybeEncryptedData(key.meta.keyRequest.reference); + // Since now we'll make changes to keyRequestContractID, we need to + // do this while no other operations are running for that + // contract + internalSideEffectStack.push(() => { + sbp('chelonia/private/queueEvent', keyRequestContractID, () => { + const rootState = sbp(this.config.stateSelector); + const originatingContractState = rootState[contractID]; + if (sbp('chelonia/contract/hasKeyShareBeenRespondedBy', originatingContractState, keyRequestContractID, reference)) { + // In the meantime, our key request has been responded, so we + // don't need to set pendingKeyRequests. + return; + } + if (!has(rootState, keyRequestContractID)) + this.config.reactiveSet(rootState, keyRequestContractID, Object.create(null)); + const targetState = rootState[keyRequestContractID]; + if (!targetState._volatile) { + this.config.reactiveSet(targetState, '_volatile', Object.create(null)); + } + if (!targetState._volatile.pendingKeyRequests) { + this.config.reactiveSet(rootState[keyRequestContractID]._volatile, 'pendingKeyRequests', []); + } + if (targetState._volatile.pendingKeyRequests.some((pkr) => { + return pkr && pkr.contractID === contractID && pkr.hash === hash; + })) { + // This pending key request has already been registered. + // Nothing left to do. 
+ return; + } + // Mark the contract for which keys were requested as pending keys + // The hash (of the current message) is added to this dictionary + // for cross-referencing purposes. + targetState._volatile.pendingKeyRequests.push({ contractID, name: key.name, hash, reference: reference?.data }); + this.setPostSyncOp(contractID, 'pending-keys-for-' + keyRequestContractID, ['okTurtles.events/emit', CONTRACT_IS_PENDING_KEY_REQUESTS, { contractID: keyRequestContractID }]); + }).catch((e) => { + // Using console.error instead of logEvtError because this + // is a side-effect and not relevant for outgoing messages + console.error('Error while setting or updating pendingKeyRequests', { contractID, keyRequestContractID, reference }, e); + }); + }); + } + } + } + // Any persistent keys are stored as a side-effect + if (keysToPersist.length) { + internalSideEffectStack?.push(() => { + sbp('chelonia/storeSecretKeys', new Secret(keysToPersist)); + }); + } + internalSideEffectStack?.push(() => subscribeToForeignKeyContracts.call(this, contractID, state)); +}; +export const subscribeToForeignKeyContracts = function (contractID, state) { + try { + Object.values(state._vm.authorizedKeys).filter((key) => !!((key)).foreignKey && findKeyIdByName(state, ((key)).name) != null).forEach((key) => { + const foreignKey = String(key.foreignKey); + const fkUrl = new URL(foreignKey); + const foreignContract = fkUrl.pathname; + const foreignKeyName = fkUrl.searchParams.get('keyName'); + if (!foreignContract || !foreignKeyName) { + console.warn('Invalid foreign key: missing contract or key name', { contractID, keyId: key.id }); + return; + } + const rootState = sbp(this.config.stateSelector); + const signingKey = findSuitableSecretKeyId(state, [SPMessage.OP_KEY_DEL], ['sig'], key.ringLevel); + const canMirrorOperations = !!signingKey; + // If we cannot mirror operations, then there is nothing left to do + if (!canMirrorOperations) + return; + // If the key is already being watched, do 
nothing + if (Array.isArray(rootState?.[foreignContract]?._volatile?.watch)) { + if (rootState[foreignContract]._volatile.watch.find((v) => v[0] === key.name && v[1] === contractID)) + return; + } + if (!has(state._vm, 'pendingWatch')) + this.config.reactiveSet(state._vm, 'pendingWatch', Object.create(null)); + if (!has(state._vm.pendingWatch, foreignContract)) + this.config.reactiveSet(state._vm.pendingWatch, foreignContract, []); + if (!state._vm.pendingWatch[foreignContract].find(([n]) => n === foreignKeyName)) { + state._vm.pendingWatch[foreignContract].push([foreignKeyName, key.id]); + } + this.setPostSyncOp(contractID, `watchForeignKeys-${contractID}`, ['chelonia/private/watchForeignKeys', contractID]); + }); + } + catch (e) { + console.warn('Error at subscribeToForeignKeyContracts: ' + (e.message || e)); + } +}; +// Messages might be sent before receiving already posted messages, which will +// result in a conflict +// When resending a message, race conditions might also occur (for example, if +// key rotation is required and there are many clients simultaneously online, it +// may be performed by all connected clients at once). +// The following function handles re-signing of messages when a conflict +// occurs (required because the message's previousHEAD will change) as well as +// duplicate operations. For operations involving keys, the payload will be +// rewritten to eliminate no-longer-relevant keys. In most cases, this would +// result in an empty payload, in which case the message is omitted entirely. 
+export const recreateEvent = (entry, state, contractsState) => { + const { HEAD: previousHEAD, height: previousHeight, previousKeyOp } = contractsState || {}; + if (!previousHEAD) { + throw new Error('recreateEvent: Giving up because the contract has been removed'); + } + const head = entry.head(); + const [opT, rawOpV] = entry.rawOp(); + const recreateOperation = (opT, rawOpV) => { + const opV = rawOpV.valueOf(); + const recreateOperationInternal = (opT, opV) => { + let newOpV; + if (opT === SPMessage.OP_KEY_ADD) { + if (!Array.isArray(opV)) + throw new Error('Invalid message format'); + newOpV = opV.filter((k) => { + const kId = k.valueOf().id; + return !has(state._vm.authorizedKeys, kId) || state._vm.authorizedKeys[kId]._notAfterHeight != null; + }); + // Has this key already been added? (i.e., present in authorizedKeys) + if (newOpV.length === 0) { + console.info('Omitting empty OP_KEY_ADD', { head }); + } + else if (newOpV.length === opV.length) { + return opV; + } + } + else if (opT === SPMessage.OP_KEY_DEL) { + if (!Array.isArray(opV)) + throw new Error('Invalid message format'); + // Has this key already been removed? (i.e., no longer in authorizedKeys) + newOpV = opV.filter((keyId) => { + const kId = Object(keyId).valueOf(); + return has(state._vm.authorizedKeys, kId) && state._vm.authorizedKeys[kId]._notAfterHeight == null; + }); + if (newOpV.length === 0) { + console.info('Omitting empty OP_KEY_DEL', { head }); + } + else if (newOpV.length === opV.length) { + return opV; + } + } + else if (opT === SPMessage.OP_KEY_UPDATE) { + if (!Array.isArray(opV)) + throw new Error('Invalid message format'); + // Has this key already been replaced? 
(i.e., no longer in authorizedKeys) + newOpV = opV.filter((k) => { + const oKId = k.valueOf().oldKeyId; + const nKId = k.valueOf().id; + return nKId == null || (has(state._vm.authorizedKeys, oKId) && state._vm.authorizedKeys[oKId]._notAfterHeight == null); + }); + if (newOpV.length === 0) { + console.info('Omitting empty OP_KEY_UPDATE', { head }); + } + else if (newOpV.length === opV.length) { + return opV; + } + } + else if (opT === SPMessage.OP_ATOMIC) { + if (!Array.isArray(opV)) + throw new Error('Invalid message format'); + newOpV = opV.map(([t, v]) => [t, recreateOperationInternal(t, v)]).filter(([, v]) => !!v); + if (newOpV.length === 0) { + console.info('Omitting empty OP_ATOMIC', { head }); + } + else if (newOpV.length === opV.length && newOpV.reduce((acc, cv, i) => acc && cv === opV[i], true)) { + return opV; + } + else { + return newOpV; + } + } + else { + return opV; + } + }; + const newOpV = recreateOperationInternal(opT, opV); + if (newOpV === opV) { + return rawOpV; + } + else if (newOpV === undefined) { + return; + } + if (typeof rawOpV.recreate !== 'function') { + throw new Error('Unable to recreate operation'); + } + return rawOpV.recreate(newOpV); + }; + const newRawOpV = recreateOperation(opT, rawOpV); + if (!newRawOpV) + return; + const newOp = [opT, newRawOpV]; + entry = SPMessage.cloneWith(head, newOp, { previousKeyOp, previousHEAD, height: previousHeight + 1 }); + return entry; +}; +export const getContractIDfromKeyId = (contractID, signingKeyId, state) => { + if (!signingKeyId) + return; + return signingKeyId && state._vm?.authorizedKeys?.[signingKeyId]?.foreignKey + ? 
new URL(state._vm.authorizedKeys[signingKeyId].foreignKey).pathname + : contractID; +}; +export function eventsAfter(contractID, sinceHeight, limit, sinceHash, { stream } = { stream: true }) { + if (!contractID) { + // Avoid making a network roundtrip to tell us what we already know + throw new Error('Missing contract ID'); + } + let lastUrl; + const fetchEventsStreamReader = async () => { + requestLimit = Math.min(limit ?? MAX_EVENTS_AFTER, remainingEvents); + lastUrl = `${this.config.connectionURL}/eventsAfter/${contractID}/${sinceHeight}${Number.isInteger(requestLimit) ? `/${requestLimit}` : ''}`; + const eventsResponse = await this.config.fetch(lastUrl, { signal }); + if (!eventsResponse.ok) { + const msg = `${eventsResponse.status}: ${eventsResponse.statusText}`; + if (eventsResponse.status === 404 || eventsResponse.status === 410) + throw new ChelErrorResourceGone(msg, { cause: eventsResponse.status }); + throw new ChelErrorUnexpectedHttpResponseCode(msg, { cause: eventsResponse.status }); + } + if (!eventsResponse.body) + throw new Error('Missing body'); + latestHeight = parseInt(eventsResponse.headers.get('shelter-headinfo-height'), 10); + if (!Number.isSafeInteger(latestHeight)) + throw new Error('Invalid latest height'); + requestCount++; + return eventsResponse.body.getReader(); + }; + if (!Number.isSafeInteger(sinceHeight) || sinceHeight < 0) { + throw new TypeError('Invalid since height value. Expected positive integer.'); + } + const signal = this.abortController.signal; + let requestCount = 0; + let remainingEvents = limit ?? Number.POSITIVE_INFINITY; + let eventsStreamReader; + let latestHeight; + let state = 'fetch'; + let requestLimit; + let count; + let buffer = ''; + let currentEvent; + // return ReadableStream with a custom pull function to handle streamed data + const s = new ReadableStream({ + // The pull function is called whenever the internal buffer of the stream + // becomes empty and needs more data. 
+ async pull(controller) { + try { + for (;;) { + // Handle different states of the stream reading process. + switch (state) { + // When in 'fetch' state, initiate a new fetch request to obtain a + // stream reader for events. + case 'fetch': { + eventsStreamReader = await fetchEventsStreamReader(); + // Transition to reading the new response and reset the processed + // events counter + state = 'read-new-response'; + count = 0; + break; + } + case 'read-eos': // End of stream case + case 'read-new-response': // Just started reading a new response + case 'read': { // Reading from the response stream + const { done, value } = await eventsStreamReader.read(); + // If done, determine if the stream should close or fetch more + // data by making a new request + if (done) { + // No more events to process or reached the latest event + // Using `>=` instead of `===` to avoid an infinite loop in the + // event of data loss on the server. + if (remainingEvents === 0 || sinceHeight >= latestHeight) { + controller.close(); + return; + } + else if (state === 'read-new-response' || buffer) { + // If done prematurely, throw an error + throw new Error('Invalid response: done too early'); + } + else { + // If there are still events to fetch, switch state to fetch + state = 'fetch'; + break; + } + } + if (!value) { + // If there's no value (e.g., empty response), throw an error + throw new Error('Invalid response: missing body'); + } + // Concatenate new data to the buffer, trimming any + // leading/trailing whitespace (the response is a JSON array of + // base64-encoded data, meaning that whitespace is not significant) + buffer = buffer + Buffer.from(value).toString().trim(); + // If there was only whitespace, try reading again + if (!buffer) + break; + if (state === 'read-new-response') { + // Response is in JSON format, so we look for the start of an + // array (`[`) + if (buffer[0] !== '[') { + throw new Error('Invalid response: no array start delimiter'); + } + // Trim the 
array start delimiter from the buffer + buffer = buffer.slice(1); + } + else if (state === 'read-eos') { + // If in 'read-eos' state and still reading data, it's an error + // because the response isn't valid JSON (there should be + // nothing other than whitespace after `]`) + throw new Error('Invalid data at the end of response'); + } + // If not handling new response or end-of-stream, switch to + // processing events + state = 'events'; + break; + } + case 'events': { + // Process events by looking for a comma or closing bracket that + // indicates the end of an event + const nextIdx = buffer.search(/(?<=\s*)[,\]]/); + // If the end of the event isn't found, go back to reading more + // data + if (nextIdx < 0) { + state = 'read'; + break; + } + let enqueued = false; + try { + // Extract the current event's value and trim whitespace + const eventValue = buffer.slice(0, nextIdx).trim(); + if (eventValue) { + // Check if the event limit is reached; if so, throw an error + if (count === requestLimit) { + throw new Error('Received too many events'); + } + currentEvent = JSON.parse(b64ToStr(JSON.parse(eventValue))).message; + if (count === 0) { + const hash = SPMessage.deserializeHEAD(currentEvent).hash; + const height = SPMessage.deserializeHEAD(currentEvent).head.height; + if (height !== sinceHeight || (sinceHash && sinceHash !== hash)) { + if (height === sinceHeight && sinceHash && sinceHash !== hash) { + throw new ChelErrorForkedChain(`Forked chain: hash(${hash}) !== since(${sinceHash})`); + } + else { + throw new Error(`Unexpected data: hash(${hash}) !== since(${sinceHash || ''}) or height(${height}) !== since(${sinceHeight})`); + } + } + } + // If this is the first event in a second or later request, + // drop the event because it's already been included in + // a previous response + if (count++ !== 0 || requestCount !== 0) { + controller.enqueue(currentEvent); + enqueued = true; + remainingEvents--; + } + } + // If the stream is finished (indicated by a closing 
bracket), + // update `since` (to make the next request if needed) and + // switch to 'read-eos'. + if (buffer[nextIdx] === ']') { + if (currentEvent) { + const deserialized = SPMessage.deserializeHEAD(currentEvent); + sinceHeight = deserialized.head.height; + sinceHash = deserialized.hash; + state = 'read-eos'; + } + else { + // If the response came empty, assume there are no more events + // after. Mostly this prevents infinite loops if a server is + // claiming there are more events than it's willing to return + // data for. + state = 'eod'; + } + // This should be an empty string now + buffer = buffer.slice(nextIdx + 1).trim(); + } + else if (currentEvent) { + // Otherwise, move the buffer pointer to the next event + buffer = buffer.slice(nextIdx + 1).trimStart(); + } + else { + // If the end delimiter (`]`) is missing, throw an error + throw new Error('Missing end delimiter'); + } + // If an event was successfully enqueued, exit the loop to wait + // for the next pull request + if (enqueued) { + return; + } + } + catch (e) { + console.error('[chelonia] Error during event parsing', e); + throw e; + } + break; + } + case 'eod': { + if (remainingEvents === 0 || sinceHeight >= latestHeight) { + controller.close(); + } + else { + throw new Error('Unexpected end of data'); + } + return; + } + } + } + } + catch (e) { + console.error('[eventsAfter] Error', { lastUrl }, e); + eventsStreamReader?.cancel('Error during pull').catch(e2 => { + console.error('Error canceling underlying event stream reader on error', e, e2); + }); + throw e; + } + } + }); + if (stream) + return s; + // Workaround for + return collectEventStream(s); +} +export function buildShelterAuthorizationHeader(contractID, state) { + if (!state) + state = sbp(this.config.stateSelector)[contractID]; + const SAKid = findKeyIdByName(state, '#sak'); + if (!SAKid) { + throw new Error(`Missing #sak in ${contractID}`); + } + const SAK = this.transientSecretKeys[SAKid]; + if (!SAK) { + throw new Error(`Missing 
secret #sak (${SAKid}) in ${contractID}`); + } + const deserializedSAK = typeof SAK === 'string' ? deserializeKey(SAK) : SAK; + const nonceBytes = new Uint8Array(15); + globalThis.crypto.getRandomValues(nonceBytes); + // . + const data = `${contractID} ${sbp('chelonia/time')}.${Buffer.from(nonceBytes).toString('base64')}`; + // shelter .. + return `shelter ${data}.${sign(deserializedSAK, data)}`; +} +export function verifyShelterAuthorizationHeader(authorization, rootState) { + const regex = /^shelter (([a-zA-Z0-9]+) ([0-9]+)\.([a-zA-Z0-9+/=]{20}))\.([a-zA-Z0-9+/=]+)$/i; + if (authorization.length > 1024) { + throw new Error('Authorization header too long'); + } + const matches = authorization.match(regex); + if (!matches) { + throw new Error('Unable to parse shelter authorization header'); + } + // TODO: Remember nonces and reject already used ones + const [, data, contractID, timestamp, , signature] = matches; + if (Math.abs(parseInt(timestamp) - Date.now()) > 60e3) { + throw new Error('Invalid signature time range'); + } + if (!rootState) + rootState = sbp('chelonia/rootState'); + if (!has(rootState, contractID)) { + throw new Error(`Contract ${contractID} from shelter authorization header not found`); + } + const SAKid = findKeyIdByName(rootState[contractID], '#sak'); + if (!SAKid) { + throw new Error(`Missing #sak in ${contractID}`); + } + const SAK = rootState[contractID]._vm.authorizedKeys[SAKid].data; + if (!SAK) { + throw new Error(`Missing secret #sak (${SAKid}) in ${contractID}`); + } + const deserializedSAK = deserializeKey(SAK); + verifySignature(deserializedSAK, data, signature); + return contractID; +} +export const clearObject = (o) => { + Object.keys(o).forEach((k) => delete o[k]); +}; +export const reactiveClearObject = (o, fn) => { + Object.keys(o).forEach((k) => fn(o, k)); +}; +export const checkCanBeGarbageCollected = function (id) { + const rootState = sbp(this.config.stateSelector); + return ( + // Check persistent references + 
(!has(rootState.contracts, id) || !rootState.contracts[id] || !has(rootState.contracts[id], 'references')) && + // Check ephemeral references + !has(this.ephemeralReferenceCount, id)) && + // Check foreign keys (i.e., that no keys from this contract are being watched) + (!has(rootState, id) || !has(rootState[id], '_volatile') || !has(rootState[id]._volatile, 'watch') || rootState[id]._volatile.watch.length === 0 || rootState[id]._volatile.watch.filter(([, cID]) => this.subscriptionSet.has(cID)).length === 0); +}; +export const collectEventStream = async (s) => { + const reader = s.getReader(); + const r = []; + for (;;) { + const { done, value } = await reader.read(); + if (done) + break; + r.push(value); + } + return r; +}; +// Used inside processing functions for displaying errors at the 'warn' level +// for outgoing messages to increase the signal-to-noise ratio. See issue #2773. +export const logEvtError = (msg, ...args) => { + if (msg._direction === 'outgoing') { + console.warn(...args); + } + else { + console.error(...args); + } +}; +export const handleFetchResult = (type) => { + return function (r) { + if (!r.ok) { + const msg = `${r.status}: ${r.statusText}`; + // 410 is sometimes special (for example, it can mean that a contract or + // a file has been deleted) + if (r.status === 404 || r.status === 410) + throw new ChelErrorResourceGone(msg, { cause: r.status }); + throw new ChelErrorUnexpectedHttpResponseCode(msg, { cause: r.status }); + } + return r[type](); + }; +}; diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 0000000..afe2fdc --- /dev/null +++ b/package-lock.json @@ -0,0 +1,4348 @@ +{ + "name": "libchelonia", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "libchelonia", + "version": "1.0.0", + "license": "AGPL-3.0", + "dependencies": { + "@apeleghq/multipart-parser": "1.0.18", + "@apeleghq/rfc8188": "1.0.8", + "@chelonia/crypto": "1.0.1", + "@chelonia/serdes": "1.0.0", + 
"@sbp/okturtles.data": "0.1.6", + "@sbp/okturtles.eventqueue": "1.2.1", + "@sbp/okturtles.events": "1.0.1", + "turtledash": "1.0.2" + }, + "devDependencies": { + "@types/node": "22.15.29", + "@typescript-eslint/eslint-plugin": "8.33.0", + "eslint-config-standard": "17.1.0", + "ts-node": "10.9.2", + "typescript": "5.8.3" + }, + "peerDependencies": { + "@sbp/sbp": "2.x" + } + }, + "node_modules/@apeleghq/multipart-parser": { + "version": "1.0.18", + "resolved": "https://registry.npmjs.org/@apeleghq/multipart-parser/-/multipart-parser-1.0.18.tgz", + "integrity": "sha512-4H++A5cMIXNJtdi5cVRvNIvIdILrFma8xuJGT2k2Wn1e++dTVhlZKnEwcwknB7z025rw+n96ns58o76lM5x4dg==", + "license": "ISC", + "engines": { + "node": ">=16.0.0", + "npm": ">=8.0.0" + } + }, + "node_modules/@apeleghq/rfc8188": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@apeleghq/rfc8188/-/rfc8188-1.0.8.tgz", + "integrity": "sha512-IK0IJnhgiyf+/BGuGFxGh5an8i9CiM9m9fmVlHOi18y/CibUeeugnbVDwOl7HFlbtmU0ofL6fHccUelLhGcxQg==", + "license": "ISC" + }, + "node_modules/@chelonia/crypto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@chelonia/crypto/-/crypto-1.0.1.tgz", + "integrity": "sha512-NfH/WAwNs7X/GFGceckWwTSJ9D72WUUWb6gFUQkqBMjqZh7X33zyArCuiN2EfYCcmERjFqUTDMN8Ga1jqbbi8g==", + "license": "AGPL-3.0", + "dependencies": { + "@chelonia/multiformats": "1.0.0", + "scrypt-async": "2.0.1", + "tweetnacl": "1.0.3" + } + }, + "node_modules/@chelonia/multiformats": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@chelonia/multiformats/-/multiformats-1.0.0.tgz", + "integrity": "sha512-3Kkw8IoXI8x5ML2GNXqnDYnlFf92CZt56cHOU/iBx6FFVqd4vqdZfTEAg4bxkseNilIBda9Ex04Vt+uF3NPQhQ==", + "license": "AGPL-3.0", + "dependencies": { + "blakejs": "^1.2.1" + } + }, + "node_modules/@chelonia/serdes": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@chelonia/serdes/-/serdes-1.0.0.tgz", + "integrity": 
"sha512-MkkwhuZ8pGPOpYiTse/u2sxUK1pVm97QtC8yPbwaYqhSc5W0SMI4VaKe+h/ERagNCwxXH7hntLFSpevEvQpJ/A==", + "license": "MIT" + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.7.0.tgz", + "integrity": "sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.12.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", + "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.6.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": 
"^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@eslint/eslintrc/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "peer": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@eslint/js": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz", + "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz", + "integrity": "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==", + "deprecated": "Use @eslint/config-array instead", + "dev": true, + "license": "Apache-2.0", + "peer": true, + "dependencies": { + "@humanwhocodes/object-schema": "^2.0.3", + "debug": "^4.3.1", + "minimatch": "^3.0.5" + }, + "engines": { + "node": ">=10.10.0" + } + }, + 
"node_modules/@humanwhocodes/config-array/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "peer": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "license": "Apache-2.0", + "peer": true, + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", + "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", + "deprecated": "Use @eslint/object-schema instead", + "dev": true, + "license": "BSD-3-Clause", + "peer": true + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": 
"sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@rtsao/scc": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@rtsao/scc/-/scc-1.1.0.tgz", + "integrity": "sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/@sbp/okturtles.data": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/@sbp/okturtles.data/-/okturtles.data-0.1.6.tgz", + "integrity": "sha512-ZAmg7XXZtC0R9Cn5egFtrFCYl5DG/oVQ9sToGz3IAB/CoYxEw1xFtDXtGaZFYbBMo9MMkC6QCylz7e1Top7sHA==", + "license": "MIT", + "peerDependencies": { + "@sbp/sbp": "2.x" + } + }, + "node_modules/@sbp/okturtles.eventqueue": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@sbp/okturtles.eventqueue/-/okturtles.eventqueue-1.2.1.tgz", + "integrity": "sha512-ty1PEmcrS/qwHWTPWpjJa0HEyYKl3AAIyA1gRf8+RWj8rPCywCRvig2+hrKiZVoZoOZdhd0FC37ECHCnB5+ugQ==", + "license": "MIT", + "peerDependencies": { + "@sbp/sbp": "2.x" + } + }, + "node_modules/@sbp/okturtles.events": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@sbp/okturtles.events/-/okturtles.events-1.0.1.tgz", + "integrity": "sha512-cFPGjx0sw9fBLK7yWaumM2HYJc+kW+4tP7zTpio6MfMsdKA1iTM/mNkxjIHTCnp50YUYA7ZYcresj2WZLrB0zA==", + "license": "MIT", + "dependencies": { + "@sbp/okturtles.data": "^0.1.6" + }, + "peerDependencies": { + "@sbp/sbp": "2.x" + } + }, + "node_modules/@sbp/sbp": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/@sbp/sbp/-/sbp-2.4.1.tgz", + "integrity": "sha512-k6Blz95Gl1KlTc/PHNRMCQDD3niZ1p5tTHOzABiLo82YYQe8UGNl/AiastIJ8GfByW7M/VHLYLsQEK/41Skx3g==", + "license": "MIT", + "peer": true + }, + "node_modules/@tsconfig/node10": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz", + "integrity": 
"sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node14": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", + "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/json5": { + "version": "0.0.29", + "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", + "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/@types/node": { + "version": "22.15.29", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.15.29.tgz", + "integrity": "sha512-LNdjOkUDlU1RZb8e1kOIUpN1qQUlzGkEtbVNo53vbrwDg5om6oduhm4SiUaPW5ASTXhAiP0jInWG8Qx9fVlOeQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "8.33.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.33.0.tgz", + "integrity": "sha512-CACyQuqSHt7ma3Ns601xykeBK/rDeZa3w6IS6UtMQbixO5DWy+8TilKkviGDH6jtWCo8FGRKEK5cLLkPvEammQ==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "8.33.0", + "@typescript-eslint/type-utils": "8.33.0", + "@typescript-eslint/utils": "8.33.0", + "@typescript-eslint/visitor-keys": "8.33.0", + "graphemer": "^1.4.0", + "ignore": "^7.0.0", + "natural-compare": "^1.4.0", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^8.33.0", + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/ignore": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "8.33.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.33.0.tgz", + "integrity": "sha512-JaehZvf6m0yqYp34+RVnihBAChkqeH+tqqhS0GuX1qgPpwLvmTPheKEs6OeCK6hVJgXZHJ2vbjnC9j119auStQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@typescript-eslint/scope-manager": "8.33.0", + "@typescript-eslint/types": "8.33.0", + "@typescript-eslint/typescript-estree": "8.33.0", + "@typescript-eslint/visitor-keys": "8.33.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/project-service": { + "version": "8.33.0", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.33.0.tgz", + "integrity": "sha512-d1hz0u9l6N+u/gcrk6s6gYdl7/+pp8yHheRTqP6X5hVDKALEaTn8WfGiit7G511yueBEL3OpOEpD+3/MBdoN+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/tsconfig-utils": "^8.33.0", + "@typescript-eslint/types": "^8.33.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "8.33.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.33.0.tgz", + "integrity": "sha512-LMi/oqrzpqxyO72ltP+dBSP6V0xiUb4saY7WLtxSfiNEBI8m321LLVFU9/QDJxjDQG9/tjSqKz/E3380TEqSTw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.33.0", + "@typescript-eslint/visitor-keys": "8.33.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/tsconfig-utils": { + "version": "8.33.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.33.0.tgz", + "integrity": "sha512-sTkETlbqhEoiFmGr1gsdq5HyVbSOF0145SYDJ/EQmXHtKViCaGvnyLqWFFHtEXoS0J1yU8Wyou2UGmgW88fEug==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "8.33.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.33.0.tgz", + "integrity": 
"sha512-lScnHNCBqL1QayuSrWeqAL5GmqNdVUQAAMTaCwdYEdWfIrSrOGzyLGRCHXcCixa5NK6i5l0AfSO2oBSjCjf4XQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/typescript-estree": "8.33.0", + "@typescript-eslint/utils": "8.33.0", + "debug": "^4.3.4", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/types": { + "version": "8.33.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.33.0.tgz", + "integrity": "sha512-DKuXOKpM5IDT1FA2g9x9x1Ug81YuKrzf4mYX8FAVSNu5Wo/LELHWQyM1pQaDkI42bX15PWl0vNPt1uGiIFUOpg==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "8.33.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.33.0.tgz", + "integrity": "sha512-vegY4FQoB6jL97Tu/lWRsAiUUp8qJTqzAmENH2k59SJhw0Th1oszb9Idq/FyyONLuNqT1OADJPXfyUNOR8SzAQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/project-service": "8.33.0", + "@typescript-eslint/tsconfig-utils": "8.33.0", + "@typescript-eslint/types": "8.33.0", + "@typescript-eslint/visitor-keys": "8.33.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <5.9.0" + } + }, + 
"node_modules/@typescript-eslint/utils": { + "version": "8.33.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.33.0.tgz", + "integrity": "sha512-lPFuQaLA9aSNa7D5u2EpRiqdAUhzShwGg/nhpBlc4GR6kcTABttCuyjFs8BcEZ8VWrjCBof/bePhP3Q3fS+Yrw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.7.0", + "@typescript-eslint/scope-manager": "8.33.0", + "@typescript-eslint/types": "8.33.0", + "@typescript-eslint/typescript-estree": "8.33.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "8.33.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.33.0.tgz", + "integrity": "sha512-7RW7CMYoskiz5OOGAWjJFxgb7c5UNjTG292gYhWeOAcFmYCtVCSqjqSBj5zMhxbXo2JOW95YYrUWJfU0zrpaGQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.33.0", + "eslint-visitor-keys": "^4.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", + "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@ungap/structured-clone": { + "version": "1.3.0", + "resolved": 
"https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", + "dev": true, + "license": "ISC", + "peer": true + }, + "node_modules/acorn": { + "version": "8.14.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", + "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "license": "MIT", + "peer": true, + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/acorn-walk": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz", + "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "acorn": "^8.11.0" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true, + "license": "MIT" + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0", + "peer": true + }, + "node_modules/array-buffer-byte-length": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz", + "integrity": "sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.3", + "is-array-buffer": "^3.0.5" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array-includes": { + "version": "3.1.8", + "resolved": 
"https://registry.npmjs.org/array-includes/-/array-includes-3.1.8.tgz", + "integrity": "sha512-itaWrbYbqpGXkGhZPGUulwnhVf5Hpy1xiCFsGqyIGglbBxmG5vSjxQen3/WGOjPpNEv1RtBLKxbmVXm8HpJStQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.4", + "is-string": "^1.0.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.findlastindex": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.5.tgz", + "integrity": "sha512-zfETvRFA8o7EiNn++N5f/kaCw221hrpGsDmcpndVupkPzEc1Wuf3VgC0qby1BbHs7f5DVYjgtEU2LLh5bqeGfQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.flat": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.3.tgz", + "integrity": "sha512-rwG/ja1neyLqCuGZ5YYrznA62D4mZXg0i1cIskIUKSiqF3Cje9/wXAls9B9s1Wa2fomMsIv8czB8jZcPmxCXFg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.flatmap": { + "version": "1.3.3", + "resolved": 
"https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.3.tgz", + "integrity": "sha512-Y7Wt51eKJSyi80hFrJCePGGNo5ktJCslFuboqJsbf57CCPcm5zztluPlc4/aD8sWsKvlwatezpV4U1efk8kpjg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/arraybuffer.prototype.slice": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz", + "integrity": "sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "is-array-buffer": "^3.0.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/async-function": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/async-function/-/async-function-1.0.0.tgz", + "integrity": "sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/available-typed-arrays": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, + 
"engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/blakejs": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/blakejs/-/blakejs-1.2.1.tgz", + "integrity": "sha512-QXUSXI3QVc/gJME0dBpXrag1kbzOqCjCX8/b54ntNyW6sjtoqxqRk3LTmXzaJoh71zMsDCjM+47jS7XiwN/+fQ==", + "license": "MIT" + }, + "node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/builtin-modules": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.3.0.tgz", + "integrity": "sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/builtins": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.1.0.tgz", + "integrity": 
"sha512-SW9lzGTLvWTP1AY8xeAMZimqDrIaSdLQUcVr9DMef51niJ022Ri87SwRRKYm4A6iHfkPaiVUu/Duw2Wc4J7kKg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "semver": "^7.0.0" + } + }, + "node_modules/call-bind": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", + "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind-apply-helpers": "^1.0.0", + "es-define-property": "^1.0.0", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + 
"dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": 
"https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/data-view-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz", + "integrity": "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-length": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz", + "integrity": "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/inspect-js" + } + }, + "node_modules/data-view-byte-offset": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz", + "integrity": "sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/define-properties": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "define-data-property": "^1.0.1", + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/diff": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "license": "Apache-2.0", + "peer": true, + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-abstract": { + "version": "1.23.9", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.9.tgz", + "integrity": "sha512-py07lI0wjxAC/DcfK1S6G7iANonniZwTISvdPzk9hzeH0IZIshbuuFxLIU96OyF89Yb9hiqWn8M/bY83KY5vzA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "array-buffer-byte-length": "^1.0.2", + "arraybuffer.prototype.slice": "^1.0.4", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "data-view-buffer": "^1.0.2", + "data-view-byte-length": "^1.0.2", + "data-view-byte-offset": "^1.0.1", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-set-tostringtag": "^2.1.0", + "es-to-primitive": "^1.3.0", + "function.prototype.name": "^1.1.8", + "get-intrinsic": "^1.2.7", + "get-proto": "^1.0.0", + "get-symbol-description": "^1.1.0", + "globalthis": 
"^1.0.4", + "gopd": "^1.2.0", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "internal-slot": "^1.1.0", + "is-array-buffer": "^3.0.5", + "is-callable": "^1.2.7", + "is-data-view": "^1.0.2", + "is-regex": "^1.2.1", + "is-shared-array-buffer": "^1.0.4", + "is-string": "^1.1.1", + "is-typed-array": "^1.1.15", + "is-weakref": "^1.1.0", + "math-intrinsics": "^1.1.0", + "object-inspect": "^1.13.3", + "object-keys": "^1.1.1", + "object.assign": "^4.1.7", + "own-keys": "^1.0.1", + "regexp.prototype.flags": "^1.5.3", + "safe-array-concat": "^1.1.3", + "safe-push-apply": "^1.0.0", + "safe-regex-test": "^1.1.0", + "set-proto": "^1.0.0", + "string.prototype.trim": "^1.2.10", + "string.prototype.trimend": "^1.0.9", + "string.prototype.trimstart": "^1.0.8", + "typed-array-buffer": "^1.0.3", + "typed-array-byte-length": "^1.0.3", + "typed-array-byte-offset": "^1.0.4", + "typed-array-length": "^1.0.7", + "unbox-primitive": "^1.1.0", + "which-typed-array": "^1.1.18" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": 
"https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-shim-unscopables": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.1.0.tgz", + "integrity": "sha512-d9T8ucsEhh8Bi1woXCf+TIKDIROLG5WCkxg8geBCbvk22kzwC5G2OnXVMO6FUsvQlgUUXQ2itephWDLqDzbeCw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-to-primitive": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.3.0.tgz", + "integrity": "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "is-callable": "^1.2.7", + "is-date-object": "^1.0.5", + "is-symbol": "^1.0.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": 
"sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz", + "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==", + "deprecated": "This version is no longer supported. Please see https://eslint.org/version-support for other options.", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.1", + "@humanwhocodes/config-array": "^0.13.0", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "graphemer": "^1.4.0", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3", + "strip-ansi": "^6.0.1", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + 
}, + "node_modules/eslint-compat-utils": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/eslint-compat-utils/-/eslint-compat-utils-0.5.1.tgz", + "integrity": "sha512-3z3vFexKIEnjHE3zCMRo6fn/e44U7T1khUjg+Hp0ZQMCigh28rALD0nPFBcGZuiLC5rLZa2ubQHDRln09JfU2Q==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "semver": "^7.5.4" + }, + "engines": { + "node": ">=12" + }, + "peerDependencies": { + "eslint": ">=6.0.0" + } + }, + "node_modules/eslint-config-standard": { + "version": "17.1.0", + "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-17.1.0.tgz", + "integrity": "sha512-IwHwmaBNtDK4zDHQukFDW5u/aTb8+meQWZvNFWkiGmbWjD6bqyuSSBxxXKkCftCUzc1zwCH2m/baCNDLGmuO5Q==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "eslint": "^8.0.1", + "eslint-plugin-import": "^2.25.2", + "eslint-plugin-n": "^15.0.0 || ^16.0.0 ", + "eslint-plugin-promise": "^6.0.0" + } + }, + "node_modules/eslint-import-resolver-node": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz", + "integrity": "sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "debug": "^3.2.7", + "is-core-module": "^2.13.0", + "resolve": "^1.22.4" + } + }, + "node_modules/eslint-import-resolver-node/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + 
"license": "MIT", + "peer": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-module-utils": { + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.12.0.tgz", + "integrity": "sha512-wALZ0HFoytlyh/1+4wuZ9FJCD/leWHQzzrxJ8+rebyReSLk7LApMyd3WJaLVoN+D5+WIdJyDK1c6JnE65V4Zyg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "debug": "^3.2.7" + }, + "engines": { + "node": ">=4" + }, + "peerDependenciesMeta": { + "eslint": { + "optional": true + } + } + }, + "node_modules/eslint-module-utils/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-plugin-es-x": { + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-es-x/-/eslint-plugin-es-x-7.8.0.tgz", + "integrity": "sha512-7Ds8+wAAoV3T+LAKeu39Y5BzXCrGKrcISfgKEqTS4BDN8SFEDQd0S43jiQ8vIa3wUKD07qitZdfzlenSi8/0qQ==", + "dev": true, + "funding": [ + "https://github.com/sponsors/ota-meshi", + "https://opencollective.com/eslint" + ], + "license": "MIT", + "peer": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.1.2", + "@eslint-community/regexpp": "^4.11.0", + "eslint-compat-utils": "^0.5.1" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "peerDependencies": { + "eslint": ">=8" + } + }, + "node_modules/eslint-plugin-import": { + "version": "2.31.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.31.0.tgz", + "integrity": "sha512-ixmkI62Rbc2/w8Vfxyh1jQRTdRTF52VxwRVHl/ykPAmqG+Nb7/kNn+byLP0LxPgI7zWA16Jt82SybJInmMia3A==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@rtsao/scc": "^1.1.0", + 
"array-includes": "^3.1.8", + "array.prototype.findlastindex": "^1.2.5", + "array.prototype.flat": "^1.3.2", + "array.prototype.flatmap": "^1.3.2", + "debug": "^3.2.7", + "doctrine": "^2.1.0", + "eslint-import-resolver-node": "^0.3.9", + "eslint-module-utils": "^2.12.0", + "hasown": "^2.0.2", + "is-core-module": "^2.15.1", + "is-glob": "^4.0.3", + "minimatch": "^3.1.2", + "object.fromentries": "^2.0.8", + "object.groupby": "^1.0.3", + "object.values": "^1.2.0", + "semver": "^6.3.1", + "string.prototype.trimend": "^1.0.8", + "tsconfig-paths": "^3.15.0" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 || ^9" + } + }, + "node_modules/eslint-plugin-import/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/eslint-plugin-import/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-plugin-import/node_modules/doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "dev": true, + "license": "Apache-2.0", + "peer": true, + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eslint-plugin-import/node_modules/minimatch": { + "version": "3.1.2", + 
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "peer": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/eslint-plugin-import/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "peer": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/eslint-plugin-n": { + "version": "16.6.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-16.6.2.tgz", + "integrity": "sha512-6TyDmZ1HXoFQXnhCTUjVFULReoBPOAjpuiKELMkeP40yffI/1ZRO+d9ug/VC6fqISo2WkuIBk3cvuRPALaWlOQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "builtins": "^5.0.1", + "eslint-plugin-es-x": "^7.5.0", + "get-tsconfig": "^4.7.0", + "globals": "^13.24.0", + "ignore": "^5.2.4", + "is-builtin-module": "^3.2.1", + "is-core-module": "^2.12.1", + "minimatch": "^3.1.2", + "resolve": "^1.22.2", + "semver": "^7.5.3" + }, + "engines": { + "node": ">=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-plugin-n/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + 
"node_modules/eslint-plugin-n/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "peer": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/eslint-plugin-promise": { + "version": "6.6.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.6.0.tgz", + "integrity": "sha512-57Zzfw8G6+Gq7axm2Pdo3gW/Rx3h9Yywgn61uE/3elTCOePEHVrn2i5CdfBwA1BLK0Q0WqctICIUSqXZW/VprQ==", + "dev": true, + "license": "ISC", + "peer": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0 || ^9.0.0" + } + }, + "node_modules/eslint-scope": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", + "dev": true, + "license": "BSD-2-Clause", + "peer": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/brace-expansion": { + "version": "1.1.11", + 
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/eslint/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "peer": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/espree": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", + "dev": true, + "license": "BSD-2-Clause", + "peer": true, + "dependencies": { + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "dev": true, + "license": "BSD-3-Clause", + "peer": true, + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "license": "BSD-2-Clause", + "peer": true, + "dependencies": { + "estraverse": 
"^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "license": "BSD-2-Clause", + "peer": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", + "peer": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": 
"2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/fastq": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "license": "MIT", + "peer": 
true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", + "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.3", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flatted": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "dev": true, + "license": "ISC", + "peer": true + }, + "node_modules/for-each": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", + "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "is-callable": "^1.2.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true, + "license": "ISC", + "peer": true + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + 
"license": "MIT", + "peer": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/function.prototype.name": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.8.tgz", + "integrity": "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "functions-have-names": "^1.2.3", + "hasown": "^2.0.2", + "is-callable": "^1.2.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/functions-have-names": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", + "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", + "dev": true, + "license": "MIT", + "peer": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + 
"resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/get-symbol-description": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.1.0.tgz", + "integrity": "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-tsconfig": { + "version": "4.10.0", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.10.0.tgz", + "integrity": "sha512-kGzZ3LWWQcGIAmg6iWvXn0ei6WDtV26wzHRMwDSzmAbcXrTEXxHy6IehI6/4eT6VRKyMP1eF1VqwrVUmE/LR7A==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "license": "ISC", + "peer": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": 
"https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "peer": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "peer": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/globals": { + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globalthis": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", + "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", + 
"dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "define-properties": "^1.2.1", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true, + "license": "MIT" + }, + "node_modules/has-bigints": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.1.0.tgz", + "integrity": "sha512-R3pbpkcIqv2Pm3dUwgjclDRVmWpTJW2DcMzcIhEXEx1oh/CEMObMm3KLmRJOdvhM7o4uQBnwr8pzRK2sJWIqfg==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "dev": 
true, + "license": "MIT", + "peer": true, + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-proto": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.2.0.tgz", + "integrity": "sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "dunder-proto": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ignore": { + 
"version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "license": "ISC", + "peer": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true, + "license": "ISC", + "peer": true + }, + "node_modules/internal-slot": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", + "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "es-errors": "^1.3.0", + "hasown": "^2.0.2", + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/is-array-buffer": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz", + "integrity": "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-async-function": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.1.tgz", + "integrity": "sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "async-function": "^1.0.0", + "call-bound": "^1.0.3", + "get-proto": "^1.0.1", + "has-tostringtag": "^1.0.2", + 
"safe-regex-test": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-bigint": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.1.0.tgz", + "integrity": "sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "has-bigints": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-boolean-object": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.2.tgz", + "integrity": "sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-builtin-module": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-3.2.1.tgz", + "integrity": "sha512-BSLE3HnV2syZ0FK0iMA/yUGplUeMmNz4AW5fnTunbCIqZi4vG3WjJT9FHMy5D69xmAYBHXQhJdALdpwVxV501A==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "builtin-modules": "^3.3.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" 
+ }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-data-view": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.2.tgz", + "integrity": "sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.2", + "get-intrinsic": "^1.2.6", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-date-object": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.1.0.tgz", + "integrity": "sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.2", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/is-finalizationregistry": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz", + "integrity": "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-generator-function": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.0.tgz", + "integrity": "sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.3", + "get-proto": "^1.0.0", + "has-tostringtag": "^1.0.2", + "safe-regex-test": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-map": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", + "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": 
"https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-number-object": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz", + "integrity": "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-regex": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", + "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.2", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-set": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", + "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" + }, + 
"funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-shared-array-buffer": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz", + "integrity": "sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-string": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.1.1.tgz", + "integrity": "sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-symbol": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.1.1.tgz", + "integrity": "sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.2", + "has-symbols": "^1.1.0", + "safe-regex-test": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-typed-array": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", + "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "which-typed-array": "^1.1.16" + }, 
+ "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakmap": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", + "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakref": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.1.1.tgz", + "integrity": "sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakset": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.4.tgz", + "integrity": "sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.3", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": 
"sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC", + "peer": true + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/json5": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", + "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "minimist": "^1.2.0" + }, + "bin": { + "json5": "lib/cli.js" + } + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": 
"https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "license": "ISC" + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", 
+ "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "license": "MIT", + "peer": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": 
"https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true, + "license": "MIT" + }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.assign": { + "version": "4.1.7", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz", + "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0", + "has-symbols": "^1.1.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.fromentries": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.8.tgz", + "integrity": "sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind": "^1.0.7", 
+ "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.groupby": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.3.tgz", + "integrity": "sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.values": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.1.tgz", + "integrity": "sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "license": "ISC", + "peer": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", 
+ "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/own-keys": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/own-keys/-/own-keys-1.0.1.tgz", + "integrity": "sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "get-intrinsic": "^1.2.6", + "object-keys": "^1.1.1", + "safe-push-apply": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/path-exists": { + 
"version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/possible-typed-array-names": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz", + "integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==", 
+ "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/reflect.getprototypeof": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", + "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.9", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.7", + "get-proto": "^1.0.1", + "which-builtin-type": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/regexp.prototype.flags": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz", + "integrity": "sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "set-function-name": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve": { + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true, + "license": "MIT", + "peer": true, + "funding": { + "url": 
"https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "dev": true, + "license": "ISC", + "peer": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-array-concat": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.3.tgz", + "integrity": "sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "get-intrinsic": "^1.2.6", + "has-symbols": "^1.1.0", 
+ "isarray": "^2.0.5" + }, + "engines": { + "node": ">=0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safe-push-apply": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz", + "integrity": "sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "es-errors": "^1.3.0", + "isarray": "^2.0.5" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safe-regex-test": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz", + "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "is-regex": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/scrypt-async": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/scrypt-async/-/scrypt-async-2.0.1.tgz", + "integrity": "sha512-wHR032jldwZNy7Tzrfu7RccOgGf8r5hyDMSP2uV6DpLiBUsR8JsDcx/in73o2UGVVrH5ivRFdNsFPcjtl3LErQ==", + "license": "BSD" + }, + "node_modules/semver": { + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + 
"integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-function-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", + "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "functions-have-names": "^1.2.3", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-proto": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/set-proto/-/set-proto-1.0.0.tgz", + "integrity": "sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "dunder-proto": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": 
"sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trim": { + "version": "1.2.10", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.10.tgz", + "integrity": "sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "define-data-property": "^1.1.4", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-object-atoms": "^1.0.0", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimend": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.9.tgz", + "integrity": "sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimstart": { + "version": "1.0.8", + "resolved": 
"https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", + "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "peer": true, + 
"dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/ts-api-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz", + "integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18.12" + }, + "peerDependencies": { + "typescript": ">=4.8.4" + } + }, + "node_modules/ts-node": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", + "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@cspotcode/source-map-support": "^0.8.0", + 
"@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/tsconfig-paths": { + "version": "3.15.0", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz", + "integrity": "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@types/json5": "^0.0.29", + "json5": "^1.0.2", + "minimist": "^1.2.6", + "strip-bom": "^3.0.0" + } + }, + "node_modules/turtledash": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/turtledash/-/turtledash-1.0.2.tgz", + "integrity": "sha512-hLmeevYrlFlLxeB4lYZOtV6TlRe0NQ6YxTVotWXVL9ubqSm2UgZaoSTpQjBJz3Ag27oHJA3iIi017CkubPp37w==", + "license": "MIT" + }, + "node_modules/tweetnacl": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-1.0.3.tgz", + "integrity": "sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw==", + "license": "Unlicense" + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": 
"sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "peer": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typed-array-buffer": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", + "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.14" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/typed-array-byte-length": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.3.tgz", + "integrity": "sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind": "^1.0.8", + "for-each": "^0.3.3", + "gopd": "^1.2.0", + "has-proto": "^1.2.0", + "is-typed-array": "^1.1.14" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-byte-offset": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.4.tgz", + "integrity": 
"sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "for-each": "^0.3.3", + "gopd": "^1.2.0", + "has-proto": "^1.2.0", + "is-typed-array": "^1.1.15", + "reflect.getprototypeof": "^1.0.9" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-length": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.7.tgz", + "integrity": "sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "is-typed-array": "^1.1.13", + "possible-typed-array-names": "^1.0.0", + "reflect.getprototypeof": "^1.0.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typescript": { + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", + "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/unbox-primitive": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz", + "integrity": "sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.3", + "has-bigints": "^1.0.2", + "has-symbols": "^1.1.0", + "which-boxed-primitive": "^1.1.1" 
+ }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "license": "BSD-2-Clause", + "peer": true, + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true, + "license": "MIT" + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "peer": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/which-boxed-primitive": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.1.1.tgz", + "integrity": "sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "is-bigint": "^1.1.0", + "is-boolean-object": "^1.2.1", + "is-number-object": "^1.1.1", + "is-string": "^1.1.1", + "is-symbol": "^1.1.1" + }, + 
"engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-builtin-type": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.2.1.tgz", + "integrity": "sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "call-bound": "^1.0.2", + "function.prototype.name": "^1.1.6", + "has-tostringtag": "^1.0.2", + "is-async-function": "^2.0.0", + "is-date-object": "^1.1.0", + "is-finalizationregistry": "^1.1.0", + "is-generator-function": "^1.0.10", + "is-regex": "^1.2.1", + "is-weakref": "^1.0.2", + "isarray": "^2.0.5", + "which-boxed-primitive": "^1.1.0", + "which-collection": "^1.0.2", + "which-typed-array": "^1.1.16" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-collection": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz", + "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "is-map": "^2.0.3", + "is-set": "^2.0.3", + "is-weakmap": "^2.0.2", + "is-weakset": "^2.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-typed-array": { + "version": "1.1.19", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", + "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": 
"^1.0.4", + "for-each": "^0.3.5", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true, + "license": "ISC", + "peer": true + }, + "node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..cd3e903 --- /dev/null +++ b/package.json @@ -0,0 +1,270 @@ +{ + "name": "libchelonia", + "version": "1.0.0", + "description": "Library for building Chelonia applications", + "main": "dist/cjs/index.cjs", + "types": "dist/cjs/index.d.cts", + "module": "dist/esm/index.mjs", + "type": "module", + "exports": { + ".": { + 
"import": { + "types": "./dist/esm/index.d.mts", + "default": "./dist/esm/index.mjs" + }, + "require": { + "types": "./dist/cjs/index.d.cts", + "default": "./dist/cjs/index.cjs" + } + }, + "./chelonia":{ + "import":{ + "types": "./dist/esm/chelonia.d.mts", + "default": "./dist/esm/chelonia.mjs" + }, + "require":{ + "types": "./dist/cjs/chelonia.d.cts", + "default": "./dist/cjs/chelonia.cjs" + } + }, + "./chelonia-utils":{ + "import":{ + "types": "./dist/esm/chelonia-utils.d.mts", + "default": "./dist/esm/chelonia-utils.mjs" + }, + "require":{ + "types": "./dist/cjs/chelonia-utils.d.cts", + "default": "./dist/cjs/chelonia-utils.cjs" + } + }, + "./constants":{ + "import":{ + "types": "./dist/esm/constants.d.mts", + "default": "./dist/esm/constants.mjs" + }, + "require":{ + "types": "./dist/cjs/constants.d.cts", + "default": "./dist/cjs/constants.cjs" + } + }, + "./db":{ + "import":{ + "types": "./dist/esm/db.d.mts", + "default": "./dist/esm/db.mjs" + }, + "require":{ + "types": "./dist/cjs/db.d.cts", + "default": "./dist/cjs/db.cjs" + } + }, + "./encryptedData":{ + "import":{ + "types": "./dist/esm/encryptedData.d.mts", + "default": "./dist/esm/encryptedData.mjs" + }, + "require":{ + "types": "./dist/cjs/encryptedData.d.cts", + "default": "./dist/cjs/encryptedData.cjs" + } + }, + "./errors":{ + "import":{ + "types": "./dist/esm/errors.d.mts", + "default": "./dist/esm/errors.mjs" + }, + "require":{ + "types": "./dist/cjs/errors.d.cts", + "default": "./dist/cjs/errors.cjs" + } + }, + "./events":{ + "import":{ + "types": "./dist/esm/events.d.mts", + "default": "./dist/esm/events.mjs" + }, + "require":{ + "types": "./dist/cjs/events.d.cts", + "default": "./dist/cjs/events.cjs" + } + }, + "./files":{ + "import":{ + "types": "./dist/esm/files.d.mts", + "default": "./dist/esm/files.mjs" + }, + "require":{ + "types": "./dist/cjs/files.d.cts", + "default": "./dist/cjs/files.cjs" + } + }, + "./functions":{ + "import":{ + "types": "./dist/esm/functions.d.mts", + "default": 
"./dist/esm/functions.mjs" + }, + "require":{ + "types": "./dist/cjs/functions.d.cts", + "default": "./dist/cjs/functions.cjs" + } + }, + "./local-selectors":{ + "import":{ + "types": "./dist/esm/local-selectors/index.d.mts", + "default": "./dist/esm/local-selectors/index.mjs" + }, + "require":{ + "types": "./dist/cjs/local-selectors/index.d.cts", + "default": "./dist/cjs/local-selectors/index.cjs" + } + }, + "./persistent-actions":{ + "import":{ + "types": "./dist/esm/persistent-actions.d.mts", + "default": "./dist/esm/persistent-actions.mjs" + }, + "require":{ + "types": "./dist/cjs/persistent-actions.d.cts", + "default": "./dist/cjs/persistent-actions.cjs" + } + }, + "./presets":{ + "import":{ + "types": "./dist/esm/presets.d.mts", + "default": "./dist/esm/presets.mjs" + }, + "require":{ + "types": "./dist/cjs/presets.d.cts", + "default": "./dist/cjs/presets.cjs" + } + }, + "./pubsub":{ + "import":{ + "types": "./dist/esm/pubsub/index.d.mts", + "default": "./dist/esm/pubsub/index.mjs" + }, + "require":{ + "types": "./dist/cjs/pubsub/index.d.cts", + "default": "./dist/cjs/pubsub/index.cjs" + } + }, + "./Secret":{ + "import":{ + "types": "./dist/esm/Secret.d.mts", + "default": "./dist/esm/Secret.mjs" + }, + "require":{ + "types": "./dist/cjs/Secret.d.cts", + "default": "./dist/cjs/Secret.cjs" + } + }, + "./signedData":{ + "import":{ + "types": "./dist/esm/signedData.d.mts", + "default": "./dist/esm/signedData.mjs" + }, + "require":{ + "types": "./dist/cjs/signedData.d.cts", + "default": "./dist/cjs/signedData.cjs" + } + }, + "./SPMessage":{ + "import":{ + "types": "./dist/esm/SPMessage.d.mts", + "default": "./dist/esm/SPMessage.mjs" + }, + "require":{ + "types": "./dist/cjs/SPMessage.d.cts", + "default": "./dist/cjs/SPMessage.cjs" + } + }, + "./time-sync":{ + "import":{ + "types": "./dist/esm/time-sync.d.mts", + "default": "./dist/esm/time-sync.mjs" + }, + "require":{ + "types": "./dist/cjs/time-sync.d.cts", + "default": "./dist/cjs/time-sync.cjs" + } + }, + 
"./types":{ + "import":{ + "types": "./dist/esm/types.d.mts", + "default": "./dist/esm/types.mjs" + }, + "require":{ + "types": "./dist/cjs/types.d.cts", + "default": "./dist/cjs/types.cjs" + } + }, + "./utils":{ + "import":{ + "types": "./dist/esm/utils.d.mts", + "default": "./dist/esm/utils.mjs" + }, + "require":{ + "types": "./dist/cjs/utils.d.cts", + "default": "./dist/cjs/utils.cjs" + } + } + }, + "scripts": { + "test": "node --import 'data:text/javascript,import { register } from \"node:module\"; import { pathToFileURL } from \"node:url\"; register(\"ts-node/esm\", pathToFileURL(\"./\")); process.on(\"uncaughtException\", (e) => console.error(\"ERROR\", e));' src/index.test.ts", + "build:esm": "node --import 'data:text/javascript,import { register } from \"node:module\"; import { pathToFileURL } from \"node:url\"; register(\"ts-node/esm\", pathToFileURL(\"./\")); process.on(\"uncaughtException\", (e) => console.error(\"ERROR\", e));' buildHelper.ts esm", + "build:cjs": "node --import 'data:text/javascript,import { register } from \"node:module\"; import { pathToFileURL } from \"node:url\"; register(\"ts-node/esm\", pathToFileURL(\"./\")); process.on(\"uncaughtException\", (e) => console.error(\"ERROR\", e));' buildHelper.ts cjs", + "build": "npm run build:esm && npm run build:cjs", + "lint": "eslint .", + "clean": "rm -f dist/*" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/okTurtles/libcheloniajs.git" + }, + "author": "okTurtles Foundation Inc.", + "license": "AGPL-3.0", + "bugs": { + "url": "https://github.com/okTurtles/libcheloniajs/issues" + }, + "homepage": "https://github.com/okTurtles/libcheloniajs#readme", + "dependencies": { + "@apeleghq/multipart-parser": "1.0.18", + "@apeleghq/rfc8188": "1.0.8", + "@chelonia/crypto": "1.0.1", + "@chelonia/serdes": "1.0.0", + "@sbp/okturtles.data": "0.1.6", + "@sbp/okturtles.eventqueue": "1.2.1", + "@sbp/okturtles.events": 
"1.0.1", + "turtledash": "1.0.2" + }, + "devDependencies": { + "@types/node": "22.15.29", + "@typescript-eslint/eslint-plugin": "8.33.0", + "eslint-config-standard": "17.1.0", + "ts-node": "10.9.2", + "typescript": "5.8.3" + }, + "peerDependencies": { + "@sbp/sbp": "2.x" + }, + "eslintConfig": { + "root": true, + "env": { + "browser": true, + "commonjs": true, + "node": true + }, + "parser": "@typescript-eslint/parser", + "plugins": [ + "@typescript-eslint" + ], + "extends": [ + "plugin:@typescript-eslint/recommended", + "standard" + ] + }, + "eslintIgnore": [ + "dist/*", + "node_modules/*", + "**/*.md" + ] +} diff --git a/renameFiles.mjs b/renameFiles.mjs new file mode 100755 index 0000000..c7dce5d --- /dev/null +++ b/renameFiles.mjs @@ -0,0 +1,65 @@ +#!/usr/bin/env node + +import fs from 'node:fs/promises' +import path from 'node:path' + +// Process a directory recursively, renaming files as needed + +const extMap = { + esm: { + '.d.ts': '.d.mts', + '.js': '.mjs' + }, + cjs: { + '.d.ts': '.d.cts', + '.js': '.cjs' + } +} + +/** + * @param {string} dir Directory name + * @param {Record} ext File extension map + */ +function processDirectory (dir, ext) { + fs.readdir(dir, { withFileTypes: true }).then((entries) => { + return Promise.all(entries.map(async (entry) => { + const fullPath = path.join(dir, entry.name) + if (entry.isDirectory()) { + // Recursively process subdirectories + return processDirectory(fullPath, ext) + } else if (entry.isFile() && Object.keys(ext).some(e => entry.name.endsWith(e))) { + // Generate the new file name + const curExt = Object.keys(ext).find(e => entry.name.endsWith(e)) + if (!curExt) throw new Error('Extension not found') + const newExt = ext[curExt] + const newFullPath = fullPath.slice(0, -curExt.length) + newExt + + if (ext['.js']) { + const jsNewExt = ext['.js'] + await fs.readFile(fullPath, { encoding: 'utf8' }).then((content) => { + const newContent = content + .replace(/(?<=".*)\.js(?=")/g, jsNewExt) + 
.replace(/(?<='.*)\.js(?=')/g, jsNewExt) + return fs.writeFile(fullPath, newContent) + }) + } + + await fs.rename(fullPath, newFullPath).catch((err) => { + console.error(`Error renaming ${fullPath} to ${newFullPath}:`, err) + }) + } + + return null + })) + }).catch(err => { + console.error(`Error reading directory ${dir}:`, err) + }) +} + +if (process.argv[2] === 'esm') { + processDirectory('./dist/esm', extMap.esm) +} else if (process.argv[2] === 'cjs') { + processDirectory('./dist/cjs', extMap.cjs) +} else { + console.error('Invalid dist output') +} diff --git a/src/SPMessage.ts b/src/SPMessage.ts new file mode 100644 index 0000000..05eb296 --- /dev/null +++ b/src/SPMessage.ts @@ -0,0 +1,600 @@ +import type { Key } from '@chelonia/crypto' +import { CURVE25519XSALSA20POLY1305, EDWARDS25519SHA512BATCH, XSALSA20POLY1305, keyId } from '@chelonia/crypto' +import { serdesDeserializeSymbol, serdesSerializeSymbol, serdesTagSymbol } from '@chelonia/serdes' +import { has } from 'turtledash' +import type { EncryptedData } from './encryptedData.js' +import { encryptedIncomingData, encryptedIncomingForeignData, maybeEncryptedIncomingData, unwrapMaybeEncryptedData } from './encryptedData.js' +import { createCID, multicodes } from './functions.js' +import type { SignedData } from './signedData.js' +import { isRawSignedData, isSignedData, rawSignedIncomingData, signedIncomingData } from './signedData.js' +import type { ChelContractKey, ChelContractState, JSONObject, JSONType } from './types.js' + +export type SPKeyType = typeof EDWARDS25519SHA512BATCH | typeof CURVE25519XSALSA20POLY1305 | typeof XSALSA20POLY1305 + +export type SPKeyPurpose = 'enc' | 'sig' | 'sak' + +export type SPKey = { + id: string; + name: string; + purpose: SPKeyPurpose[], + ringLevel: number; + permissions: '*' | string[]; + allowedActions?: '*' | string[]; + meta?: { + quantity?: number; + expires?: number; + private?: { + transient?: boolean; + content?: EncryptedData; + shareable?: boolean; + oldKeys?: 
string; + }, + keyRequest?: { + contractID?: string, + reference?: string | EncryptedData, + } + }, + data: string; + foreignKey?: string; + _notBeforeHeight: number; + _notAfterHeight?: number; + _private?: string; +} +// Allows server to check if the user is allowed to register this type of contract +// TODO: rename 'type' to 'contractName': +export type SPOpContract = { type: string; keys: (SPKey | EncryptedData)[]; parentContract?: string } +export type ProtoSPOpActionUnencrypted = { action: string; data: JSONType; meta: JSONObject } +export type SPOpActionUnencrypted = ProtoSPOpActionUnencrypted | SignedData +export type SPOpActionEncrypted = EncryptedData // encrypted version of SPOpActionUnencrypted +export type SPOpKeyAdd = (SPKey | EncryptedData)[] +export type SPOpKeyDel = (string | EncryptedData)[] +export type SPOpPropSet = { key: string; value: JSONType } +export type ProtoSPOpKeyShare = { contractID: string; keys: SPKey[]; foreignContractID?: string; keyRequestHash?: string, keyRequestHeight?: number } +export type SPOpKeyShare = ProtoSPOpKeyShare | EncryptedData +// TODO encrypted SPOpKeyRequest +export type ProtoSPOpKeyRequest = { + contractID: string; + height: number; + replyWith: SignedData<{ + encryptionKeyId: string; + responseKey: EncryptedData; + }>, + request: string; +} +export type SPOpKeyRequest = ProtoSPOpKeyRequest | EncryptedData +export type ProtoSPOpKeyRequestSeen = { keyRequestHash: string; keyShareHash?: string; success: boolean }; +export type SPOpKeyRequestSeen = ProtoSPOpKeyRequestSeen | EncryptedData; +export type SPKeyUpdate = { + name: string; + id?: string; + oldKeyId: string; + data?: string; + purpose?: string[]; + permissions?: string[]; + allowedActions?: '*' | string[]; + meta?: { + quantity?: number; + expires?: number; + private?: { + transient?: boolean; + content?: string; + shareable?: boolean; + oldKeys?: string; + } + } +} +export type SPOpKeyUpdate = (SPKeyUpdate | EncryptedData)[] + +export type SPOpType = 'c' 
| 'a' | 'ae' | 'au' | 'ka' | 'kd' | 'ku' | 'pu' | 'ps' | 'pd' | 'ks' | 'kr' | 'krs' +type ProtoSPOpValue = SPOpContract | SPOpActionEncrypted | SPOpActionUnencrypted | SPOpKeyAdd | SPOpKeyDel | SPOpPropSet | SPOpKeyShare | SPOpKeyRequest | SPOpKeyRequestSeen | SPOpKeyUpdate +export type ProtoSPOpMap = { + 'c': SPOpContract, + 'ae': SPOpActionEncrypted, + 'au': SPOpActionUnencrypted, + 'ka': SPOpKeyAdd, + 'kd': SPOpKeyDel, + 'ku': SPOpKeyUpdate, + 'pu': never, + 'ps': SPOpPropSet, + 'pd': never, + 'ks': SPOpKeyShare, + 'kr': SPOpKeyRequest, + 'krs': SPOpKeyRequestSeen +} +export type SPOpAtomic = { + [K in keyof ProtoSPOpMap]: [K, ProtoSPOpMap[K]] +}[keyof ProtoSPOpMap][] +export type SPOpValue = ProtoSPOpValue | SPOpAtomic +export type SPOpRaw = [SPOpType, SignedData] +export type SPOpMap = ProtoSPOpMap & { 'a': SPOpAtomic } +export type SPOp = { + [K in keyof SPOpMap]: [K, SPOpMap[K]] +}[keyof SPOpMap] + +export type SPMsgDirection = 'incoming' | 'outgoing' +export type SPHead = { version: '1.0.0', op: SPOpType, height: number, contractID: string | null, previousKeyOp: string | null, previousHEAD: string | null, manifest: string } +type SPMsgParams = { direction: SPMsgDirection, mapping: { key: string, value: string }; head: SPHead; signedMessageData: SignedData } + +// Takes a raw message and processes it so that EncryptedData and SignedData +// attributes are defined +const decryptedAndVerifiedDeserializedMessage = (head: SPHead, headJSON: string, contractID: string, parsedMessage: SPOpValue, additionalKeys: Record | undefined, state: ChelContractState): SPOpValue => { + const op = head.op + const height = head.height + + const message: SPOpValue = op === SPMessage.OP_ACTION_ENCRYPTED + ? 
encryptedIncomingData(contractID, state, parsedMessage as [string, string], height, additionalKeys, headJSON, undefined) + : parsedMessage + + // If the operation is SPMessage.OP_KEY_ADD or SPMessage.OP_KEY_UPDATE, + // extract encrypted data from key.meta?.private?.content + if (([SPMessage.OP_KEY_ADD, SPMessage.OP_KEY_UPDATE] as SPOpType[]).includes(op as SPOpType)) { + return (message as SPOpKeyAdd | SPOpKeyUpdate).map((key) => { + return maybeEncryptedIncomingData(contractID, state, key as SPKey, height, additionalKeys, headJSON, (key) => { + if (key.meta?.private?.content) { + key.meta.private.content = encryptedIncomingData(contractID, state, key.meta.private.content as unknown as [string, string], height, additionalKeys, headJSON, (value) => { + // Validator function to verify the key matches its expected ID + const computedKeyId = keyId(value) + if (computedKeyId !== key.id) { + throw new Error(`Key ID mismatch. Expected to decrypt key ID ${key.id} but got ${computedKeyId}`) + } + }) + } + // key.meta?.keyRequest?.contractID could be optionally encrypted + if (key.meta?.keyRequest?.reference) { + try { + key.meta.keyRequest.reference = maybeEncryptedIncomingData(contractID, state, key.meta.keyRequest.reference as string, height, additionalKeys, headJSON)?.valueOf() + } catch { + // If we couldn't decrypt it, this value is of no use to us (we + // can't keep track of key requests and key shares), so we delete it + delete key.meta.keyRequest.reference + } + } + // key.meta?.keyRequest?.contractID could be optionally encrypted + if (key.meta?.keyRequest?.contractID) { + try { + key.meta.keyRequest.contractID = maybeEncryptedIncomingData(contractID, state, key.meta.keyRequest.contractID, height, additionalKeys, headJSON)?.valueOf() + } catch { + // If we couldn't decrypt it, this value is of no use to us (we + // can't keep track of key requests and key shares), so we delete it + delete key.meta.keyRequest.contractID + } + } + }) + }) + } + + // If the 
operation is SPMessage.OP_CONTRACT, + // extract encrypted data from keys?.[].meta?.private?.content + if (op === SPMessage.OP_CONTRACT) { + (message as SPOpContract).keys = (message as SPOpContract).keys?.map((key) => { + return maybeEncryptedIncomingData(contractID, state, key as SPKey, height, additionalKeys, headJSON, (key) => { + if (!key.meta?.private?.content) return + // The following two lines are commented out because this feature + // (using a foreign decryption contract) doesn't seem to be in use and + // the use case seems unclear. + // const decryptionFn = key.meta.private.foreignContractID ? encryptedIncomingForeignData : encryptedIncomingData + // const decryptionContract = key.meta.private.foreignContractID ? key.meta.private.foreignContractID : contractID + const decryptionFn = encryptedIncomingData + const decryptionContract = contractID + key.meta.private.content = decryptionFn(decryptionContract, state as never, key.meta.private.content as unknown as [string, string], height as never, additionalKeys, headJSON, (value) => { + const computedKeyId = keyId(value) + if (computedKeyId !== key.id) { + throw new Error(`Key ID mismatch. Expected to decrypt key ID ${key.id} but got ${computedKeyId}`) + } + }) + }) + }) + } + + // If the operation is SPMessage.OP_KEY_SHARE, + // extract encrypted data from keys?.[].meta?.private?.content + if (op === SPMessage.OP_KEY_SHARE) { + return maybeEncryptedIncomingData(contractID, state, message as ProtoSPOpKeyShare, height, additionalKeys, headJSON, (message) => { + message.keys?.forEach((key) => { + if (!key.meta?.private?.content) return + const decryptionFn = message.foreignContractID ? 
encryptedIncomingForeignData : encryptedIncomingData + const decryptionContract = message.foreignContractID || contractID + key.meta.private.content = decryptionFn(decryptionContract, state as never, key.meta.private.content as unknown as [string, string], height as never, additionalKeys, headJSON, (value) => { + const computedKeyId = keyId(value) + if (computedKeyId !== key.id) { + throw new Error(`Key ID mismatch. Expected to decrypt key ID ${key.id} but got ${computedKeyId}`) + } + }) + }) + }) + } + + // If the operation is OP_KEY_REQUEST, the payload might be EncryptedData + // The ReplyWith attribute is SignedData + if (op === SPMessage.OP_KEY_REQUEST) { + return maybeEncryptedIncomingData(contractID, state, message as ProtoSPOpKeyRequest, height, additionalKeys, headJSON, (msg) => { + msg.replyWith = signedIncomingData(msg.contractID, undefined, msg.replyWith as unknown as { _signedData: [string, string, string] }, msg.height, headJSON) + }) + } + + // If the operation is OP_ACTION_UNENCRYPTED, it may contain an inner + // signature + // Actions must be signed using a key for the current contract + if (op === SPMessage.OP_ACTION_UNENCRYPTED && isRawSignedData(message)) { + return signedIncomingData(contractID, state, message, height, headJSON) + } + + // Inner signatures are handled by EncryptedData + if (op === SPMessage.OP_ACTION_ENCRYPTED) { + return message + } + + if (op === SPMessage.OP_KEY_DEL) { + return (message as SPOpKeyDel).map((key) => { + return maybeEncryptedIncomingData(contractID, state, key as unknown as string, height, additionalKeys, headJSON, undefined) + }) + } + + if (op === SPMessage.OP_KEY_REQUEST_SEEN) { + return maybeEncryptedIncomingData(contractID, state, parsedMessage as unknown as ProtoSPOpKeyRequestSeen, height, additionalKeys, headJSON, undefined) + } + + // If the operation is OP_ATOMIC, call this function recursively + if (op === SPMessage.OP_ATOMIC) { + return (message as SPOpAtomic) + .map(([opT, opV]) => + [ + opT, + 
decryptedAndVerifiedDeserializedMessage({ ...head, op: opT }, headJSON, contractID, opV, additionalKeys, state) + ] + ) as SPOpAtomic + } + + return message +} + +export class SPMessage { + // flow type annotations to make flow happy + _mapping: { key: string, value: string } + _head: SPHead + _message!: SPOpValue + _signedMessageData: SignedData + _direction: SPMsgDirection + _decryptedValue?: unknown + _innerSigningKeyId?: string + + static OP_CONTRACT = 'c' as const + static OP_ACTION_ENCRYPTED = 'ae' as const // e2e-encrypted action + static OP_ACTION_UNENCRYPTED = 'au' as const // publicly readable action + static OP_KEY_ADD = 'ka' as const // add this key to the list of keys allowed to write to this contract, or update an existing key + static OP_KEY_DEL = 'kd' as const // remove this key from authorized keys + static OP_KEY_UPDATE = 'ku' as const // update key in authorized keys + static OP_PROTOCOL_UPGRADE = 'pu' as const + static OP_PROP_SET = 'ps' as const // set a public key/value pair + static OP_PROP_DEL = 'pd' as const // delete a public key/value pair + static OP_CONTRACT_AUTH = 'ca' as const // authorize a contract + static OP_CONTRACT_DEAUTH = 'cd' as const // deauthorize a contract + static OP_ATOMIC = 'a' as const // atomic op + static OP_KEY_SHARE = 'ks' as const // key share + static OP_KEY_REQUEST = 'kr' as const // key request + static OP_KEY_REQUEST_SEEN = 'krs' as const // key request response + + // eslint-disable-next-line camelcase + static createV1_0 ( + { + contractID, + previousHEAD = null, + previousKeyOp = null, + // Height will be automatically set to the correct value when sending + // The reason to set it to Number.MAX_SAFE_INTEGER is so that we can + // temporarily process outgoing messages with signature validation + // still working + height = Number.MAX_SAFE_INTEGER, + op, + manifest + }: { + contractID: string | null, + previousHEAD?: string | null, + previousKeyOp?: string | null, + height?: number, + op: SPOpRaw, + 
manifest: string, + } + ): SPMessage { + const head: SPHead = { + version: '1.0.0', + previousHEAD, + previousKeyOp, + height, + contractID, + op: op[0], + manifest + } + return new this(messageToParams(head, op[1])) + } + + // SPMessage.cloneWith could be used when make a SPMessage object having the same id() + // https://github.com/okTurtles/group-income/issues/1503 + static cloneWith ( + targetHead: SPHead, + targetOp: SPOpRaw, + sources: Partial + ): SPMessage { + const head = Object.assign({}, targetHead, sources) + return new this(messageToParams(head, targetOp[1])) + } + + static deserialize (value: string, additionalKeys?: Record, state?: ChelContractState, unwrapMaybeEncryptedDataFn: (data: SPKey | EncryptedData) => { encryptionKeyId: string | null, data: SPKey } | undefined = unwrapMaybeEncryptedData): SPMessage { + if (!value) throw new Error(`deserialize bad value: ${value}`) + const { head: headJSON, ...parsedValue } = JSON.parse(value) + const head = JSON.parse(headJSON) + const contractID = head.op === SPMessage.OP_CONTRACT ? createCID(value, multicodes.SHELTER_CONTRACT_DATA) : head.contractID + + // Special case for OP_CONTRACT, since the keys are not yet present in the + // state + if (!state?._vm?.authorizedKeys && head.op === SPMessage.OP_CONTRACT) { + const value = rawSignedIncomingData(parsedValue) + const authorizedKeys = Object.fromEntries(value.valueOf()?.keys.map(wk => { + const k = unwrapMaybeEncryptedDataFn(wk) + if (!k) return null + return [k.data.id, k.data] as [string, ChelContractKey] + // eslint-disable-next-line no-use-before-define + }).filter(Boolean as unknown as (x: unknown) => x is [string, ChelContractKey])) + state = { + _vm: { + type: head.type, + authorizedKeys + } + } + } + + const signedMessageData = signedIncomingData( + contractID, state, parsedValue, head.height, headJSON, + (message) => decryptedAndVerifiedDeserializedMessage(head, headJSON, contractID, message, additionalKeys, state!) 
+ ) + + return new this({ + direction: 'incoming', + mapping: { key: createCID(value, multicodes.SHELTER_CONTRACT_DATA), value }, + head, + signedMessageData + }) + } + + static deserializeHEAD (value: string): { head: SPHead; hash: string; contractID: string; isFirstMessage: boolean; description: () => string } { + if (!value) throw new Error(`deserialize bad value: ${value}`) + let head: SPHead, hash: string + const result = { + get head () { + if (head === undefined) { + head = JSON.parse(JSON.parse(value).head) + } + return head + }, + get hash () { + if (!hash) { + hash = createCID(value, multicodes.SHELTER_CONTRACT_DATA) + } + return hash + }, + get contractID () { + return result.head?.contractID ?? result.hash + }, + // `description` is not a getter to prevent the value from being copied + // if the object is cloned or serialized + description (): string { + const type = this.head.op + // NOTE(review): template literal reconstructed (its `<…>` contents were stripped during extraction) — confirm against upstream + return `<op_${type}|${this.hash} of ${this.contractID}>` + }, + get isFirstMessage (): boolean { + return !result.head?.contractID + } + } + return result + } + + constructor (params: SPMsgParams) { + this._direction = params.direction + this._mapping = params.mapping + this._head = params.head + this._signedMessageData = params.signedMessageData + + // perform basic sanity check + const type = this.opType() + let atomicTopLevel = true + const validate = (type: string, message: SPOpValue) => { + switch (type) { + case SPMessage.OP_CONTRACT: + if (!this.isFirstMessage() || !atomicTopLevel) throw new Error('OP_CONTRACT: must be first message') + break + case SPMessage.OP_ATOMIC: + if (!atomicTopLevel) { + throw new Error('OP_ATOMIC not allowed inside of OP_ATOMIC') + } + if (!Array.isArray(message)) { + throw new TypeError('OP_ATOMIC must be of an array type') + } + atomicTopLevel = false; + (message as SPOpAtomic).forEach(([t, m]) => validate(t, m)) + break + case SPMessage.OP_KEY_ADD: + case SPMessage.OP_KEY_DEL: + case SPMessage.OP_KEY_UPDATE: + if (!Array.isArray(message)) throw new TypeError('OP_KEY_{ADD|DEL|UPDATE} 
must be of an array type') + break + case SPMessage.OP_KEY_SHARE: + case SPMessage.OP_KEY_REQUEST: + case SPMessage.OP_KEY_REQUEST_SEEN: + case SPMessage.OP_ACTION_ENCRYPTED: + case SPMessage.OP_ACTION_UNENCRYPTED: + // nothing for now + break + default: + throw new Error(`unsupported op: ${type}`) + } + } + + // this._message is set as a getter to verify the signature only once the + // message contents are read + Object.defineProperty(this, '_message', { + get: ((validated?: boolean) => () => { + const message = this._signedMessageData.valueOf() + // If we haven't validated the message, validate it now + if (!validated) { + validate(type, message) + validated = true + } + return message + })() + }) + } + + decryptedValue (): unknown | undefined { + if (this._decryptedValue) return this._decryptedValue + try { + const value = this.message() + // TODO: This uses `unwrapMaybeEncryptedData` instead of a configurable + // version based on `skipDecryptionAttempts`. This is fine based on current + // use, and also something else might be confusing based on the explicit + // name of this function, `decryptedValue`. + const data = unwrapMaybeEncryptedData(value) + // Did decryption succeed? (unwrapMaybeEncryptedData will return undefined + // on failure) + if (data?.data) { + // The data inside could be signed. 
In this case, we unwrap that to get + // to the inner contents + if (isSignedData(data.data)) { + this._innerSigningKeyId = data.data.signingKeyId + this._decryptedValue = data.data.valueOf() + } else { + this._decryptedValue = data.data + } + } + return this._decryptedValue + } catch { + // Signature or encryption error + // We don't log this error because it's already logged when the value is + // retrieved + return undefined + } + } + + innerSigningKeyId (): string | undefined { + if (!this._decryptedValue) { + this.decryptedValue() + } + return this._innerSigningKeyId + } + + head (): SPHead { return this._head } + + message (): SPOpValue { return this._message } + + op (): SPOp { return [this.head().op, this.message()] as SPOp } + + rawOp (): SPOpRaw { return [this.head().op, this._signedMessageData] } + + opType (): SPOpType { return this.head().op } + + opValue (): SPOpValue { return this.message() } + + signingKeyId (): string { return this._signedMessageData.signingKeyId } + + manifest (): string { return this.head().manifest } + + description (): string { + const type = this.opType() + // NOTE(review): template literal and branch reconstructed (their `<…>` contents were stripped during extraction) — confirm against upstream + let desc = `<op_${type}` + if (type === SPMessage.OP_ACTION_UNENCRYPTED) { + const value = this.opValue() + if (typeof (value as ProtoSPOpActionUnencrypted).action === 'string') { + desc += `|${(value as ProtoSPOpActionUnencrypted).action}` + } + } + return `${desc}|${this.hash()} of ${this.contractID()}>` + } + + isFirstMessage (): boolean { return !this.head().contractID } + + contractID (): string { return this.head().contractID || this.hash() } + + serialize (): string { return this._mapping.value } + + hash (): string { return this._mapping.key } + + previousKeyOp (): string | null { return this._head.previousKeyOp } + + height (): number { return this._head.height } + + id (): string { + // TODO: Schedule for later removal + throw new Error('SPMessage.id() was called but it has been removed') + } + + direction (): 'incoming' | 'outgoing' { + return this._direction + } + + // `isKeyOp` is used to filter out non-key operations for providing an + // abbreviated chain for snapshot validation + isKeyOp (): boolean { + let value: SPOpValue + return !!( + (keyOps as SPOpType[]).includes(this.opType()) || + (this.opType() === SPMessage.OP_ATOMIC && 
Array.isArray(value = this.opValue()) && (value as SPOpAtomic).some(([opT]) => { + return (keyOps as SPOpType[]).includes(opT) + })) + ) + } + + static get [serdesTagSymbol] () { + return 'SPMessage' + } + + static [serdesSerializeSymbol] (m: SPMessage) { + return [m.serialize(), m.direction(), m.decryptedValue(), m.innerSigningKeyId()] + } + + static [serdesDeserializeSymbol] ([serialized, direction, decryptedValue, innerSigningKeyId]: [string, SPMsgDirection, object, string]) { + const m = SPMessage.deserialize(serialized) + m._direction = direction + m._decryptedValue = decryptedValue + m._innerSigningKeyId = innerSigningKeyId + return m + } +} + +function messageToParams (head: SPHead, message: SignedData): SPMsgParams { + // NOTE: the JSON strings generated here must be preserved forever. + // do not ever regenerate this message using the constructor. + // instead store it using serialize() and restore it using deserialize(). + // The issue is that different implementations of JavaScript engines might generate different strings + // when serializing JS objects using JSON.stringify + // and that would lead to different hashes resulting from createCID. + // So to get around this we save the serialized string upon creation + // and keep a copy of it (instead of regenerating it as needed). + // https://github.com/okTurtles/group-income/pull/1513#discussion_r1142809095 + let mapping: { key: string, value: string } + return { + direction: has(message, 'recreate') ? 
'outgoing' : 'incoming', + // Lazy computation of mapping to prevent us from serializing outgoing + // atomic operations + get mapping () { + if (!mapping) { + const headJSON = JSON.stringify(head) + const messageJSON = { ...message.serialize(headJSON), head: headJSON } + const value = JSON.stringify(messageJSON) + + mapping = { + key: createCID(value, multicodes.SHELTER_CONTRACT_DATA), + value + } + } + return mapping + }, + head, + signedMessageData: message + } +} + +// Operations that affect valid keys +const keyOps = [SPMessage.OP_CONTRACT, SPMessage.OP_KEY_ADD, SPMessage.OP_KEY_DEL, SPMessage.OP_KEY_UPDATE] diff --git a/src/Secret.ts b/src/Secret.ts new file mode 100644 index 0000000..b3694bd --- /dev/null +++ b/src/Secret.ts @@ -0,0 +1,31 @@ +import { serdesDeserializeSymbol, serdesSerializeSymbol, serdesTagSymbol } from '@chelonia/serdes' + +/* Wrapper class for secrets, which identifies them as such and prevents them +from being logged */ + +// Use a `WeakMap` to store the actual secret outside of the returned `Secret` +// object. This ensures that the only way to access the secret is via the +// `.valueOf()` method, and it prevents accidentally logging things that +// shouldn't be logged. +const wm = new WeakMap() +export class Secret { + static [serdesDeserializeSymbol] (secret: T) { + return new this(secret) + } + + static [serdesSerializeSymbol] (secret: Secret) { + return wm.get(secret) + } + + static get [serdesTagSymbol] () { + return '__chelonia_Secret' + } + + constructor (value: T) { + wm.set(this, value) + } + + valueOf (): T { + return wm.get(this) + } +} diff --git a/src/chelonia-utils.ts b/src/chelonia-utils.ts new file mode 100644 index 0000000..9643fe9 --- /dev/null +++ b/src/chelonia-utils.ts @@ -0,0 +1,32 @@ +import sbp from '@sbp/sbp' +import type { ChelKvOnConflictCallback, JSONType } from './types.js' + +// This file contains non-core parts of Chelonia, i.e., functionality that is +// useful but optional. 
The threshold for something being 'optional' generally +// is something that can be implemented externally using only public Chelonia +// selectors. +// Optional functionality can make certain assumptions about contracts or +// actions to make things simpler or easier to implement. +// Currently, a single selector is defined: 'chelonia/kv/queuedSet'. +// TODO: Other things should be moved to this file, such as `encryptedAction` +// (the wrapper) and 'gi.actions/out/rotateKeys'. + +export default sbp('sbp/selectors/register', { + // This selector is a wrapper for the `chelonia/kv/set` selector that uses + // the contract queue and allows referring to keys by name, with default key + // names set to `csk` and `cek` for signatures and encryption, respectively. + // For most 'simple' use cases, this selector is a better choice than + // `chelonia/kv/set`. However, the `chelonia/kv/set` primitive is needed if + // the queueing logic needs to be more advanced, the key to use requires + // custom logic or _if the `onconflict` callback also needs to be queued_. 
+ 'chelonia/kv/queuedSet': ({ contractID, key, data, onconflict, ifMatch, encryptionKeyName = 'cek', signingKeyName = 'csk' }: { contractID: string, key: string, data: JSONType, onconflict?: ChelKvOnConflictCallback, ifMatch?: string, encryptionKeyName: string, signingKeyName: string }) => { + return sbp('chelonia/queueInvocation', contractID, () => { + return sbp('chelonia/kv/set', contractID, key, data, { + ifMatch, + encryptionKeyId: sbp('chelonia/contract/currentKeyIdByName', contractID, encryptionKeyName), + signingKeyId: sbp('chelonia/contract/currentKeyIdByName', contractID, signingKeyName), + onconflict + }) + }) + } +}) as string[] diff --git a/src/chelonia.ts b/src/chelonia.ts new file mode 100644 index 0000000..bdfc06d --- /dev/null +++ b/src/chelonia.ts @@ -0,0 +1,2068 @@ +import '@sbp/okturtles.eventqueue' +import '@sbp/okturtles.events' +import sbp from '@sbp/sbp' +import { cloneDeep, delay, difference, has, intersection, merge, randomHexString, randomIntFromRange } from 'turtledash' +import { createCID, parseCID } from './functions.js' +import { Buffer } from 'buffer' +import { NOTIFICATION_TYPE, createClient } from './pubsub/index.js' +import type { SPKey, SPKeyPurpose, SPOpActionUnencrypted, SPOpContract, SPOpKeyAdd, SPOpKeyDel, SPOpKeyRequest, SPOpKeyRequestSeen, SPOpKeyShare, SPOpKeyUpdate, SPOpValue } from './SPMessage.js' +import type { Key } from '@chelonia/crypto' +import { EDWARDS25519SHA512BATCH, deserializeKey, keyId, keygen, serializeKey } from '@chelonia/crypto' +import { ChelErrorResourceGone, ChelErrorUnexpected, ChelErrorUnexpectedHttpResponseCode, ChelErrorUnrecoverable } from './errors.js' +import { CHELONIA_RESET, CONTRACTS_MODIFIED, CONTRACT_REGISTERED } from './events.js' +import { SPMessage } from './SPMessage.js' +import type { Secret } from './Secret.js' +import './chelonia-utils.js' +import type { EncryptedData } from './encryptedData.js' +import { encryptedOutgoingData, encryptedOutgoingDataWithRawKey, isEncryptedData, 
maybeEncryptedIncomingData, unwrapMaybeEncryptedData } from './encryptedData.js' +import './files.js' +import './internals.js' +import { isSignedData, signedIncomingData, signedOutgoingData, signedOutgoingDataWithRawKey } from './signedData.js' +import './time-sync.js' +import { buildShelterAuthorizationHeader, checkCanBeGarbageCollected, clearObject, collectEventStream, eventsAfter, findForeignKeysByContractID, findKeyIdByName, findRevokedKeyIdsByName, findSuitableSecretKeyId, getContractIDfromKeyId, handleFetchResult, reactiveClearObject } from './utils.js' +import { ChelContractKey, ChelContractProcessMessageObject, ChelContractSideeffectMutationObject, ChelContractState, ChelKvOnConflictCallback, ChelRootState, CheloniaConfig, CheloniaContext, CheloniaContractCtx, JSONType, ParsedEncryptedOrUnencryptedMessage } from './types.js' +import type { Options as PubSubOptions, PubSubClient } from './pubsub/index.js' + +// TODO: define ChelContractType for /defineContract + +export type ChelRegParams = { + contractName: string; + server?: string; // TODO: implement! + data: object; + signingKeyId: string; + actionSigningKeyId: string; + actionEncryptionKeyId?: string | null | undefined; + keys: (SPKey | EncryptedData)[]; + namespaceRegistration?: string | null | undefined; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + postpublishContract?: (msg: SPMessage) => void; + preSendCheck?: (msg: SPMessage, state: ChelContractState) => void; + beforeRequest?: (msg1: SPMessage, msg2: SPMessage) => Promise | void; + prepublish?: (msg: SPMessage) => Promise | void; + postpublish?: (msg: SPMessage) => Promise | void; + onprocessed?: (msg: SPMessage) => Promise | void; + }; + publishOptions?: { headers?: Record | null | undefined, billableContractID?: string | null | undefined, maxAttempts?: number | null | undefined }; +} + +export type ChelActionParams = { + action: string; + server?: string; // TODO: implement! 
+ contractID: string; + data: object; + signingKeyId: string; + innerSigningKeyId: string; + encryptionKeyId?: string | null | undefined; + encryptionKey?: Key | null | undefined, + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise | void; + postpublish?: (msg: SPMessage) => Promise | void; + }; + publishOptions?: { maxAttempts?: number }; + atomic: boolean; +} + +export type ChelKeyAddParams = { + contractName: string; + contractID: string; + data: SPOpKeyAdd; + signingKeyId: string; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise | void; + postpublish?: (msg: SPMessage) => Promise | void; + }; + publishOptions?: { maxAttempts?: number }; + atomic: boolean; +} + +export type ChelKeyDelParams = { + contractName: string; + contractID: string; + data: SPOpKeyDel; + signingKeyId: string; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise; + postpublish?: (msg: SPMessage) => Promise; + }; + publishOptions?: { maxAttempts?: number }; + atomic: boolean; +} + +export type ChelKeyUpdateParams = { + contractName: string; + contractID: string; + data: SPOpKeyUpdate; + signingKeyId: string; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise; + postpublish?: (msg: SPMessage) => Promise; + }; + publishOptions?: { maxAttempts?: number }; + atomic: boolean; +} + +export type ChelKeyShareParams = { + originatingContractID?: string; + originatingContractName?: string; + contractID: string; + contractName: string; + data: SPOpKeyShare; + signingKeyId?: string; + signingKey?: Key; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise; + postpublish?: (msg: SPMessage) => Promise; + }; + publishOptions?: { maxAttempts: number }; + atomic: boolean; +} + +export type ChelKeyRequestParams = { + originatingContractID: string; + 
originatingContractName: string; + contractName: string; + contractID: string; + signingKeyId: string; + innerSigningKeyId: string; + encryptionKeyId: string; + innerEncryptionKeyId: string; + encryptKeyRequestMetadata?: boolean; + permissions?: '*' | string[]; + allowedActions?: '*' | string[]; + // Arbitrary data the requester can use as reference (e.g., the hash + // of the user-initiated action that triggered this key request) + reference?: string; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise; + postpublish?: (msg: SPMessage) => Promise; + }; + publishOptions?: { maxAttempts?: number }; + atomic: boolean; +} + +export type ChelKeyRequestResponseParams = { + contractName: string; + contractID: string; + data: SPOpKeyRequestSeen; + signingKeyId: string; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise; + postpublish?: (msg: SPMessage) => Promise; + }; + publishOptions?: { maxAttempts?: number }; + atomic: boolean; +} + +export type ChelAtomicParams = { + originatingContractID: string; + originatingContractName: string; + contractName: string; + contractID: string; + signingKeyId: string; + data: [sel: string, data: ChelActionParams | ChelKeyRequestParams | ChelKeyShareParams][]; + hooks?: { + prepublishContract?: (msg: SPMessage) => void; + prepublish?: (msg: SPMessage) => Promise; + postpublish?: (msg: SPMessage) => Promise; + }; + publishOptions?: { maxAttempts?: number }; +} + +export { SPMessage } + +export const ACTION_REGEX: RegExp = /^((([\w.]+)\/([^/]+))(?:\/(?:([^/]+)\/)?)?)\w*/ +// ACTION_REGEX.exec('gi.contracts/group/payment/process') +// 0 => 'gi.contracts/group/payment/process' +// 1 => 'gi.contracts/group/payment/' +// 2 => 'gi.contracts/group' +// 3 => 'gi.contracts' +// 4 => 'group' +// 5 => 'payment' + +export default sbp('sbp/selectors/register', { + // https://www.wordnik.com/words/chelonia + // 
https://gitlab.okturtles.org/okturtles/group-income/-/wikis/E2E-Protocol/Framework.md#alt-names + 'chelonia/_init': function (this: CheloniaContext) { + this.config = { + // TODO: handle connecting to multiple servers for federation + get connectionURL () { throw new Error('Invalid use of connectionURL before initialization') }, + // override! + set connectionURL (value: string) { Object.defineProperty(this, 'connectionURL', { value, writable: true }) }, + stateSelector: 'chelonia/private/state', // override to integrate with, for example, vuex + contracts: { + defaults: { + modules: {}, // '' => resolved module import + exposedGlobals: {}, + allowedDomains: [], + allowedSelectors: [], + preferSlim: false + }, + overrides: {}, // override default values per-contract + manifests: {} // override! contract names => manifest hashes + }, + whitelisted: (action: string): boolean => !!this.whitelistedActions[action], + reactiveSet: (obj, key, value) => { obj[key] = value; return value }, // example: set to Vue.set + fetch: (...args) => fetch(...args), + reactiveDel: (obj, key) => { delete obj[key] }, + // acceptAllMessages disables checking whether we are expecting a message + // or not for processing + acceptAllMessages: false, + skipActionProcessing: false, + skipDecryptionAttempts: false, + skipSideEffects: false, + // Strict processing will treat all processing errors as unrecoverable + // This is useful, e.g., in the server, to prevent invalid messages from + // being added to the database + strictProcessing: false, + // Strict ordering will throw on past events with ChelErrorAlreadyProcessed + // Similarly, future events will not be reingested and will throw + // with ChelErrorDBBadPreviousHEAD + strictOrdering: false, + connectionOptions: { + maxRetries: Infinity, // See https://github.com/okTurtles/group-income/issues/1183 + reconnectOnTimeout: true // can be enabled since we are not doing auth via web sockets + }, + hooks: { + preHandleEvent: 
null, // async (message: SPMessage) => {} + postHandleEvent: null, // async (message: SPMessage) => {} + processError: null, // (e: Error, message: SPMessage) => {} + sideEffectError: null, // (e: Error, message: SPMessage) => {} + handleEventError: null, // (e: Error, message: SPMessage) => {} + syncContractError: null, // (e: Error, contractID: string) => {} + pubsubError: null // (e:Error, socket: Socket) + }, + unwrapMaybeEncryptedData + } + // Used in publishEvent to cancel sending events after reset (logout) + this._instance = Object.create(null) + this.abortController = new AbortController() + this.state = { + contracts: {}, // contractIDs => { type, HEAD } (contracts we've subscribed to) + pending: [] // prevents processing unexpected data from a malicious server + } + this.manifestToContract = {} + this.whitelistedActions = {} + this.currentSyncs = Object.create(null) + this.postSyncOperations = Object.create(null) + this.sideEffectStacks = Object.create(null) // [contractID]: Array + this.sideEffectStack = (contractID: string) => { + let stack = this.sideEffectStacks[contractID] + if (!stack) { + this.sideEffectStacks[contractID] = stack = [] + } + return stack + } + // setPostSyncOp defines operations to be run after all recent events have + // been processed. This is useful, for example, when responding to + // OP_KEY_REQUEST, as we want to send an OP_KEY_SHARE only to yet-unanswered + // requests, which is information in the future (from the point of view of + // the event handler). 
+ // We could directly enqueue the operations, but by using a map we avoid + // enqueueing more operations than necessary + // The operations defined here will be executed: + // (1) After a call to /sync or /syncContract; or + // (2) After an event has been handled, if it was received on a web socket + this.setPostSyncOp = (contractID: string, key: string, op: Parameters) => { + this.postSyncOperations[contractID] = this.postSyncOperations[contractID] || Object.create(null) + this.postSyncOperations[contractID][key] = op + } + const secretKeyGetter = (o: Record, p: string) => { + if (has(o, p)) return o[p] + const rootState = sbp(this.config.stateSelector) + if (rootState?.secretKeys && has(rootState.secretKeys, p)) { + const key = deserializeKey(rootState.secretKeys[p]) + o[p] = key + return key + } + } + const secretKeyList = (o: Record) => { + const rootState = sbp(this.config.stateSelector) + const stateKeys = Object.keys(rootState?.secretKeys || {}) + return Array.from(new Set([...Object.keys(o), ...stateKeys])) + } + this.transientSecretKeys = new Proxy(Object.create(null), { + get: secretKeyGetter, + ownKeys: secretKeyList + }) + this.ephemeralReferenceCount = Object.create(null) + // subscriptionSet includes all the contracts in state.contracts for which + // we can process events (contracts for which we have called /sync) + // The reason we can't use, e.g., Object.keys(state.contracts), is that + // when resetting the state (calling /reset, e.g., after logging out) we may + // still receive events for old contracts that belong to the old session. + // Those events must be ignored or discarded until the new session is set up + // (i.e., login has finished running) because we don't necessarily have + // all the information needed to process events in those contracts, such as + // secret keys. + // A concrete example is: + // 1. user1 logs in to the group and rotates the group keys, then logs out + // 2. user2 logs in to the group. + // 3. 
If an event came over the web socket for the group, we must not + // process it before we've processed the OP_KEY_SHARE containing the + // new keys, or else we'll build an incorrect state. + // The example above is simplified, but this is even more of an issue + // when there is a third contract (for example, a group chatroom) using + // those rotated keys as foreign keys. + this.subscriptionSet = new Set() + // pending includes contracts that are scheduled for syncing or in the + // process of syncing for the first time. After sync completes for the + // first time, they are removed from pending and added to subscriptionSet + this.pending = [] + }, + 'chelonia/config': function (this: CheloniaContext) { + return { + ...cloneDeep(this.config), + fetch: this.config.fetch, + reactiveSet: this.config.reactiveSet, + reactiveDel: this.config.reactiveDel + } + }, + 'chelonia/configure': async function (this: CheloniaContext, config: CheloniaConfig) { + merge(this.config, config) + // merge will strip the hooks off of config.hooks when merging from the root of the object + // because they are functions and cloneDeep doesn't clone functions + Object.assign(this.config.hooks, config.hooks || {}) + // using Object.assign here instead of merge to avoid stripping away imported modules + if (config.contracts) { + Object.assign(this.config.contracts.defaults, config.contracts.defaults || {}) + const manifests = this.config.contracts.manifests + console.debug('[chelonia] preloading manifests:', Object.keys(manifests)) + for (const contractName in manifests) { + await sbp('chelonia/private/loadManifest', contractName, manifests[contractName]) + } + } + if (has(config, 'skipDecryptionAttempts')) { + if (config.skipDecryptionAttempts) { + this.config.unwrapMaybeEncryptedData = (data) => { + if (!isEncryptedData(data)) { + return { + encryptionKeyId: null, data + } + } + } + } else { + this.config.unwrapMaybeEncryptedData = unwrapMaybeEncryptedData + } + } + }, + 'chelonia/reset': 
async function (this: CheloniaContext, newState: ChelRootState | undefined, postCleanupFn?: () => Promise | void) { + // Allow optional newState OR postCleanupFn + if (typeof newState === 'function' && typeof postCleanupFn === 'undefined') { + postCleanupFn = newState + newState = undefined + } + if (this.pubsub) { + sbp('chelonia/private/stopClockSync') + } + // wait for any pending sync operations to finish before saving + Object.keys(this.postSyncOperations).forEach(cID => { + sbp('chelonia/private/enqueuePostSyncOps', cID) + }) + await sbp('chelonia/contract/waitPublish') + await sbp('chelonia/contract/wait') + // do this again to catch operations that are the result of side-effects + // or post sync ops + Object.keys(this.postSyncOperations).forEach(cID => { + sbp('chelonia/private/enqueuePostSyncOps', cID) + }) + await sbp('chelonia/contract/waitPublish') + await sbp('chelonia/contract/wait') + const result = await postCleanupFn?.() + // The following are all synchronous operations + const rootState = sbp(this.config.stateSelector) + // Cancel all outgoing messages by replacing this._instance + this._instance = Object.create(null) + this.abortController.abort() + this.abortController = new AbortController() + // Remove all contracts, including all contracts from pending + reactiveClearObject(rootState, this.config.reactiveDel) + this.config.reactiveSet(rootState, 'contracts', Object.create(null)) + clearObject(this.ephemeralReferenceCount) + this.pending.splice(0) + clearObject(this.currentSyncs) + clearObject(this.postSyncOperations) + clearObject(this.sideEffectStacks) + const removedContractIDs = Array.from(this.subscriptionSet) + this.subscriptionSet.clear() + sbp('chelonia/clearTransientSecretKeys') + sbp('okTurtles.events/emit', CHELONIA_RESET) + sbp('okTurtles.events/emit', CONTRACTS_MODIFIED, Array.from(this.subscriptionSet), { added: [], removed: removedContractIDs }) + if (this.pubsub) { + sbp('chelonia/private/startClockSync') + } + if (newState) { 
+ Object.entries(newState).forEach(([key, value]) => { + this.config.reactiveSet(rootState, key, value) + }) + } + return result + }, + 'chelonia/storeSecretKeys': function (this: CheloniaContext, wkeys: Secret<{key: Key | string, transient?: boolean}[]>) { + const rootState = sbp(this.config.stateSelector) + if (!rootState.secretKeys) this.config.reactiveSet(rootState, 'secretKeys', Object.create(null)) + let keys = wkeys.valueOf() + if (!keys) return + if (!Array.isArray(keys)) keys = [keys] + keys.forEach(({ key, transient }) => { + if (!key) return + if (typeof key === 'string') { + key = deserializeKey(key) + } + const id = keyId(key) + // Store transient keys in transientSecretKeys + if (!has(this.transientSecretKeys, id)) { + this.transientSecretKeys[id] = key + } + if (transient) return + // If the key is marked as persistent, write it to the state as well + if (!has(rootState.secretKeys, id)) { + this.config.reactiveSet(rootState.secretKeys, id, serializeKey(key, true)) + } + }) + }, + 'chelonia/clearTransientSecretKeys': function (this: CheloniaContext, ids?: string[]) { + if (Array.isArray(ids)) { + ids.forEach((id) => { + delete this.transientSecretKeys[id] + }) + } else { + Object.keys(this.transientSecretKeys).forEach((id) => { + delete this.transientSecretKeys[id] + }) + } + }, + 'chelonia/haveSecretKey': function (this: CheloniaContext, keyId: string, persistent?: boolean) { + if (!persistent && has(this.transientSecretKeys, keyId)) return true + const rootState = sbp(this.config.stateSelector) + return !!rootState?.secretKeys && has(rootState.secretKeys, keyId) + }, + 'chelonia/contract/isResyncing': function (this: CheloniaContext, contractIDOrState: string | ChelContractState) { + if (typeof contractIDOrState === 'string') { + const rootState = sbp(this.config.stateSelector) + contractIDOrState = rootState[contractIDOrState] + } + return !!(contractIDOrState as ChelContractState)?._volatile?.dirty || !!(contractIDOrState as 
ChelContractState)?._volatile?.resyncing + }, + 'chelonia/contract/hasKeyShareBeenRespondedBy': function (this: CheloniaContext, contractIDOrState: string | ChelContractState | null | undefined, requestedToContractID: string, reference?: string): boolean { + if (typeof contractIDOrState === 'string') { + const rootState = sbp(this.config.stateSelector) + contractIDOrState = rootState[contractIDOrState] + } + const result = Object.values((contractIDOrState as ChelContractState)?._vm.authorizedKeys || {}).some((r) => { + return r?.meta?.keyRequest?.responded && r.meta.keyRequest.contractID === requestedToContractID && (!reference || r.meta.keyRequest.reference === reference) + }) + + return result + }, + 'chelonia/contract/waitingForKeyShareTo': function (this: CheloniaContext, contractIDOrState: string | ChelContractState, requestingContractID?: string, reference?: string): null | string[] { + if (typeof contractIDOrState === 'string') { + const rootState = sbp(this.config.stateSelector) as ChelRootState + contractIDOrState = rootState[contractIDOrState] + } + const result = (contractIDOrState as ChelContractState)._volatile?.pendingKeyRequests + ?.filter((r) => { + return r && (!requestingContractID || r.contractID === requestingContractID) && (!reference || r.reference === reference) + }) + ?.map(({ name }) => name) + + if (!result?.length) return null + return result + }, + 'chelonia/contract/successfulKeySharesByContractID': function (this: CheloniaContext, contractIDOrState: string | ChelContractState, requestingContractID?: string) { + if (typeof contractIDOrState === 'string') { + const rootState = sbp(this.config.stateSelector) + contractIDOrState = rootState[contractIDOrState] + } + const keyShares = Object.values((contractIDOrState as ChelContractState)._vm.keyshares || {}) + if (!keyShares?.length) return + const result = Object.create(null) as Record + keyShares.forEach((kS) => { + if (!kS.success) return + if (requestingContractID && kS.contractID !== 
requestingContractID) return + if (!result[kS.contractID]) result[kS.contractID] = [] + result[kS.contractID].push({ height: kS.height, hash: kS.hash }) + }) + Object.keys(result).forEach(cID => { + result[cID].sort((a, b) => { + return b.height - a.height + }) + }) + return result + }, + 'chelonia/contract/hasKeysToPerformOperation': function (this: CheloniaContext, contractIDOrState: string | ChelContractState, operation: string) { + if (typeof contractIDOrState === 'string') { + const rootState = sbp(this.config.stateSelector) + contractIDOrState = rootState[contractIDOrState] + } + const op = (operation !== '*') ? [operation] : operation + return !!findSuitableSecretKeyId((contractIDOrState as ChelContractState), op, ['sig']) + }, + // Did sourceContractIDOrState receive an OP_KEY_SHARE to perform the given + // operation on contractIDOrState? + 'chelonia/contract/receivedKeysToPerformOperation': function (this: CheloniaContext, sourceContractIDOrState: string | ChelContractState, contractIDOrState: string | ChelContractState, operation: string) { + const rootState = sbp(this.config.stateSelector) + if (typeof sourceContractIDOrState === 'string') { + sourceContractIDOrState = rootState[sourceContractIDOrState] + } + if (typeof contractIDOrState === 'string') { + contractIDOrState = rootState[contractIDOrState] + } + const op = (operation !== '*') ? 
[operation] : operation + const keyId = findSuitableSecretKeyId(contractIDOrState as ChelContractState, op, ['sig']) + + return (sourceContractIDOrState as ChelContractState)?._vm?.sharedKeyIds?.some((sK) => sK.id === keyId) + }, + 'chelonia/contract/currentKeyIdByName': function (this: CheloniaContext, contractIDOrState: string | ChelContractState, name: string, requireSecretKey?: boolean) { + if (typeof contractIDOrState === 'string') { + const rootState = sbp(this.config.stateSelector) + contractIDOrState = rootState[contractIDOrState] + } + const currentKeyId = findKeyIdByName(contractIDOrState as ChelContractState, name) + if (requireSecretKey && !sbp('chelonia/haveSecretKey', currentKeyId)) { + return + } + return currentKeyId + }, + 'chelonia/contract/foreignKeysByContractID': function (this: CheloniaContext, contractIDOrState: string | ChelContractState, foreignContractID: string) { + if (typeof contractIDOrState === 'string') { + const rootState = sbp(this.config.stateSelector) + contractIDOrState = rootState[contractIDOrState] + } + return findForeignKeysByContractID(contractIDOrState as ChelContractState, foreignContractID) + }, + 'chelonia/contract/historicalKeyIdsByName': function (this: CheloniaContext, contractIDOrState: string | ChelContractState, name: string) { + if (typeof contractIDOrState === 'string') { + const rootState = sbp(this.config.stateSelector) + contractIDOrState = rootState[contractIDOrState] + } + const currentKeyId = findKeyIdByName(contractIDOrState as ChelContractState, name) + const revokedKeyIds = findRevokedKeyIdsByName(contractIDOrState as ChelContractState, name) + return currentKeyId ? 
[currentKeyId, ...revokedKeyIds] : revokedKeyIds + }, + 'chelonia/contract/suitableSigningKey': function (this: CheloniaContext, contractIDOrState: string | ChelContractState, permissions: '*' | string[], purposes: SPKeyPurpose[], ringLevel?: number, allowedActions?: '*' | string[]) { + if (typeof contractIDOrState === 'string') { + const rootState = sbp(this.config.stateSelector) + contractIDOrState = rootState[contractIDOrState] + } + const keyId = findSuitableSecretKeyId(contractIDOrState as ChelContractState, permissions, purposes, ringLevel, allowedActions) + return keyId + }, + 'chelonia/contract/setPendingKeyRevocation': function (this: CheloniaContext, contractID: string, names: string[]) { + const rootState = sbp(this.config.stateSelector) + const state = rootState[contractID] + + if (!state._volatile) this.config.reactiveSet(state, '_volatile', Object.create(null)) + if (!state._volatile.pendingKeyRevocations) this.config.reactiveSet(state._volatile, 'pendingKeyRevocations', Object.create(null)) + + for (const name of names) { + const keyId = findKeyIdByName(state, name) + if (keyId) { + this.config.reactiveSet(state._volatile.pendingKeyRevocations, keyId, true) + } else { + console.warn('[setPendingKeyRevocation] Unable to find keyId for name', { contractID, name }) + } + } + }, + 'chelonia/shelterAuthorizationHeader' (this: CheloniaContext, contractID: string) { + return buildShelterAuthorizationHeader.call(this, contractID) + }, + // The purpose of the 'chelonia/crypto/*' selectors is so that they can be called + // from contracts without including the crypto code (i.e., importing crypto.js) + // This function takes a function as a parameter that returns a string + // It does not take a string directly to prevent accidentally logging the value, + // which is a secret + 'chelonia/crypto/keyId': (inKey: Secret) => { + return keyId(inKey.valueOf()) + }, + // TODO: allow connecting to multiple servers at once + 'chelonia/connect': function (this: 
CheloniaContext, options: Partial = {}): PubSubClient { + if (!this.config.connectionURL) throw new Error('config.connectionURL missing') + if (!this.config.connectionOptions) throw new Error('config.connectionOptions missing') + if (this.pubsub) { + this.pubsub.destroy() + } + let pubsubURL = this.config.connectionURL + if (process.env.NODE_ENV === 'development') { + // This is temporarily used in development mode to help the server improve + // its console output until we have a better solution. Do not use for auth. + pubsubURL += `?debugID=${randomHexString(6)}` + } + if (this.pubsub) { + sbp('chelonia/private/stopClockSync') + } + sbp('chelonia/private/startClockSync') + this.pubsub = createClient(pubsubURL, { + ...this.config.connectionOptions, + handlers: { + ...options.handlers, + // Every time we get a REQUEST_TYPE.SUB response, which happens for + // 'new' subscriptions as well as every time the connection is reset + 'subscription-succeeded': function (event: CustomEvent<{channelID: string}>) { + const { channelID } = event.detail + // The check below is needed because we could have unsubscribed since + // requesting a subscription from the server. In that case, we don't + // need to call `sync`. + if (this.subscriptionSet.has(channelID)) { + // For new subscriptions, some messages could have been lost + // between the time the subscription was requested and it was + // actually set up. In these cases, force sync contracts to get them + // updated. 
+ sbp('chelonia/private/out/sync', channelID, { force: true }).catch((err: Error) => { + console.warn(`[chelonia] Syncing contract ${channelID} failed: ${err.message}`) + }) + } + options.handlers?.['subscription-succeeded']?.call(this, event) + } + }, + // Map message handlers to transparently handle encryption and signatures + messageHandlers: { + ...(Object.fromEntries( + Object.entries(options.messageHandlers || {}).map(([k, v]) => { + switch (k) { + case NOTIFICATION_TYPE.PUB: + return [k, (msg: { data?: { + height: string, + _signedData: [string, string, string], + }, channelID: string }) => { + if (!msg.channelID) { + console.info('[chelonia] Discarding pub event without channelID') + return + } + if (!this.subscriptionSet.has(msg.channelID)) { + console.info(`[chelonia] Discarding pub event for ${msg.channelID} because it's not in the current subscriptionSet`) + return + } + sbp('chelonia/queueInvocation', msg.channelID, () => { + (v as (this: PubSubClient, msg: ParsedEncryptedOrUnencryptedMessage) => void).call(this.pubsub, parseEncryptedOrUnencryptedMessage<{ channelID: string, data: JSONType }>(this, { + contractID: msg.channelID, + serializedData: msg.data! 
+ })) + }).catch((e: Error) => { + console.error(`[chelonia] Error processing pub event for ${msg.channelID}`, e) + }) + }] + case NOTIFICATION_TYPE.KV: + return [k, (msg: { channelID: string, key: string, data: string }) => { + if (!msg.channelID || !msg.key) { + console.info('[chelonia] Discarding kv event without channelID or key') + return + } + if (!this.subscriptionSet.has(msg.channelID)) { + console.info(`[chelonia] Discarding kv event for ${msg.channelID} because it's not in the current subscriptionSet`) + return + } + sbp('chelonia/queueInvocation', msg.channelID, () => { + (v as unknown as (this: PubSubClient, msg: [string, ParsedEncryptedOrUnencryptedMessage]) => void).call(this.pubsub, [msg.key, parseEncryptedOrUnencryptedMessage(this, { + contractID: msg.channelID, + meta: msg.key, + serializedData: JSON.parse(Buffer.from(msg.data).toString()) + })]) + }).catch((e: unknown) => { + console.error(`[chelonia] Error processing kv event for ${msg.channelID} and key ${msg.key}`, msg, e) + }) + }] + case NOTIFICATION_TYPE.DELETION: + return [k, (msg: { data: unknown }) => (v as unknown as (this: PubSubClient, data: unknown) => void).call(this.pubsub, msg.data)] + default: + return [k, v] + } + }) + )), + [NOTIFICATION_TYPE.ENTRY] (msg) { + // We MUST use 'chelonia/private/in/enqueueHandleEvent' to ensure handleEvent() + // is called AFTER any currently-running calls to 'chelonia/private/out/sync' + // to prevent gi.db from throwing "bad previousHEAD" errors. 
+ // Calling via SBP also makes it simple to implement 'test/backend.js' + const { contractID } = SPMessage.deserializeHEAD(msg.data as string) + sbp('chelonia/private/in/enqueueHandleEvent', contractID, msg.data) + } + } + }) + if (!this.contractsModifiedListener) { + // Keep pubsub in sync (logged into the right "rooms") with 'state.contracts' + this.contractsModifiedListener = () => sbp('chelonia/pubsub/update') + sbp('okTurtles.events/on', CONTRACTS_MODIFIED, this.contractsModifiedListener) + } + return this.pubsub + }, + // This selector is defined primarily for ingesting web push notifications, + // although it can be used as a general-purpose API to process events received + // from other external sources that are not managed by Chelonia itself (i.e. sources + // other than the Chelonia-managed websocket connection and RESTful API). + 'chelonia/handleEvent': async function (event: string) { + const { contractID } = SPMessage.deserializeHEAD(event) + return await sbp('chelonia/private/in/enqueueHandleEvent', contractID, event) + }, + 'chelonia/defineContract': function (this: CheloniaContext, contract: CheloniaContractCtx) { + if (!ACTION_REGEX.exec(contract.name)) throw new Error(`bad contract name: ${contract.name}`) + if (!contract.metadata) contract.metadata = { validate () {}, create: () => ({}) } + if (!contract.getters) contract.getters = {} + contract.state = (contractID) => sbp(this.config.stateSelector)[contractID] + contract.manifest = this.defContractManifest + contract.sbp = this.defContractSBP + this.defContractSelectors = [] + this.defContract = contract + this.defContractSelectors.push(...sbp('sbp/selectors/register', { + // expose getters for Vuex integration and other conveniences + [`${contract.manifest}/${contract.name}/getters`]: () => contract.getters, + // 2 ways to cause sideEffects to happen: by defining a sideEffect function in the + // contract, or by calling /pushSideEffect w/async SBP call. Can also do both. 
+ [`${contract.manifest}/${contract.name}/pushSideEffect`]: (contractID: string, asyncSbpCall: [string, ...unknown[]]) => { + // if this version of the contract is pushing a sideEffect to a function defined by the + // contract itself, make sure that it calls the same version of the sideEffect + const [sel] = asyncSbpCall + if (sel.startsWith(contract.name + '/')) { + asyncSbpCall[0] = `${contract.manifest}/${sel}` + } + this.sideEffectStack(contractID).push(asyncSbpCall) + } + })) + for (const action in contract.actions) { + contractNameFromAction(action) // ensure actions are appropriately named + this.whitelistedActions[action] = true + // TODO: automatically generate send actions here using `${action}/send` + // allow the specification of: + // - the optype (e.g. OP_ACTION_(UN)ENCRYPTED) + // - a localized error message + // - whatever keys should be passed in as well + // base it off of the design of encryptedAction() + this.defContractSelectors.push(...sbp('sbp/selectors/register', { + [`${contract.manifest}/${action}/process`]: async (message: ChelContractProcessMessageObject, state: ChelContractState) => { + const { meta, data, contractID } = message + // TODO: optimize so that you're creating a proxy object only when needed + // TODO: Note: when sandboxing contracts, contracts may not have + // access to the state directly, meaning that modifications would need + // to be re-applied + state = state || contract.state(contractID) + const gProxy = gettersProxy(state, contract.getters) + // These `await` are here to help with sandboxing in the future + // Sandboxing may mean that contracts are executed in another context + // (e.g., a worker), which would require asynchronous communication + // between Chelonia and the contract. 
+ // Even though these are asynchronous calls, contracts should not + // call side effects from these functions + await contract.metadata.validate(meta, { state, ...gProxy, contractID }) + + await contract.actions[action].validate(data, { state, ...gProxy, meta, message, contractID }) + // it's possible that the sideEffect stack got filled up by the call to `processMessage` from + // a call to `publishEvent` (when an outgoing message is being sent). + this.sideEffectStacks[contractID] = [] + await contract.actions[action].process(message, { state, ...gProxy }) + }, + // 'mutation' is an object that's similar to 'message', but not identical + [`${contract.manifest}/${action}/sideEffect`]: async (mutation: ChelContractSideeffectMutationObject, state: ChelContractState) => { + if (contract.actions[action].sideEffect) { + state = state || contract.state(mutation.contractID) + if (!state) { + console.warn(`[${contract.manifest}/${action}/sideEffect]: Skipping side-effect since there is no contract state for contract ${mutation.contractID}`) + return + } + // TODO: Copy to simulate a sandbox boundary without direct access + // as well as to enforce the rule that side-effects must not mutate + // state + const stateCopy = cloneDeep(state) + const gProxy = gettersProxy(stateCopy, contract.getters) + await contract.actions[action].sideEffect!(mutation, { state: stateCopy, ...gProxy }) + } + // since both /process and /sideEffect could call /pushSideEffect, we make sure + // to process the side effects on the stack after calling /sideEffect. + const sideEffects = this.sideEffectStack(mutation.contractID) + while (sideEffects.length > 0) { + const sideEffect = sideEffects.shift() + try { + await contract.sbp(...sideEffect!) 
+ } catch (e_) { + const e = e_ as Error + console.error(`[chelonia] ERROR: '${e.name}' ${e.message}, for pushed sideEffect of ${mutation.description}:`, sideEffect) + this.sideEffectStacks[mutation.contractID] = [] // clear the side effects + throw e + } + } + } + }) as string[]) + } + for (const method in contract.methods) { + this.defContractSelectors.push(...sbp('sbp/selectors/register', { + [`${contract.manifest}/${method}`]: contract.methods[method] + })) + } + sbp('okTurtles.events/emit', CONTRACT_REGISTERED, contract) + }, + 'chelonia/queueInvocation': (contractID: string, sbpInvocation: + Parameters) => { + // We maintain two queues, contractID, used for internal events (i.e., + // from chelonia) and public:contractID, used for operations that need to + // be done after all the current internal events (if any) have + // finished processing. + // Once all of the current internal events (in the contractID queue) + // have completed, the operation requested is put into the public queue. + // The reason for maintaining two different queues is to provide users + // a way to run operations after internal operations have been processed + // (for example, a side-effect might call queueInvocation to do work + // after the current and future events have been processed), without the + // work in these user-functions blocking Chelonia and preventing it from + // processing events. + // For example, a contract could have an action called + // 'example/setProfilePicture'. The side-effect could look like this: + // + // sideEffect ({ data, contractID }, { state }) { + // const profilePictureUrl = data.url + // + // sbp('chelonia/queueInvocation', contractID, () => { + // const rootState = sbp('state/vuex/state') + // if (rootState[contractID].profilePictureUrl !== profilePictureUrl) + // return // The profile picture changed, so we do nothing + // + // // The following could take a long time. We want Chelonia + // // to still work and process events as normal.
+ // return this.config.fetch(profilePictureUrl).then(doSomeWorkWithTheFile) + // }) + // } + return sbp('chelonia/private/queueEvent', contractID, ['chelonia/private/noop']).then(() => sbp('chelonia/private/queueEvent', 'public:' + contractID, sbpInvocation)) + }, + 'chelonia/begin': async (...invocations: Parameters[]) => { + for (const invocation of invocations) { + await sbp(...invocation) + } + }, + // call this manually to resubscribe/unsubscribe from contracts as needed + // if you are using a custom stateSelector and reload the state (e.g. upon login) + 'chelonia/pubsub/update': function (this: CheloniaContext) { + const client = this.pubsub + const subscribedIDs = [...client.subscriptionSet] + const currentIDs = Array.from(this.subscriptionSet) + const leaveSubscribed = intersection(subscribedIDs, currentIDs) + const toUnsubscribe = difference(subscribedIDs, leaveSubscribed) + const toSubscribe = difference(currentIDs, leaveSubscribed) + // There is currently no need to tell other clients about our sub/unsubscriptions. + try { + for (const contractID of toUnsubscribe) { + client.unsub(contractID) + } + for (const contractID of toSubscribe) { + client.sub(contractID) + } + } catch (e) { + console.error(`[chelonia] pubsub/update: error ${(e as Error).name}: ${(e as Error).message}`, { toUnsubscribe, toSubscribe }, e) + this.config.hooks.pubsubError?.(e, client) + } + }, + // resolves when all pending actions for these contractID(s) finish + 'chelonia/contract/wait': function (this: CheloniaContext, contractIDs?: string | string[]): Promise { + const listOfIds = contractIDs + ? (typeof contractIDs === 'string' ? 
[contractIDs] : contractIDs) + : Object.keys(sbp(this.config.stateSelector).contracts) + return Promise.all(listOfIds.flatMap(cID => { + return sbp('chelonia/queueInvocation', cID, ['chelonia/private/noop']) + })) + }, + // resolves when all pending *writes* for these contractID(s) finish + 'chelonia/contract/waitPublish': function (this: CheloniaContext, contractIDs?: string | string[]): Promise { + const listOfIds = contractIDs + ? (typeof contractIDs === 'string' ? [contractIDs] : contractIDs) + : Object.keys(sbp(this.config.stateSelector).contracts) + return Promise.all(listOfIds.flatMap(cID => { + return sbp('chelonia/private/queueEvent', `publish:${cID}`, ['chelonia/private/noop']) + })) + }, + // 'chelonia/contract' - selectors related to injecting remote data and monitoring contracts + // TODO: add an optional parameter to "retain" the contract (see #828) + // eslint-disable-next-line require-await + 'chelonia/contract/sync': async function (this: CheloniaContext, contractIDs: string | string[], params?: { resync?: boolean }): Promise { + // The exposed `chelonia/contract/sync` selector is meant for users of + // Chelonia and not for internal use within Chelonia. + // It should only be called after `/retain` where needed (for example, when + // starting up Chelonia with a saved state) + const listOfIds = typeof contractIDs === 'string' ? [contractIDs] : contractIDs + // Verify that there's a valid reference count + listOfIds.forEach((id) => { + if (checkCanBeGarbageCollected.call(this, id)) { + if (process.env.CI) { + Promise.reject(new Error('[chelonia] Missing reference count for contract ' + id)) + } + console.error('[chelonia] Missing reference count for contract ' + id) + throw new Error('Missing reference count for contract') + } + }) + // Call the internal sync selector. 
`force` is always true as using `/sync` + // besides internally is only needed to force sync a contract + return sbp('chelonia/private/out/sync', listOfIds, { ...params, force: true }) + }, + 'chelonia/contract/isSyncing': function (this: CheloniaContext, contractID: string, { firstSync = false } = {}): boolean { + const isSyncing = !!this.currentSyncs[contractID] + return firstSync + ? isSyncing && this.currentSyncs[contractID].firstSync + : isSyncing + }, + 'chelonia/contract/currentSyncs': function (this: CheloniaContext) { + return Object.keys(this.currentSyncs) + }, + // Because `/remove` is done asynchronously and a contract might be removed + // much later than when the call to remove was made, an optional callback + // can be passed to verify whether to proceed with removal. This is used as + // part of the `/release` mechanism to prevent removing contracts that have + // acquired new references since the call to `/remove`. + 'chelonia/contract/remove': function ( + this: CheloniaContext, + contractIDs: string | string[], + { confirmRemovalCallback, permanent }: { + confirmRemovalCallback?: (contractID: string) => boolean, + permanent?: boolean + } = {} + ): Promise { + const rootState = sbp(this.config.stateSelector) + const listOfIds = typeof contractIDs === 'string' ? [contractIDs] : contractIDs + return Promise.all(listOfIds.map(contractID => { + if (!rootState?.contracts?.[contractID]) { + return undefined + } + + return sbp('chelonia/private/queueEvent', contractID, () => { + // This allows us to double-check that the contract is meant to be + // removed, as circumstances could have changed from the time remove + // was called and this function is executed. For example, `/release` + // makes a synchronous check, but processing of other events since + // require this to be re-checked (in this case, for reference counts). 
+ if (confirmRemovalCallback && !confirmRemovalCallback(contractID)) { + return + } + const rootState = sbp(this.config.stateSelector) + const fkContractIDs = Array.from(new Set(Object.values((rootState[contractID] as ChelContractState)?._vm?.authorizedKeys ?? {}).filter((k) => { + return !!k.foreignKey + }).map((k) => { + try { + const fkUrl = new URL(k.foreignKey!) + return fkUrl.pathname + } catch { + return undefined + } + }).filter(Boolean))) + + sbp('chelonia/private/removeImmediately', contractID, { permanent }) + + if (fkContractIDs.length) { + // Attempt to release all contracts that are being monitored for + // foreign keys + sbp('chelonia/contract/release', fkContractIDs, { try: true }).catch((e: unknown) => { + console.error('[chelonia] Error attempting to release foreign key contracts', e) + }) + } + }) + })) + }, + 'chelonia/contract/retain': async function (this: CheloniaContext, contractIDs: string | string[], params?: { ephemeral?: boolean}): Promise { + const listOfIds = typeof contractIDs === 'string' ? [contractIDs] : contractIDs + const rootState = sbp(this.config.stateSelector) + if (listOfIds.length === 0) return Promise.resolve() + const checkIfDeleted = (id: string) => { + // Contract has been permanently deleted + if (rootState.contracts[id] === null) { + console.error('[chelonia/contract/retain] Called /retain on permanently deleted contract.', id) + throw new ChelErrorResourceGone('Unable to retain permanently deleted contract ' + id) + } + } + if (!params?.ephemeral) { + listOfIds.forEach((id) => { + checkIfDeleted(id) + if (!has(rootState.contracts, id)) { + this.config.reactiveSet(rootState.contracts, id, Object.create(null)) + } + this.config.reactiveSet(rootState.contracts[id], 'references', (rootState.contracts[id].references ?? 
0) + 1) + }) + } else { + listOfIds.forEach((id) => { + checkIfDeleted(id) + if (!has(this.ephemeralReferenceCount, id)) { + this.ephemeralReferenceCount[id] = 1 + } else { + this.ephemeralReferenceCount[id] = this.ephemeralReferenceCount[id] + 1 + } + }) + } + return await sbp('chelonia/private/out/sync', listOfIds) + }, + // the `try` parameter does not affect (ephemeral or persistent) reference + // counts, but rather removes a contract if the reference count is zero + // and the contract isn't being monitored for foreign keys. This parameter + // is meant mostly for internal chelonia use, so that removing or releasing + // a contract can also remove other contracts that this first contract + // was monitoring. + 'chelonia/contract/release': async function (this: CheloniaContext, contractIDs: string | string[], params?: { ephemeral?: boolean, try?: boolean }): Promise { + const listOfIds = typeof contractIDs === 'string' ? [contractIDs] : contractIDs + const rootState = sbp(this.config.stateSelector) + if (!params?.try) { + if (!params?.ephemeral) { + listOfIds.forEach((id) => { + // Contract has been permanently deleted + if (rootState.contracts[id] === null) { + console.warn('[chelonia/contract/release] Called /release on permanently deleted contract. 
This has no effect.', id) + return + } + if (has(rootState.contracts, id) && has(rootState.contracts[id], 'references')) { + const current = rootState.contracts[id].references + if (current === 0) { + console.error('[chelonia/contract/release] Invalid negative reference count for', id) + if (process.env.CI) { + // If running in CI, force tests to fail + Promise.reject(new Error('Invalid negative reference count: ' + id)) + } + throw new Error('Invalid negative reference count') + } + if (current <= 1) { + this.config.reactiveDel(rootState.contracts[id], 'references') + } else { + this.config.reactiveSet(rootState.contracts[id], 'references', current - 1) + } + } else { + console.error('[chelonia/contract/release] Invalid negative reference count for', id) + if (process.env.CI) { + // If running in CI, force tests to fail + Promise.reject(new Error('Invalid negative reference count: ' + id)) + } + throw new Error('Invalid negative reference count') + } + }) + } else { + listOfIds.forEach((id) => { + // Contract has been permanently deleted + if (rootState.contracts[id] === null) { + console.warn('[chelonia/contract/release] Called /release on permanently deleted contract. This has no effect.', id) + return + } + if (has(this.ephemeralReferenceCount, id)) { + const current = this.ephemeralReferenceCount[id] ?? 0 + if (current <= 1) { + delete this.ephemeralReferenceCount[id] + } else { + this.ephemeralReferenceCount[id] = current - 1 + } + } else { + console.error('[chelonia/contract/release] Invalid negative ephemeral reference count for', id) + if (process.env.CI) { + // If running in CI, force tests to fail + Promise.reject(new Error('Invalid negative ephemeral reference count: ' + id)) + } + throw new Error('Invalid negative ephemeral reference count') + } + }) + } + } + + // This function will be called twice. The first time, it provides a list of + // candidate contracts to remove. 
The second time, it confirms that the + // contract is safe to remove + const boundCheckCanBeGarbageCollected = checkCanBeGarbageCollected.bind(this) + const idsToRemove = listOfIds.filter(boundCheckCanBeGarbageCollected) + return idsToRemove.length ? await sbp('chelonia/contract/remove', idsToRemove, { confirmRemovalCallback: boundCheckCanBeGarbageCollected }) : undefined + }, + 'chelonia/contract/disconnect': async function (this: CheloniaContext, contractID: string, contractIDToDisconnect: string) { + const state = sbp(this.config.stateSelector) + const contractState = state[contractID] as ChelContractState + + const keyIds = Object.values(contractState._vm.authorizedKeys).filter((k) => { + return k._notAfterHeight == null && k.meta?.keyRequest?.contractID === contractIDToDisconnect + }).map(k => k.id) + + if (!keyIds.length) return + + return await sbp('chelonia/out/keyDel', { + contractID, + contractName: contractState._vm.type, + data: keyIds, + signingKeyId: findSuitableSecretKeyId(contractState, [SPMessage.OP_KEY_DEL], ['sig']) + }) + }, + 'chelonia/in/processMessage': function (this: CheloniaContext, messageOrRawMessage: SPMessage | string, state: ChelContractState) { + const stateCopy = cloneDeep(state) + const message = typeof messageOrRawMessage === 'string' ? SPMessage.deserialize(messageOrRawMessage, this.transientSecretKeys, stateCopy, this.config.unwrapMaybeEncryptedData) : messageOrRawMessage + return sbp('chelonia/private/in/processMessage', message, stateCopy).then(() => stateCopy).catch((e: unknown) => { + console.warn(`chelonia/in/processMessage: reverting mutation ${message.description()}: ${message.serialize()}`, e) + return state + }) + }, + 'chelonia/out/fetchResource': async function (this: CheloniaContext, cid: string, { code }: { code?: number } = {}) { + const parsedCID = parseCID(cid) + if (code != null) { + if (parsedCID.code !== code) { + throw new Error(`Invalid CID content type. 
Expected ${code}, got ${parsedCID.code}`) + } + } + // Note that chelonia.db/get (set) is a no-op for lightweight clients + // This was added for consistency (processing an event also adds it to the DB) + const local = await sbp('chelonia.db/get', cid) + // We don't verify the CID because it's already been verified when it was set + if (local != null) return local + const url = `${this.config.connectionURL}/file/${cid}` + const data = await this.config.fetch(url, { signal: this.abortController.signal }).then(handleFetchResult('text')) as unknown as string + const ourHash = createCID(data, parsedCID.code) + if (ourHash !== cid) { + throw new Error(`expected hash ${cid}. Got: ${ourHash}`) + } + await sbp('chelonia.db/set', cid, data) + return data + }, + 'chelonia/out/latestHEADInfo': function (this: CheloniaContext, contractID: string) { + return this.config.fetch(`${this.config.connectionURL}/latestHEADinfo/${contractID}`, { + cache: 'no-store', + signal: this.abortController.signal + }).then(handleFetchResult('json')) + }, + 'chelonia/out/eventsAfter': eventsAfter, + 'chelonia/out/eventsBefore': function (this: CheloniaContext, contractID: string, { beforeHeight, limit, stream }: { beforeHeight: number, limit: number, stream: boolean }) { + if (limit <= 0) { + console.error('[chelonia] invalid params error: "limit" needs to be positive integer') + } + const offset = Math.max(0, beforeHeight - limit + 1) + const eventsAfterLimit = Math.min(beforeHeight + 1, limit) + return sbp('chelonia/out/eventsAfter', contractID, { sinceHeight: offset, limit: eventsAfterLimit, stream }) + }, + 'chelonia/out/eventsBetween': function (this: CheloniaContext, contractID: string, { startHash, endHeight, offset = 0, limit = 0, stream = true }: { startHash: string, endHeight: number, offset?: number, limit: number, stream: boolean }) { + if (offset < 0) { + console.error('[chelonia] invalid params error: "offset" needs to be positive integer or zero') + return + } + let reader: 
ReadableStreamDefaultReader + const s = new ReadableStream({ + start: async (controller) => { + const first = await this.config.fetch(`${this.config.connectionURL}/file/${startHash}`, { signal: this.abortController.signal }).then(handleFetchResult('text')) as unknown as string + const deserializedHEAD = SPMessage.deserializeHEAD(first) + if (deserializedHEAD.contractID !== contractID) { + controller.error(new Error('chelonia/out/eventsBetween: Mismatched contract ID')) + return + } + const startOffset = Math.max(0, deserializedHEAD.head.height - offset) + const ourLimit = limit ? Math.min(endHeight - startOffset + 1, limit) : endHeight - startOffset + 1 + if (ourLimit < 1) { + controller.close() + return + } + reader = sbp('chelonia/out/eventsAfter', contractID, { sinceHeight: startOffset, limit: ourLimit }).getReader() + }, + async pull (controller) { + const { done, value } = await reader.read() + if (done) { + controller.close() + } else { + controller.enqueue(value) + } + } + }) + + if (stream) return s + // Workaround for callers that cannot consume a ReadableStream: collect all events into an array instead + return collectEventStream(s) + }, + 'chelonia/rootState': function (this: CheloniaContext) { return sbp(this.config.stateSelector) }, + 'chelonia/latestContractState': async function (this: CheloniaContext, contractID: string, options = { forceSync: false }) { + const rootState = sbp(this.config.stateSelector) + // return a copy of the state if we already have it, unless the only key that's in it is _volatile, + // in which case it means we should sync the contract to get more info.
+ if (rootState.contracts[contractID] === null) { + throw new ChelErrorResourceGone('Permanently deleted contract ' + contractID) + } + if (!options.forceSync && rootState[contractID] && Object.keys(rootState[contractID]).some((x) => x !== '_volatile')) { + return cloneDeep(rootState[contractID]) + } + let state = Object.create(null) + let contractName = rootState.contracts[contractID]?.type + const eventsStream = sbp('chelonia/out/eventsAfter', contractID, { sinceHeight: 0, sinceHash: contractID }) + const eventsStreamReader = eventsStream.getReader() + if (rootState[contractID]) state._volatile = rootState[contractID]._volatile + for (;;) { + const { value: event, done } = await eventsStreamReader.read() + if (done) return state + const stateCopy = cloneDeep(state) + try { + await sbp('chelonia/private/in/processMessage', SPMessage.deserialize(event, this.transientSecretKeys, state, this.config.unwrapMaybeEncryptedData), state, undefined, contractName) + if (!contractName && state._vm) { + contractName = state._vm.type + } + } catch (e) { + console.warn(`[chelonia] latestContractState: '${(e as Error).name}': ${(e as Error).message} processing:`, event, (e as Error).stack) + if (e instanceof ChelErrorUnrecoverable) throw e + state = stateCopy + } + } + }, + 'chelonia/contract/state': function (this: CheloniaContext, contractID: string, height?: number | null | undefined) { + const state = sbp(this.config.stateSelector)[contractID] + const stateCopy = state && cloneDeep(state) + if (stateCopy?._vm && height != null) { + // Remove keys in the future + Object.keys(stateCopy._vm.authorizedKeys).forEach(keyId => { + if (stateCopy._vm.authorizedKeys[keyId]._notBeforeHeight > height) { + delete stateCopy._vm.authorizedKeys[keyId] + } + }) + } + return stateCopy + }, + 'chelonia/contract/fullState': function (this: CheloniaContext, contractID: string | string[]) { + const rootState = sbp(this.config.stateSelector) + if (Array.isArray(contractID)) { + return 
Object.fromEntries(contractID.map(contractID => { + return [ + contractID, + { + contractState: rootState[contractID], + cheloniaState: rootState.contracts[contractID] + } + ] + })) + } + return { + contractState: rootState[contractID], + cheloniaState: rootState.contracts[contractID] + } + }, + // 'chelonia/out' - selectors that send data out to the server + 'chelonia/out/registerContract': async function (this: CheloniaContext, params: ChelRegParams) { + const { contractName, keys, hooks, publishOptions, signingKeyId, actionSigningKeyId, actionEncryptionKeyId } = params + const manifestHash = this.config.contracts.manifests[contractName] + const contractInfo = this.manifestToContract[manifestHash] + if (!contractInfo) throw new Error(`contract not defined: ${contractName}`) + const signingKey = this.transientSecretKeys[signingKeyId] + if (!signingKey) throw new Error(`Signing key ${signingKeyId} is not defined`) + const payload = { + type: contractName, + keys + } as SPOpContract + const contractMsg = SPMessage.createV1_0({ + contractID: null, + height: 0, + op: [ + SPMessage.OP_CONTRACT, + signedOutgoingDataWithRawKey(signingKey, payload) + ], + manifest: manifestHash + }) + const contractID = contractMsg.hash() + await sbp('chelonia/private/out/publishEvent', contractMsg, (params.namespaceRegistration + ? { + ...publishOptions, + headers: { + ...publishOptions?.headers, + 'shelter-namespace-registration': params.namespaceRegistration + } + } + : publishOptions), hooks && { + prepublish: hooks.prepublishContract, + postpublish: hooks.postpublishContract + }) + await sbp('chelonia/private/out/sync', contractID) + const msg = await sbp(actionEncryptionKeyId + ? 'chelonia/out/actionEncrypted' + : 'chelonia/out/actionUnencrypted', { + action: contractName, + contractID, + data: params.data, + signingKeyId: actionSigningKeyId ?? 
signingKeyId, + encryptionKeyId: actionEncryptionKeyId, + hooks, + publishOptions + }) + return msg + }, + 'chelonia/out/ownResources': async function (this: CheloniaContext, contractID: string) { + if (!contractID) { + throw new TypeError('A contract ID must be provided') + } + + const response = await this.config.fetch(`${this.config.connectionURL}/ownResources`, { + method: 'GET', + signal: this.abortController.signal, + headers: new Headers([ + [ + 'authorization', + buildShelterAuthorizationHeader.call(this, contractID) + ] + ]) + }) + if (!response.ok) { + console.error('Unable to fetch own resources', contractID, response.status) + throw new Error(`Unable to fetch own resources for ${contractID}: ${response.status}`) + } + + return response.json() + }, + 'chelonia/out/deleteContract': async function ( + this: CheloniaContext, + contractID: string | string[], + credentials: { + [contractID: string]: { token?: string, billableContractID?: string } + } = {} + ) { + if (!contractID) { + throw new TypeError('A contract ID must be provided') + } + if (!Array.isArray(contractID)) contractID = [contractID] + return await Promise.allSettled(contractID.map(async (cid) => { + const hasCredential = has(credentials, cid) + const hasToken = has(credentials[cid], 'token') && credentials[cid].token + const hasBillableContractID = has(credentials[cid], 'billableContractID') && credentials[cid].billableContractID + if (!hasCredential || hasToken === hasBillableContractID) { + throw new TypeError(`Either a token or a billable contract ID must be provided for ${cid}`) + } + + const response = await this.config.fetch(`${this.config.connectionURL}/deleteContract/${cid}`, { + method: 'POST', + signal: this.abortController.signal, + headers: new Headers([ + ['authorization', + hasToken + ? 
`bearer ${credentials[cid].token!.valueOf()}` + : buildShelterAuthorizationHeader.call(this, credentials[cid].billableContractID!)] + ]) + }) + if (!response.ok) { + if (response.status === 404 || response.status === 410) { + console.warn('Contract appears to have been deleted already', cid, response.status) + return + } + console.error('Unable to delete contract', cid, response.status) + throw new Error(`Unable to delete contract ${cid}: ${response.status}`) + } + })) + }, + // all of these functions will do both the creation of the SPMessage + // and the sending of it via 'chelonia/private/out/publishEvent' + 'chelonia/out/actionEncrypted': function (this: CheloniaContext, params: ChelActionParams): Promise { + return outEncryptedOrUnencryptedAction.call(this, SPMessage.OP_ACTION_ENCRYPTED, params) + }, + 'chelonia/out/actionUnencrypted': function (this: CheloniaContext, params: ChelActionParams): Promise { + return outEncryptedOrUnencryptedAction.call(this, SPMessage.OP_ACTION_UNENCRYPTED, params) + }, + 'chelonia/out/keyShare': async function (this: CheloniaContext, params: ChelKeyShareParams): Promise { + const { atomic, originatingContractName, originatingContractID, contractName, contractID, data, hooks, publishOptions } = params + const originatingManifestHash = this.config.contracts.manifests[originatingContractName!] + const destinationManifestHash = this.config.contracts.manifests[contractName] + const originatingContract = originatingContractID ? 
this.manifestToContract[originatingManifestHash]?.contract : undefined + const destinationContract = this.manifestToContract[destinationManifestHash]?.contract + + if ((originatingContractID && !originatingContract) || !destinationContract) { + throw new Error('Contract name not found') + } + + const payload = data as SPOpKeyShare + + if (!params.signingKeyId && !params.signingKey) { + throw new TypeError('Either signingKeyId or signingKey must be specified') + } + + let msg = SPMessage.createV1_0({ + contractID, + op: [ + SPMessage.OP_KEY_SHARE, + params.signingKeyId + ? signedOutgoingData(contractID, params.signingKeyId, payload, this.transientSecretKeys) + : signedOutgoingDataWithRawKey(params.signingKey!, payload) + ], + manifest: destinationManifestHash + }) + if (!atomic) { + msg = await sbp('chelonia/private/out/publishEvent', msg, publishOptions, hooks) + } + return msg + }, + 'chelonia/out/keyAdd': async function (this: CheloniaContext, params: ChelKeyAddParams): Promise { + // TODO: For foreign keys, recalculate the key id + // TODO: Make this a noop if the key already exists with the given permissions + const { atomic, contractID, contractName, data, hooks, publishOptions } = params + const manifestHash = this.config.contracts.manifests[contractName] + const contract = this.manifestToContract[manifestHash]?.contract + if (!contract) { + throw new Error('Contract name not found') + } + const state = contract.state(contractID) + + const payload = (data as SPOpKeyAdd).filter((wk) => { + const k = ((isEncryptedData(wk) ?
wk.valueOf() : wk) as SPKey) + if (has(state._vm.authorizedKeys, k.id)) { + if (state._vm.authorizedKeys[k.id]._notAfterHeight == null) { + // Can't add a key that exists + return false + } + } + + return true + }) + if (payload.length === 0) return + let msg = SPMessage.createV1_0({ + contractID, + op: [ + SPMessage.OP_KEY_ADD, + signedOutgoingData(contractID, params.signingKeyId, payload, this.transientSecretKeys) + ], + manifest: manifestHash + }) + if (!atomic) { + msg = await sbp('chelonia/private/out/publishEvent', msg, publishOptions, hooks) + } + return msg + }, + 'chelonia/out/keyDel': async function (this: CheloniaContext, params: ChelKeyDelParams): Promise { + const { atomic, contractID, contractName, data, hooks, publishOptions } = params + const manifestHash = this.config.contracts.manifests[contractName] + const contract = this.manifestToContract[manifestHash]?.contract + if (!contract) { + throw new Error('Contract name not found') + } + const state = contract.state(contractID) + const payload = (data as SPOpKeyDel).map((keyId) => { + if (isEncryptedData(keyId)) return keyId + if (!has(state._vm.authorizedKeys, keyId) || state._vm.authorizedKeys[keyId]._notAfterHeight != null) return undefined + if (state._vm.authorizedKeys[keyId]._private) { + return encryptedOutgoingData(contractID, state._vm.authorizedKeys[keyId]._private!, keyId) + } else { + return keyId + } + }).filter(Boolean) + let msg = SPMessage.createV1_0({ + contractID, + op: [ + SPMessage.OP_KEY_DEL, + signedOutgoingData(contractID, params.signingKeyId, payload as SPOpValue, this.transientSecretKeys) + ], + manifest: manifestHash + }) + if (!atomic) { + msg = await sbp('chelonia/private/out/publishEvent', msg, publishOptions, hooks) + } + return msg + }, + 'chelonia/out/keyUpdate': async function (this: CheloniaContext, params: ChelKeyUpdateParams): Promise { + const { atomic, contractID, contractName, data, hooks, publishOptions } = params + const manifestHash = 
this.config.contracts.manifests[contractName] + const contract = this.manifestToContract[manifestHash]?.contract + if (!contract) { + throw new Error('Contract name not found') + } + const state = contract.state(contractID) + const payload = (data as SPOpKeyUpdate).map((key) => { + if (isEncryptedData(key)) return key + const { oldKeyId } = key + if (state._vm.authorizedKeys[oldKeyId]._private) { + return encryptedOutgoingData(contractID, state._vm.authorizedKeys[oldKeyId]._private!, key) + } else { + return key + } + }) + let msg = SPMessage.createV1_0({ + contractID, + op: [ + SPMessage.OP_KEY_UPDATE, + signedOutgoingData(contractID, params.signingKeyId, payload, this.transientSecretKeys) + ], + manifest: manifestHash + }) + if (!atomic) { + msg = await sbp('chelonia/private/out/publishEvent', msg, publishOptions, hooks) + } + return msg + }, + 'chelonia/out/keyRequest': async function (this: CheloniaContext, params: ChelKeyRequestParams): Promise { + const { originatingContractID, originatingContractName, contractID, contractName, hooks, publishOptions, innerSigningKeyId, encryptionKeyId, innerEncryptionKeyId, encryptKeyRequestMetadata, reference } = params + // `encryptKeyRequestMetadata` is optional because it could be desirable + // sometimes to allow anyone to audit OP_KEY_REQUEST and OP_KEY_SHARE + // operations. If `encryptKeyRequestMetadata` were always true, it would + // be harder in these situations to see interactions between two contracts. 
+ const manifestHash = this.config.contracts.manifests[contractName] + const originatingManifestHash = this.config.contracts.manifests[originatingContractName] + const contract = this.manifestToContract[manifestHash]?.contract + const originatingContract = this.manifestToContract[originatingManifestHash]?.contract + if (!contract) { + throw new Error('Contract name not found') + } + const rootState = sbp(this.config.stateSelector) + try { + await sbp('chelonia/contract/retain', contractID, { ephemeral: true }) + const state = contract.state(contractID) + const originatingState = originatingContract.state(originatingContractID) + + const havePendingKeyRequest = Object.values(originatingState._vm.authorizedKeys).findIndex((k: ChelContractKey) => { + return k._notAfterHeight == null && k.meta?.keyRequest?.contractID === contractID && state?._volatile?.pendingKeyRequests?.some(pkr => pkr.name === k.name) + }) !== -1 + + // If there's a pending key request for this contract, return + if (havePendingKeyRequest) { + return + } + + const keyRequestReplyKey = keygen(EDWARDS25519SHA512BATCH) + const keyRequestReplyKeyId = keyId(keyRequestReplyKey) + const keyRequestReplyKeyP = serializeKey(keyRequestReplyKey, false) + const keyRequestReplyKeyS = serializeKey(keyRequestReplyKey, true) + + const signingKeyId = findSuitableSecretKeyId(originatingState, [SPMessage.OP_KEY_ADD], ['sig']) + if (!signingKeyId) { + throw ChelErrorUnexpected(`Unable to send key request. Originating contract is missing a key with OP_KEY_ADD permission. contractID=${contractID} originatingContractID=${originatingContractID}`) + } + const keyAddOp = () => sbp('chelonia/out/keyAdd', { + contractID: originatingContractID, + contractName: originatingContractName, + data: [{ + id: keyRequestReplyKeyId, + name: '#krrk-' + keyRequestReplyKeyId, + purpose: ['sig'], + ringLevel: Number.MAX_SAFE_INTEGER, + permissions: params.permissions === '*' + ? '*' + : Array.isArray(params.permissions) + ? 
[...params.permissions, SPMessage.OP_KEY_SHARE] + : [SPMessage.OP_KEY_SHARE], + allowedActions: params.allowedActions, + meta: { + private: { + content: encryptedOutgoingData(originatingContractID, encryptionKeyId, keyRequestReplyKeyS), + shareable: false + }, + keyRequest: { + ...(reference && { reference: encryptKeyRequestMetadata ? encryptedOutgoingData(originatingContractID, encryptionKeyId, reference) : reference }), + contractID: encryptKeyRequestMetadata ? encryptedOutgoingData(originatingContractID, encryptionKeyId, contractID) : contractID + } + }, + data: keyRequestReplyKeyP + }], + signingKeyId + }).catch((e: Error) => { + console.error(`[chelonia] Error sending OP_KEY_ADD for ${originatingContractID} during key request to ${contractID}`, e) + throw e + }) + const payload = ({ + contractID: originatingContractID, + height: rootState.contracts[originatingContractID].height, + replyWith: signedOutgoingData(originatingContractID, innerSigningKeyId, { + encryptionKeyId, + responseKey: encryptedOutgoingData(contractID, innerEncryptionKeyId, keyRequestReplyKeyS) + }, this.transientSecretKeys), + request: '*' + } as SPOpKeyRequest) + let msg = SPMessage.createV1_0({ + contractID, + op: [ + SPMessage.OP_KEY_REQUEST, + signedOutgoingData(contractID, params.signingKeyId, + encryptKeyRequestMetadata + ? 
encryptedOutgoingData(contractID, innerEncryptionKeyId, payload) as SPOpValue + : payload, + this.transientSecretKeys + ) + ], + manifest: manifestHash + }) + msg = await sbp('chelonia/private/out/publishEvent', msg, publishOptions, { + ...hooks, + // We ensure that both messages are placed into the publish queue + prepublish: (...args: unknown[]) => { + return keyAddOp().then(() => (hooks?.prepublish as unknown as undefined | { (...args: unknown[]): void })?.(...args)) + } + }) + return msg + } finally { + await sbp('chelonia/contract/release', contractID, { ephemeral: true }) + } + }, + 'chelonia/out/keyRequestResponse': async function (this: CheloniaContext, params: ChelKeyRequestResponseParams): Promise { + const { atomic, contractID, contractName, data, hooks, publishOptions } = params + const manifestHash = this.config.contracts.manifests[contractName] + const contract = this.manifestToContract[manifestHash]?.contract + if (!contract) { + throw new Error('Contract name not found') + } + const payload = data + let message = SPMessage.createV1_0({ + contractID, + op: [ + SPMessage.OP_KEY_REQUEST_SEEN, + signedOutgoingData(contractID, params.signingKeyId, payload, this.transientSecretKeys) + ], + manifest: manifestHash + }) + if (!atomic) { + message = await sbp('chelonia/private/out/publishEvent', message, publishOptions, hooks) + } + return message + }, + 'chelonia/out/atomic': async function (this: CheloniaContext, params: ChelAtomicParams): Promise { + const { contractID, contractName, data, hooks, publishOptions } = params + const manifestHash = this.config.contracts.manifests[contractName] + const contract = this.manifestToContract[manifestHash]?.contract + if (!contract) { + throw new Error('Contract name not found') + } + const payload = (await Promise.all(data.map(([selector, opParams]) => { + if (!['chelonia/out/actionEncrypted', 'chelonia/out/actionUnencrypted', 'chelonia/out/keyAdd', 'chelonia/out/keyDel', 'chelonia/out/keyUpdate', 
'chelonia/out/keyRequestResponse', 'chelonia/out/keyShare'].includes(selector)) { + throw new Error('Selector not allowed in OP_ATOMIC: ' + selector) + } + return sbp(selector, { ...opParams, ...params, data: (opParams as ChelActionParams).data, atomic: true }) + }))).flat().filter(Boolean).map((msg) => { + return [msg.opType(), msg.opValue()] + }) + let msg = SPMessage.createV1_0({ + contractID, + op: [ + SPMessage.OP_ATOMIC, + signedOutgoingData(contractID, params.signingKeyId, payload as SPOpValue, this.transientSecretKeys) + ], + manifest: manifestHash + }) + msg = await sbp('chelonia/private/out/publishEvent', msg, publishOptions, hooks) + return msg + }, + 'chelonia/out/protocolUpgrade': async function () { + + }, + 'chelonia/out/propSet': async function () { + + }, + 'chelonia/out/propDel': async function () { + + }, + 'chelonia/out/encryptedOrUnencryptedPubMessage': function (this: CheloniaContext, { + contractID, + innerSigningKeyId, + encryptionKeyId, + signingKeyId, + data + }: { + contractID: string, + innerSigningKeyId?: string | null | undefined, + encryptionKeyId?: string | null | undefined, + signingKeyId: string, + data: JSONType + }) { + const serializedData = outputEncryptedOrUnencryptedMessage.call(this, { + contractID, + innerSigningKeyId, + encryptionKeyId, + signingKeyId, + data + }) + this.pubsub.pub(contractID, serializedData) + }, + // Note: This is a bare-bones function designed for precise control. In many + // situations, the `chelonia/kv/queuedSet` selector (in chelonia-utils.js) + // will be simpler and more appropriate to use. + // In most situations, you want to use some queuing strategy (which this + // selector doesn't provide) alongside writing to the KV store. Therefore, as + // a general rule, you shouldn't be calling this selector directly unless + // you're building a utility library or if you have very specific needs. In + // this case, see if `chelonia/kv/queuedSet` covers your needs. 
+ // `data` is allowed to be falsy, in which case a fetch will occur first and + // the `onconflict` handler will be called. + 'chelonia/kv/set': async function (this: CheloniaContext, contractID: string, key: string, data: JSONType | undefined, { + ifMatch, + innerSigningKeyId, + encryptionKeyId, + signingKeyId, + maxAttempts, + onconflict + }: { + ifMatch?: string, + innerSigningKeyId?: string | null | undefined, + encryptionKeyId?: string | null | undefined, + signingKeyId: string, + maxAttempts?: number | null | undefined, + onconflict?: ChelKvOnConflictCallback | null | undefined, + }) { + maxAttempts = maxAttempts ?? 3 + const url = `${this.config.connectionURL}/kv/${encodeURIComponent(contractID)}/${encodeURIComponent(key)}` + const hasOnconflict = typeof onconflict === 'function' + + let response: Response + // The `resolveData` function is tasked with computing merged data, as in + // merging the existing stored values (after a conflict or initial fetch) + // and new data. The return value indicates whether there should be a new + // attempt at storing updated data (if `true`) or not (if `false`) + const resolveData = async () => { + let currentValue: ParsedEncryptedOrUnencryptedMessage | undefined + // Rationale: + // * response.ok could be the result of `GET` (no initial data) + // * 409 indicates a conflict because the height used is too old + // * 412 indicates a conflict (precondition failed) because the data + // on the KV store have been updated / is not what we expected + // All of these situations should trigger parsing the response and + // conflict resolution + if (response.ok || response.status === 409 || response.status === 412) { + const serializedDataText = await response.text() + // We can get 409 even if there's no data on the server. We still need + // to call `onconflict` in this case, but we don't need to attempt to + // parse the response.
+ // This prevents this from failing in such cases, which can result in + // race conditions and data not being properly initialised. + // See + currentValue = serializedDataText + ? parseEncryptedOrUnencryptedMessage(this, { + contractID, + serializedData: JSON.parse(serializedDataText), + meta: key + }) + : undefined + // Rationale: 404 and 410 both indicate that the store key doesn't exist. + // These are not treated as errors since we could still set the value. + } else if (response.status !== 404 && response.status !== 410) { + throw new ChelErrorUnexpectedHttpResponseCode('[kv/set] Invalid response code: ' + response.status) + } + const result = await onconflict!({ + contractID, + key, + failedData: data, + status: response.status, + // If no x-cid or etag header was returned, `ifMatch` would likely be + // returned as undefined, which will then use the `''` fallback value + // when writing. This allows 404 / 410 responses to work even if no + // etag is explicitly given + etag: response.headers.get('x-cid') || response.headers.get('etag'), + get currentData () { + return currentValue?.data + }, + currentValue + }) + if (!result) return false + + data = result[0] + ifMatch = result[1] + return true + } + + for (;;) { + if (data !== undefined) { + const serializedData = outputEncryptedOrUnencryptedMessage.call(this, { + contractID, + innerSigningKeyId, + encryptionKeyId, + signingKeyId, + data: data!, + meta: key + }) + response = await this.config.fetch(url, { + headers: new Headers([[ + 'authorization', buildShelterAuthorizationHeader.call(this, contractID) + ], [ + 'if-match', ifMatch || '""' + ] + ]), + method: 'POST', + body: JSON.stringify(serializedData), + signal: this.abortController.signal + }) + } else { + if (!hasOnconflict) { + throw TypeError('onconflict required with empty data') + } + // If no initial data provided, perform a GET `fetch` to get the current + // data and CID. 
Then, `onconflict` will be used to merge the current + and new data. + response = await this.config.fetch(url, { + headers: new Headers([[ + 'authorization', buildShelterAuthorizationHeader.call(this, contractID) + ]]), + signal: this.abortController.signal + }) + + // This is only for the initial case; the logic is replicated below + // for subsequent iterations that require conflict resolution. + if (await resolveData()) { + continue + } else { + break + } + } + if (!response.ok) { + // Rationale: 409 and 412 indicate conflict resolution is needed + if (response.status === 409 || response.status === 412) { + if (--maxAttempts <= 0) { + throw new Error('kv/set conflict setting KV value') + } + // Only retry if an onconflict handler exists to potentially resolve it + await delay(randomIntFromRange(0, 1500)) + if (hasOnconflict) { + if (await resolveData()) { + continue + } else { + break + } + } else { + // Can't resolve automatically if there's no conflict handler + throw new Error(`kv/set failed with status ${response.status} and no onconflict handler was provided`) + } + } + throw new ChelErrorUnexpectedHttpResponseCode('kv/set invalid response status: ' + response.status) + } + break + } + }, + 'chelonia/kv/get': async function (this: CheloniaContext, contractID: string, key: string) { + const response = await this.config.fetch(`${this.config.connectionURL}/kv/${encodeURIComponent(contractID)}/${encodeURIComponent(key)}`, { + headers: new Headers([[ + 'authorization', buildShelterAuthorizationHeader.call(this, contractID) + ]]), + signal: this.abortController.signal + }) + if (response.status === 404) { + return null + } + if (!response.ok) { + throw new Error('Invalid response status: ' + response.status) + } + const data = await response.json() + return parseEncryptedOrUnencryptedMessage(this, { + contractID, + serializedData: data, + meta: key + }) + }, + // To set filters for a contract, call with `filter` set to an array of KV + // keys to receive
updates for over the WebSocket. An empty array means that + // no KV updates will be sent. + // Calling with a single argument (the contract ID) will remove filters, + // meaning that KV updates will be sent for _any_ KV key. + // The last call takes precedence, so, for example, calling with filter + // set to `['foo', 'bar']` and then with `['baz']` means that KV updates will + // be received for `baz` only, not for `foo`, `bar` or any other keys. + 'chelonia/kv/setFilter': function (this: CheloniaContext, contractID: string, filter?: string[]) { + this.pubsub.setKvFilter(contractID, filter) + }, + 'chelonia/parseEncryptedOrUnencryptedDetachedMessage': function (this: CheloniaContext, { contractID, serializedData, meta }: { contractID: string, serializedData: { height: string, _signedData: [string, string, string] }, meta?: string | null | undefined }) { + return parseEncryptedOrUnencryptedMessage(this, { + contractID, + serializedData, + meta + }) + } +}) as string[] + +function contractNameFromAction (action: string): string { + const regexResult = ACTION_REGEX.exec(action) + const contractName = regexResult?.[2] + if (!contractName) throw new Error(`Poorly named action '${action}': missing contract name.`) + return contractName +} + +function outputEncryptedOrUnencryptedMessage (this: CheloniaContext, { + contractID, + innerSigningKeyId, + encryptionKeyId, + signingKeyId, + data, + meta +}: { + contractID: string, + innerSigningKeyId?: string | null | undefined, + encryptionKeyId?: string | null | undefined, + signingKeyId: string, + data: JSONType, + meta?: string | null | undefined +}) { + const state = sbp(this.config.stateSelector)[contractID] + const signedMessage = innerSigningKeyId + ? (state._vm.authorizedKeys[innerSigningKeyId] && state._vm.authorizedKeys[innerSigningKeyId]?._notAfterHeight == null) + ? 
signedOutgoingData(contractID, innerSigningKeyId, data, this.transientSecretKeys) + : signedOutgoingDataWithRawKey(this.transientSecretKeys[innerSigningKeyId], data) + : data + const payload = !encryptionKeyId + ? signedMessage + : encryptedOutgoingData(contractID, encryptionKeyId, signedMessage) + const message = signedOutgoingData(contractID, signingKeyId, payload, this.transientSecretKeys) + const rootState = sbp(this.config.stateSelector) + const height = String(rootState.contracts[contractID].height) + const serializedData = { ...message.serialize((meta ?? '') + height), height } + return serializedData +} + +function parseEncryptedOrUnencryptedMessage (ctx: CheloniaContext, { + contractID, + serializedData, + meta +}: { + contractID: string, + serializedData: { height: string, _signedData: [string, string, string] }, + meta?: string | null | undefined +}): ParsedEncryptedOrUnencryptedMessage { + if (!serializedData) { + throw new TypeError('[chelonia] parseEncryptedOrUnencryptedMessage: serializedData is required') + } + const state = sbp(ctx.config.stateSelector)[contractID] + const numericHeight = parseInt(serializedData.height) + const rootState = sbp(ctx.config.stateSelector) + const currentHeight = rootState.contracts[contractID].height + if (!(numericHeight >= 0) || !(numericHeight <= currentHeight)) { + throw new Error(`[chelonia] parseEncryptedOrUnencryptedMessage: Invalid height ${serializedData.height}; it must be between 0 and ${currentHeight}`) + } + + // Additional data used for verification + const aad = (meta ?? 
'') + serializedData.height + + const v = signedIncomingData, T>(contractID, state, serializedData, numericHeight, aad, (message) => { + return maybeEncryptedIncomingData(contractID, state, message, numericHeight, ctx.transientSecretKeys, aad, undefined) + }) + + // Cached values + let encryptionKeyId: string | null + let innerSigningKeyId: string | null + + // Lazy unwrap function + // We don't use `unwrapMaybeEncryptedData`, which would almost do the same, + // because it swallows decryption errors, which we want to propagate to + // consumers of the KV API. + const unwrap = (() => { + let result: [T] | [undefined, unknown] + + return (): T => { + if (!result) { + try { + let unwrapped: unknown + // First, we unwrap the signed data + unwrapped = v.valueOf() as T |EncryptedData + // If this is encrypted data, attempt decryption + if (isEncryptedData(unwrapped)) { + encryptionKeyId = unwrapped.encryptionKeyId + unwrapped = unwrapped.valueOf() + + // There could be inner signed data (inner signatures), so we unwrap + // that too + if (isSignedData(unwrapped)) { + innerSigningKeyId = unwrapped.signingKeyId + unwrapped = unwrapped.valueOf() + } else { + innerSigningKeyId = null + } + } else { + encryptionKeyId = null + innerSigningKeyId = null + } + result = [unwrapped as T] + } catch (e) { + result = [undefined, e] + } + } + + if (result.length === 2) { + throw result[1] + } + return result[0] + } + })() + + const result = { + get contractID () { + return contractID + }, + get innerSigningKeyId () { + if (innerSigningKeyId === undefined) { + try { + unwrap() + } catch { + // We're not interested in an error, that'd only be for the 'data' + // accessor. + } + } + return innerSigningKeyId + }, + get encryptionKeyId () { + if (encryptionKeyId === undefined) { + try { + unwrap() + } catch { + // We're not interested in an error, that'd only be for the 'data' + // accessor. 
+ } + } + return encryptionKeyId + }, + get signingKeyId () { + return v.signingKeyId + }, + get data () { + return unwrap() + }, + get signingContractID () { + return getContractIDfromKeyId(contractID, result.signingKeyId, state) + }, + get innerSigningContractID () { + return getContractIDfromKeyId(contractID, result.innerSigningKeyId, state) + } + } + + return result +} + +async function outEncryptedOrUnencryptedAction ( + this: CheloniaContext, + opType: 'ae' | 'au', + params: ChelActionParams +) { + const { atomic, action, contractID, data, hooks, publishOptions } = params + const contractName = contractNameFromAction(action) + const manifestHash = this.config.contracts.manifests[contractName] + const { contract } = this.manifestToContract[manifestHash] + const state = contract.state(contractID) + const meta = await contract.metadata.create() + const unencMessage = ({ action, data, meta } as SPOpActionUnencrypted) + const signedMessage = params.innerSigningKeyId + ? (state._vm.authorizedKeys[params.innerSigningKeyId] && state._vm.authorizedKeys[params.innerSigningKeyId]?._notAfterHeight == null) + ? signedOutgoingData(contractID, params.innerSigningKeyId, unencMessage, this.transientSecretKeys) + : signedOutgoingDataWithRawKey(this.transientSecretKeys[params.innerSigningKeyId], unencMessage) + : unencMessage + if (opType === SPMessage.OP_ACTION_ENCRYPTED && !params.encryptionKeyId) { + throw new Error('OP_ACTION_ENCRYPTED requires an encryption key ID be given') + } + if (params.encryptionKey) { + if (params.encryptionKeyId !== keyId(params.encryptionKey)) { + throw new Error('OP_ACTION_ENCRYPTED raw encryption key does not match encryptionKeyId') + } + } + + const payload = opType === SPMessage.OP_ACTION_UNENCRYPTED + ? signedMessage + : params.encryptionKey + ? 
encryptedOutgoingDataWithRawKey(params.encryptionKey, signedMessage) + : encryptedOutgoingData(contractID, params.encryptionKeyId!, signedMessage) + let message = SPMessage.createV1_0({ + contractID, + op: [ + opType, + signedOutgoingData(contractID, params.signingKeyId, payload as SPOpValue, this.transientSecretKeys) + ], + manifest: manifestHash + }) + if (!atomic) { + message = await sbp('chelonia/private/out/publishEvent', message, publishOptions, hooks) + } + return message +} + +// The gettersProxy is what makes Vue-like getters possible. In other words, +// we want to make sure that the getter functions that we defined in each +// contract get passed the 'state' when a getter is accessed. +// We pass in the state by creating a Proxy object that does it for us. +// This allows us to maintain compatibility with Vue.js and integrate +// the contract getters into the Vue-facing getters. +// For this to work, other getters need to be implemented relative to a +// 'current' getter that returns the state itself. 
For example: +// ``` +// { +// currentMailboxState: (state) => state, // In the contract +// currentMailboxState: (state) => state[state.currentMailboxId], // In the app +// lastMessage: (state, getters) => // Shared getter for both app and contract +// getters.currentMailboxState.messages.slice(-1).pop() +// } +// ``` +function gettersProxy (state: ChelContractState, getters: Record T[K]>) { + const proxyGetters: T = new Proxy({} as unknown as T, { + get (_target, prop) { + return getters[prop as K](state, proxyGetters) + } + }) + return { getters: proxyGetters } +} + +sbp('sbp/domains/lock', ['chelonia']) diff --git a/src/constants.ts b/src/constants.ts new file mode 100644 index 0000000..0ccc907 --- /dev/null +++ b/src/constants.ts @@ -0,0 +1,5 @@ +export const INVITE_STATUS = { + REVOKED: 'revoked', + VALID: 'valid', + USED: 'used' +} diff --git a/src/db.ts b/src/db.ts new file mode 100644 index 0000000..5a2f580 --- /dev/null +++ b/src/db.ts @@ -0,0 +1,215 @@ +import '@sbp/okturtles.data' +import '@sbp/okturtles.eventqueue' +import sbp from '@sbp/sbp' +import { Buffer } from 'buffer' +import { SPMessage } from './SPMessage.js' +import { ChelErrorDBBadPreviousHEAD, ChelErrorDBConnection } from './errors.js' +import type { CheloniaContext } from './types.js' + +const headPrefix = 'head=' + +const getContractIdFromLogHead = (key: string): string | undefined => { + if (!key.startsWith(headPrefix)) return + return key.slice(headPrefix.length) +} +const getLogHead = (contractID: string): string => `${headPrefix}${contractID}` + +type HEADInfo = { HEAD: string | null; height: number; previousKeyOp: string | null; } + +export const checkKey = (key: string): void => { + // Disallow unprintable characters, slashes, and TAB. 
+ // Also disallow characters not allowed by Windows: + // + if (/[\x00-\x1f\x7f\t\\/<>:"|?*]/.test(key)) { // eslint-disable-line no-control-regex + throw new Error(`bad key: ${JSON.stringify(key)}`) + } +} + +export const parsePrefixableKey = (key: string): [string, string] => { + const i = key.indexOf(':') + if (i === -1) { + return ['', key] + } + const prefix = key.slice(0, i + 1) + if (prefix in prefixHandlers) { + return [prefix, key.slice(prefix.length)] + } + throw new ChelErrorDBConnection(`Unknown prefix in '${key}'.`) +} + +export const prefixHandlers: Record unknown> = { + // Decode buffers, but don't transform other values. + '': (value) => Buffer.isBuffer(value) ? value.toString('utf8') : value, + 'any:': (value) => value + /* + // 2025-03-24: Commented out because it's not used; currently, only `any:` + // is used in the `/file` route. + // Throw if the value if not a buffer. + 'blob:': value => { + if (Buffer.isBuffer(value)) { + return value + } + throw new ChelErrorDBConnection('Unexpected value: expected a buffer.') + } + */ +} + +// NOTE: To enable persistence of log use 'sbp/selectors/overwrite' +// to overwrite the following selectors: +sbp('sbp/selectors/unsafe', ['chelonia.db/get', 'chelonia.db/set', 'chelonia.db/delete']) +// NOTE: MAKE SURE TO CALL 'sbp/selectors/lock' after overwriting them! + +// When using a lightweight client, the client doesn't keep a copy of messages +// in the DB. Therefore, `chelonia.db/*` selectors are mostly turned into no-ops. +// The `chelonia.db/get` selector is slightly more complex than a no-op, because +// Chelonia relies on being able to find the current contract head. To overcome +// this, if a head is requested, 'chelonia.db/get' returns information from +// the Chelonia contract state. +const dbPrimitiveSelectors = process.env.LIGHTWEIGHT_CLIENT === 'true' + ? 
{ + 'chelonia.db/get': function (key: string): Promise { + const id = getContractIdFromLogHead(key) + if (!id) return Promise.resolve() + const state = sbp('chelonia/rootState').contracts[id] + const value = (state?.HEAD + ? JSON.stringify({ + HEAD: state.HEAD, + height: state.height, + previousKeyOp: state.previousKeyOp + }) + : undefined) + return Promise.resolve(value) + }, + 'chelonia.db/set': function (): Promise { + return Promise.resolve() + }, + 'chelonia.db/delete': function (): Promise { + return Promise.resolve(true) + } + } + : { + // eslint-disable-next-line require-await + 'chelonia.db/get': async function (prefixableKey: string): Promise { + const [prefix, key] = parsePrefixableKey(prefixableKey) + const value = sbp('okTurtles.data/get', key) + if (value === undefined) { + return + } + return prefixHandlers[prefix](value) as HEADInfo | Buffer | string + }, + // eslint-disable-next-line require-await + 'chelonia.db/set': async function (key: string, value: Buffer | string): Promise { + checkKey(key) + return sbp('okTurtles.data/set', key, value) + }, + // eslint-disable-next-line require-await + 'chelonia.db/delete': async function (key: string): Promise { + return sbp('okTurtles.data/delete', key) + } + } + +export default sbp('sbp/selectors/register', { + ...dbPrimitiveSelectors, + 'chelonia/db/getEntryMeta': async (contractID: string, height: number) => { + const entryMetaJson = await sbp('chelonia.db/get', `_private_hidx=${contractID}#${height}`) + if (!entryMetaJson) return + + return JSON.parse(entryMetaJson) + }, + 'chelonia/db/setEntryMeta': async (contractID: string, height: number, entryMeta: object) => { + const entryMetaJson = JSON.stringify(entryMeta) + await sbp('chelonia.db/set', `_private_hidx=${contractID}#${height}`, entryMetaJson) + }, + 'chelonia/db/latestHEADinfo': async (contractID: string): Promise => { + const r: string | undefined = await sbp('chelonia.db/get', getLogHead(contractID)) + return r && JSON.parse(r) + }, + 
'chelonia/db/deleteLatestHEADinfo': (contractID: string): Promise => { + return sbp('chelonia.db/set', getLogHead(contractID), '') + }, + 'chelonia/db/getEntry': async function (this: CheloniaContext, hash: string): Promise { + try { + const value: string = await sbp('chelonia.db/get', hash) + if (!value) throw new Error(`no entry for ${hash}!`) + return SPMessage.deserialize(value, this.transientSecretKeys, undefined, this.config.unwrapMaybeEncryptedData) + } catch (e) { + throw new ChelErrorDBConnection(`${(e as Error).name} during getEntry: ${(e as Error).message}`) + } + }, + 'chelonia/db/addEntry': function (entry: SPMessage): Promise { + // because addEntry contains multiple awaits - we want to make sure it gets executed + // "atomically" to minimize the chance of a contract fork + return sbp('okTurtles.eventQueue/queueEvent', `chelonia/db/${entry.contractID()}`, [ + 'chelonia/private/db/addEntry', entry + ]) + }, + // NEVER call this directly yourself! _always_ call 'chelonia/db/addEntry' instead + 'chelonia/private/db/addEntry': async function (entry: SPMessage): Promise { + try { + const { previousHEAD: entryPreviousHEAD, previousKeyOp: entryPreviousKeyOp, height: entryHeight } = entry.head() + const contractID: string = entry.contractID() + if (await sbp('chelonia.db/get', entry.hash())) { + console.warn(`[chelonia.db] entry exists: ${entry.hash()}`) + return entry.hash() + } + const HEADinfo = await sbp('chelonia/db/latestHEADinfo', contractID) + if (!entry.isFirstMessage()) { + if (!HEADinfo) { + throw new Error(`No latest HEAD for ${contractID} when attempting to process entry with previous HEAD ${entryPreviousHEAD} at height ${entryHeight}`) + } + const { HEAD: contractHEAD, previousKeyOp: contractPreviousKeyOp, height: contractHeight } = HEADinfo + if (entryPreviousHEAD !== contractHEAD) { + console.warn(`[chelonia.db] bad previousHEAD: ${entryPreviousHEAD}! 
Expected: ${contractHEAD} for contractID: ${contractID}`) + throw new ChelErrorDBBadPreviousHEAD(`bad previousHEAD: ${entryPreviousHEAD}. Expected ${contractHEAD} for contractID: ${contractID}`) + } else if (entryPreviousKeyOp !== contractPreviousKeyOp) { + console.error(`[chelonia.db] bad previousKeyOp: ${entryPreviousKeyOp}! Expected: ${contractPreviousKeyOp} for contractID: ${contractID}`) + throw new ChelErrorDBBadPreviousHEAD(`bad previousKeyOp: ${entryPreviousKeyOp}. Expected ${contractPreviousKeyOp} for contractID: ${contractID}`) + } else if (!Number.isSafeInteger(entryHeight) || entryHeight !== (contractHeight + 1)) { + console.error(`[chelonia.db] bad height: ${entryHeight}! Expected: ${contractHeight + 1} for contractID: ${contractID}`) + throw new ChelErrorDBBadPreviousHEAD(`[chelonia.db] bad height: ${entryHeight}! Expected: ${contractHeight + 1} for contractID: ${contractID}`) + } + } else { + if (HEADinfo) { + console.error(`[chelonia.db] bad previousHEAD: ${entryPreviousHEAD}! Expected: for contractID: ${contractID}`) + throw new ChelErrorDBBadPreviousHEAD(`bad previousHEAD: ${entryPreviousHEAD}. Expected for contractID: ${contractID}`) + } else if (entryHeight !== 0) { + console.error(`[chelonia.db] bad height: ${entryHeight}! Expected: 0 for contractID: ${contractID}`) + throw new ChelErrorDBBadPreviousHEAD(`[chelonia.db] bad height: ${entryHeight}! Expected: 0 for contractID: ${contractID}`) + } + } + await sbp('chelonia.db/set', entry.hash(), entry.serialize()) + await sbp('chelonia.db/set', getLogHead(contractID), JSON.stringify({ + HEAD: entry.hash(), + previousKeyOp: entry.isKeyOp() ? 
entry.hash() : entry.previousKeyOp(), + height: entry.height() + })) + console.debug(`[chelonia.db] HEAD for ${contractID} updated to:`, entry.hash()) + await sbp('chelonia/db/setEntryMeta', contractID, entryHeight, { + // The hash is used for reverse lookups (height to CID) + hash: entry.hash(), + // The date isn't currently used, but will be used for filtering messages + date: new Date().toISOString(), + // isKeyOp is used for filtering messages (the actual filtering is + // done more efficiently a separate index key, but `isKeyOp` allows + // us to bootstrap this process without having to load the full message) + // The separate index key bears the prefix `_private_keyop_idx_`. + ...(entry.isKeyOp() && { isKeyOp: true }) + }) + return entry.hash() + } catch (e) { + if ((e as Error).name.includes('ErrorDB')) { + throw e // throw the specific type of ErrorDB instance + } + throw new ChelErrorDBConnection(`${(e as Error).name} during addEntry: ${(e as Error).message}`) + } + }, + 'chelonia/db/lastEntry': async function (contractID: string): Promise { + try { + const latestHEADinfo = await sbp('chelonia/db/latestHEADinfo', contractID) + if (!latestHEADinfo) throw new Error(`contract ${contractID} has no latest hash!`) + return sbp('chelonia/db/getEntry', latestHEADinfo.HEAD) + } catch (e) { + throw new ChelErrorDBConnection(`${(e as Error).name} during lastEntry: ${(e as Error).message}`) + } + } +}) as string[] diff --git a/src/encryptedData.test.ts b/src/encryptedData.test.ts new file mode 100644 index 0000000..8880004 --- /dev/null +++ b/src/encryptedData.test.ts @@ -0,0 +1,79 @@ +import { CURVE25519XSALSA20POLY1305, keygen, keyId, serializeKey } from '@chelonia/crypto' +import * as assert from 'node:assert' +import { describe, it } from 'node:test' +import { encryptedIncomingData, encryptedOutgoingData, encryptedOutgoingDataWithRawKey } from './encryptedData.js' +import type { ChelContractState } from './types.js' + +describe('Encrypted data API', () => { + 
it('should encrypt outgoing data and decrypt incoming data when using a key from the state', () => { + const key = keygen(CURVE25519XSALSA20POLY1305) + const id = keyId(key) + const state = { + _vm: { + authorizedKeys: { + [id]: { + name: 'name', + purpose: ['enc'], + data: serializeKey(key, false) + } + } + } + } as ChelContractState + + const encryptedData = encryptedOutgoingData(state, id, 'foo') + assert.ok(typeof encryptedData === 'object') + assert.ok(typeof encryptedData.toString === 'function') + assert.ok(typeof encryptedData.serialize === 'function') + assert.ok(typeof encryptedData.valueOf === 'function') + assert.equal(encryptedData.valueOf(), 'foo') + + const stringifiedEncryptedData = encryptedData.toString('') + assert.notEqual(stringifiedEncryptedData, 'foo') + assert.notEqual(encryptedData.serialize(''), 'foo') + + const incoming = encryptedIncomingData('', state, JSON.parse(stringifiedEncryptedData), 0, { + [id]: key + }) + + assert.ok(typeof incoming === 'object') + assert.ok(typeof incoming.toString === 'function') + assert.ok(typeof incoming.serialize === 'function') + assert.ok(typeof incoming.valueOf === 'function') + assert.deepEqual(incoming.toJSON!(), JSON.parse(stringifiedEncryptedData)) + assert.equal(incoming.toString(), stringifiedEncryptedData) + assert.equal(incoming.valueOf(), 'foo') + }) + + it('should encrypt outgoing data and decrypt incoming data when using a raw key', () => { + const key = keygen(CURVE25519XSALSA20POLY1305) + const id = keyId(key) + + const encryptedData = encryptedOutgoingDataWithRawKey(key, 'foo') + assert.ok(typeof encryptedData === 'object') + assert.ok(typeof encryptedData.toString === 'function') + assert.ok(typeof encryptedData.serialize === 'function') + assert.ok(typeof encryptedData.valueOf === 'function') + assert.equal(encryptedData.valueOf(), 'foo') + + const serializedEncryptedData = encryptedData.serialize() + assert.notEqual(serializedEncryptedData, 'foo') + + const incoming = 
encryptedIncomingData('', { + _vm: { + authorizedKeys: { + [id]: { + purpose: ['enc'] + } + } + } + } as ChelContractState, serializedEncryptedData, 0, { [id]: key }) + + assert.ok(typeof incoming === 'object') + assert.ok(typeof incoming.toString === 'function') + assert.ok(typeof incoming.serialize === 'function') + assert.ok(typeof incoming.valueOf === 'function') + assert.equal(incoming.valueOf(), 'foo') + assert.deepEqual(incoming.toJSON!(), serializedEncryptedData) + assert.equal(incoming.toString(), JSON.stringify(serializedEncryptedData)) + }) +}) diff --git a/src/encryptedData.ts b/src/encryptedData.ts new file mode 100644 index 0000000..f86f32d --- /dev/null +++ b/src/encryptedData.ts @@ -0,0 +1,377 @@ +import type { Key } from '@chelonia/crypto' +import { decrypt, deserializeKey, encrypt, keyId, serializeKey } from '@chelonia/crypto' +import sbp from '@sbp/sbp' +import { has } from 'turtledash' +import { ChelErrorDecryptionError, ChelErrorDecryptionKeyNotFound, ChelErrorUnexpected } from './errors.js' +import { isRawSignedData, signedIncomingData } from './signedData.js' +import type { ChelContractState } from './types.js' + +const rootStateFn = () => sbp('chelonia/rootState') + +export interface EncryptedData { + // The ID of the encryption key used + encryptionKeyId: string, + // The unencrypted data. For outgoing data, this is the original data given + // as input. For incoming data, decryption will be attempted. + valueOf: () => T, + // The serialized _encrypted_ data. For outgoing data, encryption will be + // attempted. For incoming data, this is the original data given as input. + // The `additionalData` parameter is only used for outgoing data, and binds + // the encrypted payload to additional information. 
+ serialize: (additionalData?: string) => [string, string], + // A string version of the serialized encrypted data (i.e., `JSON.stringify()`) + toString: (additionalData?: string) => string, + // For incoming data, this is an alias of `serialize`. Undefined for outgoing + // data. + toJSON?: () => [string, string] +} + +// `proto` & `wrapper` are utilities for `isEncryptedData` +const proto = Object.create(null, { + _isEncryptedData: { + value: true + } +}) as object + +const wrapper = (o: T): T => { + return Object.setPrototypeOf(o, proto) +} + +// `isEncryptedData` will return true for objects created by the various +// `encrypt*Data` functions. It's meant to implement functionality equivalent +// to `o instanceof EncryptedData` +export const isEncryptedData = (o: unknown): o is EncryptedData => { + return !!o && !!Object.getPrototypeOf(o)?._isEncryptedData +} + +// TODO: Check for permissions and allowedActions; this requires passing some +// additional context +const encryptData = function (stateOrContractID: string | ChelContractState, eKeyId: string, data: T, additionalData: string): [string, string] { + const state = typeof stateOrContractID === 'string' ? rootStateFn()[stateOrContractID] as ChelContractState : stateOrContractID + + // Has the key been revoked? 
If so, attempt to find an authorized key by the same name + const designatedKey = state?._vm?.authorizedKeys?.[eKeyId] + if (!designatedKey?.purpose.includes( + 'enc' + )) { + throw new Error(`Encryption key ID ${eKeyId} is missing or is missing encryption purpose`) + } + if (designatedKey._notAfterHeight != null) { + const name = state._vm.authorizedKeys[eKeyId].name + const newKeyId = Object.values(state._vm?.authorizedKeys).find((v) => v._notAfterHeight == null && v.name === name && v.purpose.includes('enc'))?.id + + if (!newKeyId) { + throw new Error(`Encryption key ID ${eKeyId} has been revoked and no new key exists by the same name (${name})`) + } + + eKeyId = newKeyId + } + + const key = state._vm?.authorizedKeys?.[eKeyId].data + + if (!key) { + throw new Error(`Missing encryption key ${eKeyId}`) + } + + const deserializedKey = typeof key === 'string' ? deserializeKey(key) : key + + return [ + keyId(deserializedKey), + encrypt(deserializedKey, JSON.stringify(data, (_, v) => { + if (v && has(v, 'serialize') && typeof v.serialize === 'function') { + if (v.serialize.length === 1) { + return v.serialize(additionalData) + } else { + return v.serialize() + } + } + return v + }), additionalData) + ] +} + +// TODO: Check for permissions and allowedActions; this requires passing the +// entire SPMessage +const decryptData = function (state: ChelContractState, height: number, data: [string, string], additionalKeys: Record, additionalData: string, validatorFn?: (v: T, id: string) => void): T { + if (!state) { + throw new ChelErrorDecryptionError('Missing contract state') + } + + // Compatibility with signedData (composed signed + encrypted data) + if (typeof data.valueOf === 'function') data = data.valueOf() as [string, string] + + if (!isRawEncryptedData(data)) { + throw new ChelErrorDecryptionError('Invalid message format') + } + + const [eKeyId, message] = data + const key = additionalKeys[eKeyId] + + if (!key) { + throw new ChelErrorDecryptionKeyNotFound(`Key 
${eKeyId} not found`, { cause: eKeyId }) + } + + // height as NaN is used to allow checking for revokedKeys as well as + // authorizedKeys when decrypting data. This is normally inappropriate because + // revoked keys should be considered compromised and not used for encrypting + // new data + // However, OP_KEY_SHARE may include data encrypted with some other contract's + // keys when a key rotation is done. This is done, along with OP_ATOMIC and + // OP_KEY_UPDATE to rotate keys in a contract while allowing member contracts + // to retrieve and use the new key material. + // In such scenarios, since the keys really live in that other contract, it is + // impossible to know if the keys had been revoked in the 'source' contract + // at the time the key rotation was done. This is also different from foreign + // keys because these encryption keys are not necessarily authorized in the + // contract issuing OP_KEY_SHARE, and what is important is to refer to the + // (keys in the) foreign contract explicitly, as an alternative to sending + // an OP_KEY_SHARE to that contract. + // Using revoked keys represents some security risk since, as mentioned, they + // should generlly be considered compromised. However, in the scenario above + // we can trust that the party issuing OP_KEY_SHARE is not maliciously using + // old (revoked) keys, because there is little to be gained from not doing + // this. If that party's intention were to leak or compromise keys, they can + // already do so by other means, since they have access to the raw secrets + // that OP_KEY_SHARE is meant to protect. Hence, this attack does not open up + // any new attack vectors or venues that were not already available using + // different means. + const designatedKey = state._vm?.authorizedKeys?.[eKeyId] + if (!designatedKey || (height > designatedKey._notAfterHeight!) 
|| (height < designatedKey._notBeforeHeight) || !designatedKey.purpose.includes( + 'enc' + )) { + throw new ChelErrorUnexpected( + `Key ${eKeyId} is unauthorized or expired for the current contract` + ) + } + + const deserializedKey = typeof key === 'string' ? deserializeKey(key) : key + + try { + const result = JSON.parse(decrypt(deserializedKey, message, additionalData)) + if (typeof validatorFn === 'function') validatorFn(result, eKeyId) + return result + } catch (e) { + throw new ChelErrorDecryptionError((e as Error)?.message || e as string) + } +} + +export const encryptedOutgoingData = (stateOrContractID: string | ChelContractState, eKeyId: string, data: T): EncryptedData => { + if (!stateOrContractID || data === undefined || !eKeyId) throw new TypeError('Invalid invocation') + + const boundStringValueFn = encryptData.bind(null, stateOrContractID, eKeyId, data) + + return wrapper({ + get encryptionKeyId () { + return eKeyId + }, + get serialize () { + return (additionalData?: string) => boundStringValueFn(additionalData || '') + }, + get toString () { + return (additionalData?: string) => JSON.stringify(this.serialize(additionalData)) + }, + get valueOf () { + return () => data + } + }) +} + +// Used for OP_CONTRACT as a state does not yet exist +export const encryptedOutgoingDataWithRawKey = (key: Key, data: T): EncryptedData => { + if (data === undefined || !key) throw new TypeError('Invalid invocation') + + const eKeyId = keyId(key) + const state = { + _vm: { + authorizedKeys: { + [eKeyId]: { + purpose: ['enc'], + data: serializeKey(key, false), + _notBeforeHeight: 0, + _notAfterHeight: undefined + } + } + } + } as ChelContractState + const boundStringValueFn = encryptData.bind(null, state, eKeyId, data) + + return wrapper({ + get encryptionKeyId () { + return eKeyId + }, + get serialize () { + return (additionalData?: string) => boundStringValueFn(additionalData || '') + }, + get toString () { + return (additionalData?: string) => 
JSON.stringify(this.serialize(additionalData)) + }, + get valueOf () { + return () => data + } + }) +} + +export const encryptedIncomingData = (contractID: string, state: ChelContractState, data: [string, string], height: number, additionalKeys?: Record, additionalData?: string, validatorFn?: (v: T, id: string) => void): EncryptedData => { + let decryptedValue: T + const decryptedValueFn = (): T => { + if (decryptedValue) { + return decryptedValue + } + if (!state || !additionalKeys) { + const rootState = rootStateFn() + state = state || rootState[contractID] + additionalKeys = additionalKeys ?? rootState.secretKeys + } + decryptedValue = decryptData(state, height, data, additionalKeys!, additionalData || '', validatorFn) + + if (isRawSignedData(decryptedValue)) { + decryptedValue = signedIncomingData(contractID, state, decryptedValue, height, additionalData || '') as unknown as T + } + + return decryptedValue + } + + return wrapper({ + get encryptionKeyId () { + return encryptedDataKeyId(data) + }, + get serialize () { + return () => data + }, + get toString () { + return () => JSON.stringify(this.serialize()) + }, + get valueOf () { + return decryptedValueFn + }, + get toJSON () { + return this.serialize + } + }) +} + +export const encryptedIncomingForeignData = (contractID: string, _0: never, data: [string, string], _1: never, additionalKeys?: Record, additionalData?: string, validatorFn?: (v: T, id: string) => void): EncryptedData => { + let decryptedValue: T + const decryptedValueFn = (): T => { + if (decryptedValue) { + return decryptedValue + } + const rootState = rootStateFn() + const state = rootState[contractID] + decryptedValue = decryptData(state, NaN, data, additionalKeys ?? 
rootState.secretKeys, additionalData || '', validatorFn) + + if (isRawSignedData(decryptedValue)) { + // TODO: Specify height + return signedIncomingData(contractID, state, decryptedValue, NaN, additionalData || '') as unknown as T + } + + return decryptedValue + } + + return wrapper({ + get encryptionKeyId () { + return encryptedDataKeyId(data) + }, + get serialize () { + return () => data + }, + get toString () { + return () => JSON.stringify(this.serialize()) + }, + get valueOf () { + return decryptedValueFn + }, + get toJSON () { + return this.serialize + } + }) +} + +export const encryptedIncomingDataWithRawKey = (key: Key, data: [string, string], additionalData?: string): EncryptedData => { + if (data === undefined || !key) throw new TypeError('Invalid invocation') + + let decryptedValue: T + const eKeyId = keyId(key) + const decryptedValueFn = (): T => { + if (decryptedValue) { + return decryptedValue + } + const state = { + _vm: { + authorizedKeys: { + [eKeyId]: { + purpose: ['enc'], + data: serializeKey(key, false), + _notBeforeHeight: 0, + _notAfterHeight: undefined + } + } + } + } as ChelContractState + decryptedValue = decryptData(state, NaN, data, { [eKeyId]: key }, additionalData || '') + + return decryptedValue + } + + return wrapper({ + get encryptionKeyId () { + return encryptedDataKeyId(data) + }, + get serialize () { + return () => data + }, + get toString () { + return () => JSON.stringify(this.serialize()) + }, + get valueOf () { + return decryptedValueFn + }, + get toJSON () { + return this.serialize + } + }) +} + +export const encryptedDataKeyId = (data: unknown): string => { + if (!isRawEncryptedData(data)) { + throw new ChelErrorDecryptionError('Invalid message format') + } + + return data[0] +} + +export const isRawEncryptedData = (data: unknown): data is [string, string] => { + if (!Array.isArray(data) || data.length !== 2 || data.map(v => typeof v).filter(v => v !== 'string').length !== 0) { + return false + } + + return true +} + 
+export const unwrapMaybeEncryptedData = (data: T | EncryptedData): { encryptionKeyId: string | null, data: T } | undefined => { + if (data == null) return + if (isEncryptedData(data)) { + try { + return { + encryptionKeyId: data.encryptionKeyId, + data: data.valueOf() + } + } catch (e) { + console.warn('unwrapMaybeEncryptedData: Unable to decrypt', e) + } + } else { + return { + encryptionKeyId: null, + data + } + } +} + +export const maybeEncryptedIncomingData = (contractID: string, state: ChelContractState, data: T | [string, string], height: number, additionalKeys?: Record, additionalData?: string, validatorFn?: (v: T, id: string) => void): T | EncryptedData => { + if (isRawEncryptedData(data)) { + return encryptedIncomingData(contractID, state, data, height, additionalKeys, additionalData, validatorFn) + } else { + validatorFn?.(data, '') + return data + } +} diff --git a/src/errors.ts b/src/errors.ts new file mode 100644 index 0000000..ca0f1fe --- /dev/null +++ b/src/errors.ts @@ -0,0 +1,36 @@ +// ugly boilerplate because JavaScript is stupid +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error#Custom_Error_Types +export const ChelErrorGenerator = ( + name: string, + base: ErrorConstructor = Error +): ErrorConstructor => + ((class extends base { + constructor (...params: ConstructorParameters) { + super(...params) + this.name = name // string literal so minifier doesn't overwrite + // Polyfill for cause property + if (params[1]?.cause !== this.cause) { + Object.defineProperty(this, 'cause', { configurable: true, writable: true, value: params[1]?.cause }) + } + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor) + } + } + }) as ErrorConstructor) + +export const ChelErrorWarning: typeof Error = ChelErrorGenerator('ChelErrorWarning') +export const ChelErrorAlreadyProcessed: typeof Error = ChelErrorGenerator('ChelErrorAlreadyProcessed') +export const ChelErrorDBBadPreviousHEAD: typeof Error = 
ChelErrorGenerator('ChelErrorDBBadPreviousHEAD') +export const ChelErrorDBConnection: typeof Error = ChelErrorGenerator('ChelErrorDBConnection') +export const ChelErrorUnexpected: typeof Error = ChelErrorGenerator('ChelErrorUnexpected') +export const ChelErrorKeyAlreadyExists: typeof Error = ChelErrorGenerator('ChelErrorKeyAlreadyExists') +export const ChelErrorUnrecoverable: typeof Error = ChelErrorGenerator('ChelErrorUnrecoverable') +export const ChelErrorForkedChain: typeof Error = ChelErrorGenerator('ChelErrorForkedChain') +export const ChelErrorDecryptionError: typeof Error = ChelErrorGenerator('ChelErrorDecryptionError') +export const ChelErrorDecryptionKeyNotFound: typeof Error = ChelErrorGenerator('ChelErrorDecryptionKeyNotFound', ChelErrorDecryptionError) +export const ChelErrorSignatureError: typeof Error = ChelErrorGenerator('ChelErrorSignatureError') +export const ChelErrorSignatureKeyUnauthorized: typeof Error = ChelErrorGenerator('ChelErrorSignatureKeyUnauthorized', ChelErrorSignatureError) +export const ChelErrorSignatureKeyNotFound: typeof Error = ChelErrorGenerator('ChelErrorSignatureKeyNotFound', ChelErrorSignatureError) +export const ChelErrorFetchServerTimeFailed: typeof Error = ChelErrorGenerator('ChelErrorFetchServerTimeFailed') +export const ChelErrorUnexpectedHttpResponseCode: typeof Error = ChelErrorGenerator('ChelErrorUnexpectedHttpResponseCode') +export const ChelErrorResourceGone: typeof Error = ChelErrorGenerator('ChelErrorResourceGone', ChelErrorUnexpectedHttpResponseCode) diff --git a/src/events.ts b/src/events.ts new file mode 100644 index 0000000..6273f40 --- /dev/null +++ b/src/events.ts @@ -0,0 +1,15 @@ +export const CHELONIA_RESET = 'chelonia-reset' +export const CONTRACT_IS_SYNCING = 'contract-is-syncing' +export const CONTRACTS_MODIFIED = 'contracts-modified' +export const CONTRACTS_MODIFIED_READY = 'contracts-modified-ready' +export const EVENT_HANDLED = 'event-handled' +export const EVENT_PUBLISHED = 'event-published' +export 
const EVENT_PUBLISHING_ERROR = 'event-publishing-error' +export const EVENT_HANDLED_READY = 'event-handled-ready' +export const CONTRACT_REGISTERED = 'contract-registered' +export const CONTRACT_UNREGISTERED = 'contract-unregistered' +export const CONTRACT_IS_PENDING_KEY_REQUESTS = 'contract-is-pending-key-requests' +export const CONTRACT_HAS_RECEIVED_KEYS = 'contract-has-received-keys' +export const PERSISTENT_ACTION_FAILURE = 'persistent-action-failure' +export const PERSISTENT_ACTION_SUCCESS = 'persistent-action-success' +export const PERSISTENT_ACTION_TOTAL_FAILURE = 'persistent-action-total_failure' diff --git a/src/files.ts b/src/files.ts new file mode 100644 index 0000000..84f62b0 --- /dev/null +++ b/src/files.ts @@ -0,0 +1,401 @@ +import encodeMultipartMessage from '@apeleghq/multipart-parser/encodeMultipartMessage' +import decrypt from '@apeleghq/rfc8188/decrypt' +import { aes256gcm } from '@apeleghq/rfc8188/encodings' +import encrypt from '@apeleghq/rfc8188/encrypt' +import { generateSalt } from '@chelonia/crypto' +import { coerce } from '@chelonia/multiformats/bytes' +import sbp from '@sbp/sbp' +import { Buffer } from 'buffer' +import { has } from 'turtledash' +import type { Secret } from './Secret.js' +import { blake32Hash, createCID, createCIDfromStream, multicodes } from './functions.js' +import { ChelFileManifest, CheloniaContext } from './types.js' +import { buildShelterAuthorizationHeader } from './utils.js' + +// Snippet from +// Node.js supports request streams, but also this check isn't meant for Node.js +// This part only checks for client-side support. Later, when we try uploading +// a file for the first time, we'll check if requests work, as streams are not +// supported in HTTP/1.1 and lower versions. 
+let supportsRequestStreams: boolean | 2 = typeof window !== 'object' || (() => { + let duplexAccessed = false + + const hasContentType = new Request('', { + body: new ReadableStream(), + method: 'POST', + get duplex () { + duplexAccessed = true + return 'half' + } + } as unknown as Request).headers.has('content-type') + + return duplexAccessed && !hasContentType +})() + +const streamToUint8Array = async (s: ReadableStream) => { + const reader = s.getReader() + const chunks: Uint8Array[] = [] + let length = 0 + for (;;) { + const result = await reader.read() + if (result.done) break + chunks.push(coerce(result.value)) + length += result.value.byteLength + } + const body = new Uint8Array(length) + chunks.reduce((offset, chunk) => { + body.set(chunk, offset) + return offset + chunk.byteLength + }, 0) + return body +} + +// Check for streaming support, as of today (Feb 2024) only Blink- +// based browsers support this (i.e., Firefox and Safari don't). +const ArrayBufferToUint8ArrayStream = async function (this: CheloniaContext, connectionURL: string, s: ReadableStream) { + // Even if the browser supports streams, some browsers (e.g., Chrome) also + // require that the server support HTTP/2 + if (supportsRequestStreams === true) { + await this.config.fetch(`${connectionURL}/streams-test`, { + method: 'POST', + body: new ReadableStream({ start (c) { c.enqueue(Buffer.from('ok')); c.close() } }), + duplex: 'half' + } as unknown as Request).then((r) => { + if (!r.ok) throw new Error('Unexpected response') + // supportsRequestStreams is tri-state + supportsRequestStreams = 2 + }).catch(() => { + console.info('files: Disabling streams support because the streams test failed') + supportsRequestStreams = false + }) + } + if (!supportsRequestStreams) { + return await streamToUint8Array(s) + } + + return s.pipeThrough( + // eslint-disable-next-line no-undef + new TransformStream( + { + transform (chunk, controller) { + controller.enqueue(coerce(chunk)) + } + }) + ) +} + +const 
computeChunkDescriptors = (inStream: ReadableStream) => { + let length = 0 + const [lengthStream, cidStream] = inStream.tee() + const lengthPromise = new Promise((resolve, reject) => { + lengthStream.pipeTo(new WritableStream({ + write (chunk) { + length += chunk.byteLength + }, + close () { + resolve(length) + }, + abort (reason) { + reject(reason) + } + })) + }) + const cidPromise = createCIDfromStream(cidStream, multicodes.SHELTER_FILE_CHUNK) + return Promise.all([lengthPromise, cidPromise]) +} + +const fileStream = (chelonia: CheloniaContext, manifest: ChelFileManifest) => { + const dataGenerator = async function * () { + let readSize = 0 + for (const chunk of manifest.chunks) { + if ( + !Array.isArray(chunk) || + typeof chunk[0] !== 'number' || + typeof chunk[1] !== 'string' + ) { + throw new Error('Invalid chunk descriptor') + } + const chunkResponse = await chelonia.config.fetch(`${chelonia.config.connectionURL}/file/${chunk[1]}`, { + method: 'GET', + signal: chelonia.abortController.signal + }) + if (!chunkResponse.ok) { + throw new Error('Unable to retrieve manifest') + } + // TODO: We're reading the chunks in their entirety instead of using the + // stream interface. In the future, this can be changed to get a stream + // instead. Ensure then that the following checks are replaced with a + // streaming version (length and CID) + const chunkBinary = await chunkResponse.arrayBuffer() + if (chunkBinary.byteLength !== chunk[0]) throw new Error('mismatched chunk size') + readSize += chunkBinary.byteLength + if (readSize > manifest.size) throw new Error('read size exceeds declared size') + if (createCID(coerce(chunkBinary), multicodes.SHELTER_FILE_CHUNK) !== chunk[1]) throw new Error('mismatched chunk hash') + yield chunkBinary + } + // Now that we're done, we check to see if we read the correct size + // If all went well, we should have and this would never throw. 
However, + // if the payload was tampered with, we could have read a different size + // than expected. This will throw at the end, after all chunks are processed + // and after some or all of the data have already been consumed. + // If integrity of the entire payload is important, consumers must buffer + // the stream and wait until the end before any processing. + if (readSize !== manifest.size) throw new Error('mismatched size') + } + + const dataIterator = dataGenerator() + + return new ReadableStream({ + async pull (controller) { + try { + const chunk = await dataIterator.next() + if (chunk.done) { + controller.close() + return + } + controller.enqueue(chunk.value) + } catch (e) { + controller.error(e) + } + } + }) +} + +export const aes256gcmHandlers = { + upload: (_chelonia: CheloniaContext, manifestOptions: ChelFileManifest) => { + // IKM stands for Input Keying Material, and is a random value used to + // derive the encryption used in the chunks. See RFC 8188 for how the + // actual encryption key gets derived from the IKM. + const params = manifestOptions['cipher-params'] as Record + let IKM = params?.IKM as string | Uint8Array + const recordSize = (params?.rs ?? 1 << 16) as number + if (!IKM) { + IKM = new Uint8Array(33) + self.crypto.getRandomValues(IKM) + } + // The keyId is only used as a sanity check but otherwise it is not needed + // Because the keyId is computed from the IKM, which is a secret, it is + // truncated to just eight characters so that it doesn't disclose too much + // information about the IKM (in theory, since it's a random string 33 bytes + // long, a full hash shouldn't disclose too much information anyhow). + // The reason the keyId is not _needed_ is that the IKM is part of the + // downloadParams, so anyone downloading a file should have the required + // context, and there is exactly one valid IKM for any downloadParams. 
+ // By truncating the keyId, the only way to fully verify whether a given + // IKM decrypts a file is by attempting decryption. + // A side-effect of truncating the keyId is that, if the IKM were shared + // some other way (e.g., using the OP_KEY_SHARE mechanism), because of + // collisions it may not always be possible to look up the correct IKM. + // Therefore, a handler that uses a different strategy than the one used + // here (namely, including the IKM in the downloadParams) may need to use + // longer key IDs, possibly a full hash. + const keyId = blake32Hash('aes256gcm-keyId' + blake32Hash(IKM)).slice(-8) + const binaryKeyId = Buffer.from(keyId) + return { + cipherParams: { + keyId + }, + streamHandler: async (stream: ReadableStream) => { + return await encrypt(aes256gcm, stream, recordSize, binaryKeyId, IKM as Uint8Array) + }, + downloadParams: { + IKM: Buffer.from(IKM as Uint8Array).toString('base64'), + rs: recordSize + } + } + }, + download: (chelonia: CheloniaContext, downloadParams: { IKM?: string, rs?: number }, manifest: ChelFileManifest) => { + const IKMb64 = downloadParams.IKM + if (!IKMb64) { + throw new Error('Missing IKM in downloadParams') + } + const IKM = Buffer.from(IKMb64, 'base64') + const keyId = blake32Hash('aes256gcm-keyId' + blake32Hash(IKM)).slice(-8) + if (!manifest['cipher-params'] || !(manifest['cipher-params'] as Record).keyId) { + throw new Error('Missing cipher-params') + } + if (keyId !== (manifest['cipher-params'] as Record).keyId) { + throw new Error('Key ID mismatch') + } + const maxRecordSize = downloadParams.rs ?? 
1 << 27 // 128 MiB + return { + payloadHandler: async () => { + const bytes = await streamToUint8Array( + decrypt(aes256gcm, fileStream(chelonia, manifest), (actualKeyId) => { + if (Buffer.from(actualKeyId).toString() !== keyId) { + throw new Error('Invalid key ID') + } + return IKM + }, maxRecordSize) + ) + return new Blob([bytes], { type: manifest.type || 'application/octet-stream' }) + } + } + } +} + +export const noneHandlers = { + upload: () => { + return { + cipherParams: undefined, + streamHandler: (stream: ReadableStream) => { + return stream + }, + downloadParams: undefined + } + }, + download: (chelonia: CheloniaContext, _downloadParams: object, manifest: ChelFileManifest) => { + return { + payloadHandler: async () => { + const bytes = await streamToUint8Array(fileStream(chelonia, manifest)) + return new Blob([bytes], { type: manifest.type || 'application/octet-stream' }) + } + } + } +} + +// TODO: Move into Chelonia config +const cipherHandlers = { + aes256gcm: aes256gcmHandlers, + none: noneHandlers +} + +export default sbp('sbp/selectors/register', { + 'chelonia/fileUpload': async function (this: CheloniaContext, chunks: Blob | Blob[], manifestOptions: ChelFileManifest, { billableContractID }: { billableContractID?: string } = {}) { + if (!Array.isArray(chunks)) chunks = [chunks] + const chunkDescriptors: Promise<[number, string]>[] = [] + const cipherHandler = await cipherHandlers[manifestOptions.cipher as keyof typeof cipherHandlers]?.upload?.(this, manifestOptions) + if (!cipherHandler) throw new Error('Unsupported cipher') + const cipherParams = cipherHandler.cipherParams + const transferParts = await Promise.all(chunks.map(async (chunk: Blob, i) => { + const stream = chunk.stream() + const encryptedStream = await cipherHandler.streamHandler(stream) + const [body, s] = encryptedStream.tee() + chunkDescriptors.push(computeChunkDescriptors(s)) + return { + headers: new Headers([ + ['content-disposition', `form-data; name="${i}"; filename="${i}"`], + 
['content-type', 'application/octet-stream'] + ]), + body + } + })) + transferParts.push({ + headers: new Headers([ + ['content-disposition', 'form-data; name="manifest"; filename="manifest.json"'], + ['content-type', 'application/vnd.shelter.filemanifest'] + ]), + body: new ReadableStream({ + async start (controller) { + const chunks = await Promise.all(chunkDescriptors) + const manifest = { + version: '1.0.0', + // ?? undefined coerces null and undefined to undefined + // This ensures that null or undefined values don't make it to the + // JSON (otherwise, null values _would_ be stringified as 'null') + type: manifestOptions.type ?? undefined, + meta: manifestOptions.meta ?? undefined, + cipher: manifestOptions.cipher, + 'cipher-params': cipherParams, + size: chunks.reduce((acc, [cv]) => acc + cv, 0), + chunks, + 'name-map': manifestOptions['name-map'] ?? undefined, + alternatives: manifestOptions.alternatives ?? undefined + } + controller.enqueue(Buffer.from(JSON.stringify(manifest))) + controller.close() + } + }) + }) + // TODO: Using `self.crypto.randomUUID` breaks the tests. Maybe upgrading + // Cypress would fix this. + const boundary = typeof self.crypto?.randomUUID === 'function' + ? self.crypto.randomUUID() + // If randomUUID not available, we instead compute a random boundary + // The indirect call to Math.random (`(0, Math.random)`) is to explicitly + // mark that we intend on using Math.random, even though it's not a + // CSPRNG, so that it's not reported as a bug in by static analysis tools. 
+ : new Array(36).fill('').map(() => + 'abcdefghijklmnopqrstuvwxyz'[(0, Math.random)() * 26 | 0]).join('') + const stream = encodeMultipartMessage(boundary, transferParts) + + const deletionToken = 'deletionToken' + generateSalt() + const deletionTokenHash = blake32Hash(deletionToken) + + const uploadResponse = await this.config.fetch(`${this.config.connectionURL}/file`, { + method: 'POST', + signal: this.abortController.signal, + body: await ArrayBufferToUint8ArrayStream.call(this, this.config.connectionURL, stream), + headers: new Headers([ + ...(billableContractID ? [['authorization', buildShelterAuthorizationHeader.call(this, billableContractID)]] : []) as [string, string][], + ['content-type', `multipart/form-data; boundary=${boundary}`], + ['shelter-deletion-token-digest', deletionTokenHash] + ]), + duplex: 'half' + } as unknown as Request) + + if (!uploadResponse.ok) throw new Error('Error uploading file') + return { + download: { + manifestCid: await uploadResponse.text(), + downloadParams: cipherHandler.downloadParams + }, + delete: deletionToken + } + }, + 'chelonia/fileDownload': async function (this: CheloniaContext, downloadOptions: Secret<{ manifestCid: string, downloadParams: { IKM?: string, rs?: number } }>, manifestChecker?: (manifest: ChelFileManifest) => boolean | Promise) { + // Using a function to prevent accidental logging + const { manifestCid, downloadParams } = downloadOptions.valueOf() + const manifestResponse = await this.config.fetch(`${this.config.connectionURL}/file/${manifestCid}`, { + method: 'GET', + signal: this.abortController.signal + }) + if (!manifestResponse.ok) { + throw new Error('Unable to retrieve manifest') + } + const manifestBinary = await manifestResponse.arrayBuffer() + if (createCID(coerce(manifestBinary), multicodes.SHELTER_FILE_MANIFEST) !== manifestCid) throw new Error('mismatched manifest hash') + const manifest = JSON.parse(Buffer.from(manifestBinary).toString()) as ChelFileManifest + if (typeof manifest !== 
'object') throw new Error('manifest format is invalid') + if (manifest.version !== '1.0.0') throw new Error('unsupported manifest version') + if (!Array.isArray(manifest.chunks)) throw new Error('missing required field: chunks') + + if (manifestChecker) { + const proceed = await manifestChecker?.(manifest) + if (!proceed) return false + } + + const cipherHandler = await cipherHandlers[manifest.cipher as keyof typeof cipherHandlers]?.download?.(this, downloadParams, manifest) + if (!cipherHandler) throw new Error('Unsupported cipher') + + return cipherHandler.payloadHandler() + }, + 'chelonia/fileDelete': async function (this: CheloniaContext, manifestCid: string | string[], credentials: { [manifestCid: string]: { token?: string | null | undefined, billableContractID?: string | null | undefined } } = {}) { + if (!manifestCid) { + throw new TypeError('A manifest CID must be provided') + } + if (!Array.isArray(manifestCid)) manifestCid = [manifestCid] + return await Promise.allSettled(manifestCid.map(async (cid) => { + const hasCredential = has(credentials, cid) + const hasToken = has(credentials[cid], 'token') && credentials[cid].token + const hasBillableContractID = has(credentials[cid], 'billableContractID') && credentials[cid].billableContractID + if (!hasCredential || hasToken === hasBillableContractID) { + throw new TypeError(`Either a token or a billable contract ID must be provided for ${cid}`) + } + + const response = await this.config.fetch(`${this.config.connectionURL}/deleteFile/${cid}`, { + method: 'POST', + signal: this.abortController.signal, + headers: new Headers([ + ['authorization', + hasToken + ? 
`bearer ${credentials[cid].token!.valueOf()}` + : buildShelterAuthorizationHeader.call(this, credentials[cid].billableContractID!)] + ]) + }) + if (!response.ok) { + throw new Error(`Unable to delete file ${cid}`) + } + })) + } +}) as string[] diff --git a/src/functions.ts b/src/functions.ts new file mode 100644 index 0000000..95b39c0 --- /dev/null +++ b/src/functions.ts @@ -0,0 +1,129 @@ +import type { Digest } from '@chelonia/multiformats' +import { base58btc } from '@chelonia/multiformats/bases/base58' +import { blake2b256 } from '@chelonia/multiformats/blake2b' +import { blake2b256stream } from '@chelonia/multiformats/blake2bstream' +import { CID } from '@chelonia/multiformats/cid' +// Use 'buffer' instead of 'node:buffer' to polyfill in the browser +import { Buffer } from 'buffer' + +// Values from https://github.com/multiformats/multicodec/blob/master/table.csv +export const multicodes: Record = { + RAW: 0x00, + JSON: 0x0200, + SHELTER_CONTRACT_MANIFEST: 0x511e00, + SHELTER_CONTRACT_TEXT: 0x511e01, + SHELTER_CONTRACT_DATA: 0x511e02, + SHELTER_FILE_MANIFEST: 0x511e03, + SHELTER_FILE_CHUNK: 0x511e04 +} + +export const parseCID = (cid: string): CID => { + if (!cid || cid.length < 52 || cid.length > 64) { + throw new RangeError('CID length too short or too long') + } + const parsed = CID.parse(cid, base58btc) + if ( + parsed.version !== 1 || + parsed.multihash.code !== blake2b256.code || + !Object.values(multicodes).includes(parsed.code) + ) { + throw new Error('Invalid CID') + } + + return parsed +} + +export const maybeParseCID = (cid: string): CID | null => { + try { + return parseCID(cid) + } catch { + // Ignore errors if the CID couldn't be parsed + return null + } +} + +export async function createCIDfromStream (data: string | Uint8Array | ReadableStream, multicode: number = multicodes.RAW): Promise { + const uint8array = typeof data === 'string' ? 
new TextEncoder().encode(data) : data + const digest = await blake2b256stream.digest(uint8array) + return CID.create(1, multicode, digest).toString(base58btc) +} + +// TODO: implement a streaming hashing function for large files. +// Note: in fact this returns a serialized CID, not a CID object. +export function createCID (data: string | Uint8Array, multicode: number = multicodes.RAW): string { + const uint8array = typeof data === 'string' ? new TextEncoder().encode(data) : data + const digest = blake2b256.digest(uint8array) as Digest + return CID.create(1, multicode, digest).toString(base58btc) +} + +export function blake32Hash (data: string | Uint8Array): string { + const uint8array = typeof data === 'string' ? new TextEncoder().encode(data) : data + const digest = blake2b256.digest(uint8array) as Digest + // While `digest.digest` is only 32 bytes long in this case, + // `digest.bytes` is 36 bytes because it includes a multiformat prefix. + return base58btc.encode(digest.bytes) +} + +// NOTE: to preserve consistency across browser and node, we use the Buffer +// class. We could use btoa and atob in web browsers (functions that +// are unavailable on Node.js), but they do not support Unicode, +// and you have to jump through some hoops to get it to work: +// https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/btoa#Unicode_strings +// These hoops might result in inconsistencies between Node.js and the frontend. 
+export const b64ToBuf = (b64: string): Buffer => Buffer.from(b64, 'base64') +export const b64ToStr = (b64: string): string => b64ToBuf(b64).toString('utf8') +export const bufToB64 = (buf: Buffer): string => Buffer.from(buf).toString('base64') +export const strToBuf = (str: string): Buffer => Buffer.from(str, 'utf8') +export const strToB64 = (str: string): string => strToBuf(str).toString('base64') +export const bytesToB64 = (ary: Uint8Array): string => Buffer.from(ary).toString('base64') + +// Generate an UUID from a `PushSubscription' +export const getSubscriptionId = async (subscriptionInfo: ReturnType): Promise => { + const textEncoder = new TextEncoder() + // + const endpoint = textEncoder.encode(subscriptionInfo.endpoint) + // + const p256dh = textEncoder.encode(subscriptionInfo.keys!.p256dh) + const auth = textEncoder.encode(subscriptionInfo.keys!.auth) + + const canonicalForm = new ArrayBuffer( + 8 + + (4 + endpoint.byteLength) + (2 + p256dh.byteLength) + + (2 + auth.byteLength) + ) + const canonicalFormU8 = new Uint8Array(canonicalForm) + const canonicalFormDV = new DataView(canonicalForm) + let offset = 0 + canonicalFormDV.setFloat64( + offset, + subscriptionInfo.expirationTime == null + ? 
NaN + : subscriptionInfo.expirationTime, + false + ) + offset += 8 + canonicalFormDV.setUint32(offset, endpoint.byteLength, false) + offset += 4 + canonicalFormU8.set(endpoint, offset) + offset += endpoint.byteLength + canonicalFormDV.setUint16(offset, p256dh.byteLength, false) + offset += 2 + canonicalFormU8.set(p256dh, offset) + offset += p256dh.byteLength + canonicalFormDV.setUint16(offset, auth.byteLength, false) + offset += 2 + canonicalFormU8.set(auth, offset) + + const digest = await crypto.subtle.digest('SHA-384', canonicalForm) + const id = Buffer.from(digest.slice(0, 16)) + id[6] = 0x80 | (id[6] & 0x0F) + id[8] = 0x80 | (id[8] & 0x3F) + + return [ + id.slice(0, 4), + id.slice(4, 6), + id.slice(6, 8), + id.slice(8, 10), + id.slice(10, 16) + ].map((p) => p.toString('hex')).join('-') +} diff --git a/src/index.test.ts b/src/index.test.ts new file mode 100644 index 0000000..460fc53 --- /dev/null +++ b/src/index.test.ts @@ -0,0 +1,3 @@ +import './encryptedData.test.js' +import './persistent-actions.test.js' +import './pubsub/index.test.js' diff --git a/src/index.ts b/src/index.ts new file mode 100644 index 0000000..52a0def --- /dev/null +++ b/src/index.ts @@ -0,0 +1,22 @@ +import chelonia from './chelonia.js' +import db from './db.js' +import files from './files.js' +import persistentActions from './persistent-actions.js' + +export * from './SPMessage.js' +export * from './Secret.js' +export * from './chelonia.js' +export * from './constants.js' +export * from './db.js' +export * from './encryptedData.js' +export * from './errors.js' +export * from './events.js' +export * from './files.js' +export * from './persistent-actions.js' +export * from './presets.js' +export * from './pubsub/index.js' +export * from './signedData.js' +export * from './types.js' +export * from './utils.js' + +export default [...chelonia, ...db, ...files, ...persistentActions] diff --git a/src/internals.ts b/src/internals.ts new file mode 100644 index 0000000..38ea4ab --- /dev/null +++ 
b/src/internals.ts @@ -0,0 +1,2345 @@ +import sbp, { domainFromSelector } from '@sbp/sbp' +import { multicodes } from './functions.js' +import { cloneDeep, debounce, delay, has, pick, randomIntFromRange } from 'turtledash' +import type { SPKey, SPOpActionEncrypted, SPOpActionUnencrypted, SPOpAtomic, SPOpContract, SPOpKeyAdd, SPOpKeyDel, SPOpKeyRequest, SPOpKeyRequestSeen, SPOpKeyShare, SPOpKeyUpdate, SPOpPropSet, SPOpType, SPOp, ProtoSPOpActionUnencrypted, SPOpMap } from './SPMessage.js' +import { SPMessage } from './SPMessage.js' +import { Secret } from './Secret.js' +import { INVITE_STATUS } from './constants.js' +import { deserializeKey, keyId, verifySignature } from '@chelonia/crypto' +import './db.js' +import { encryptedIncomingData, encryptedOutgoingData } from './encryptedData.js' +import type { EncryptedData } from './encryptedData.js' +import { ChelErrorKeyAlreadyExists, ChelErrorResourceGone, ChelErrorUnrecoverable, ChelErrorWarning, ChelErrorDBBadPreviousHEAD, ChelErrorAlreadyProcessed, ChelErrorFetchServerTimeFailed, ChelErrorForkedChain } from './errors.js' +import { CONTRACTS_MODIFIED, CONTRACT_HAS_RECEIVED_KEYS, CONTRACT_IS_SYNCING, EVENT_HANDLED, EVENT_PUBLISHED, EVENT_PUBLISHING_ERROR } from './events.js' +import { buildShelterAuthorizationHeader, findKeyIdByName, findSuitablePublicKeyIds, findSuitableSecretKeyId, getContractIDfromKeyId, handleFetchResult, keyAdditionProcessor, logEvtError, recreateEvent, validateKeyPermissions, validateKeyAddPermissions, validateKeyDelPermissions, validateKeyUpdatePermissions } from './utils.js' +import { isSignedData, signedIncomingData } from './signedData.js' +import { ChelContractKey, ChelContractManifest, ChelContractManifestBody, ChelContractProcessMessageObject, ChelContractSideeffectMutationObject, ChelContractState, ChelRootState, CheloniaConfig, CheloniaContext, SendMessageHooks } from './types.js' +// import 'ses' + +// Used for temporarily storing the missing decryption key IDs in a given +// message 
+const missingDecryptionKeyIdsMap = new WeakMap>() + +const getMsgMeta = function (this: CheloniaContext, message: SPMessage, contractID: string, state: ChelContractState, index?: number) { + const signingKeyId = message.signingKeyId() + let innerSigningKeyId: string | null | undefined = null + const config = this.config + + const result = { + signingKeyId, + get signingContractID () { + return getContractIDfromKeyId(contractID, signingKeyId, state) + }, + get innerSigningKeyId () { + if (innerSigningKeyId === null) { + const value = message.message() + const data = config.unwrapMaybeEncryptedData(value) + if (data?.data && isSignedData(data.data)) { + innerSigningKeyId = data.data.signingKeyId + } else { + innerSigningKeyId = undefined + } + return innerSigningKeyId + } + }, + get innerSigningContractID () { + return getContractIDfromKeyId(contractID, result.innerSigningKeyId, state) + }, + index + } + + return result +} + +const keysToMap = function (this: CheloniaContext, keys_: (SPKey | EncryptedData)[], height: number, authorizedKeys?: ChelContractState['_vm']['authorizedKeys']): ChelContractState['_vm']['authorizedKeys'] { + // Using cloneDeep to ensure that the returned object is serializable + // Keys in a SPMessage may not be serializable (i.e., supported by the + // structured clone algorithm) when they contain encryptedIncomingData + const keys = keys_.map((key) => { + const data = this.config.unwrapMaybeEncryptedData(key) + if (!data) return undefined + if (data.encryptionKeyId) { + data.data._private = data.encryptionKeyId + } + return data.data + // eslint-disable-next-line no-use-before-define + }).filter(Boolean as unknown as (v: unknown) => v is ChelContractKey) as ChelContractKey[] + + const keysCopy = cloneDeep(keys) as typeof keys + return Object.fromEntries(keysCopy.map((key) => { + key._notBeforeHeight = height + if (authorizedKeys?.[key.id]) { + if (authorizedKeys[key.id]._notAfterHeight == null) { + throw new 
ChelErrorKeyAlreadyExists(`Cannot set existing unrevoked key: ${key.id}`) + } + // If the key was get previously, preserve its _notBeforeHeight + // NOTE: (SECURITY) This may allow keys for periods for which it wasn't + // supposed to be active. This is a trade-off for simplicity, instead of + // considering discrete periods, which is the correct solution + // Discrete ranges *MUST* be implemented because they impact permissions + key._notBeforeHeight = Math.min(height, authorizedKeys[key.id]._notBeforeHeight ?? 0) + } else { + key._notBeforeHeight = height + } + delete key._notAfterHeight + return [key.id, key] + })) +} + +const keyRotationHelper = (contractID: string, state: ChelContractState, config: CheloniaConfig, updatedKeysMap: Record, requiredPermissions: string[], outputSelector: string, outputMapper: (name: [string, string]) => T, internalSideEffectStack?: (({ state, message }: { state: ChelContractState, message: SPMessage }) => void)[]) => { + if (!internalSideEffectStack || !Array.isArray(state._volatile?.watch)) return + + const rootState = sbp(config.stateSelector) + const watchMap: Record = Object.create(null) + + state._volatile.watch.forEach(([name, cID]) => { + if (!updatedKeysMap[name] || watchMap[cID] === null) { + return + } + if (!watchMap[cID]) { + if (!rootState.contracts[cID]?.type || !findSuitableSecretKeyId(rootState[cID], [SPMessage.OP_KEY_UPDATE], ['sig'])) { + watchMap[cID] = null + return + } + + watchMap[cID] = [] + } + + watchMap[cID]!.push(name) + }) + + Object.entries(watchMap).forEach(([cID, names]) => { + if (!Array.isArray(names) || !names.length) return + + const [keyNamesToUpdate, signingKeyId] = names.map((name): [[name: string, foreignName: string], signingKeyId: string, ringLevel: number] | undefined => { + const foreignContractKey = rootState[cID]?._vm?.authorizedKeys?.[updatedKeysMap[name].oldKeyId] + + if (!foreignContractKey) return undefined + + const signingKeyId = findSuitableSecretKeyId(rootState[cID], 
requiredPermissions, ['sig'], foreignContractKey.ringLevel) + + if (signingKeyId) { + return [[name, foreignContractKey.name], signingKeyId, rootState[cID]._vm.authorizedKeys[signingKeyId].ringLevel] + } + + return undefined + // eslint-disable-next-line no-use-before-define + }).filter(Boolean as unknown as (x: unknown) => x is [[name: string, foreignName: string], signingKeyId: string, ringLevel: number]) + .reduce<[[name: string, foreignName: string][], signingKeyId: string | undefined, ringLevel: number]>((acc, [name, signingKeyId, ringLevel]) => { + acc[0].push(name) + return ringLevel < acc[2] ? [acc[0], signingKeyId, ringLevel] : acc + }, [[] as [name: string, foreignName: string][], undefined, Number.POSITIVE_INFINITY]) + + if (!signingKeyId) return + + // Send output based on keyNamesToUpdate, signingKeyId + const contractName = rootState.contracts[cID]?.type + + internalSideEffectStack?.push(() => { + // We can't await because it'll block on a different contract, which + // is possibly waiting on this current contract. + sbp(outputSelector, { + contractID: cID, + contractName, + data: keyNamesToUpdate.map(outputMapper).map((v) => { + return v + }), + signingKeyId + }).catch((e: unknown) => { + console.warn(`Error mirroring key operation (${outputSelector}) from ${contractID} to ${cID}: ${(e as Error)?.message || e}`) + }) + }) + }) +} + +// export const FERAL_FUNCTION = Function + +export default sbp('sbp/selectors/register', { + // DO NOT CALL ANY OF THESE YOURSELF! 
+ 'chelonia/private/state': function (this: CheloniaContext) { + return this.state + }, + 'chelonia/private/invoke': function (this: CheloniaContext, instance: object, invocation: Parameters | { (): void }) { + // If this._instance !== instance (i.e., chelonia/reset was called) + if (this._instance !== instance) { + console.info('[\'chelonia/private/invoke] Not proceeding with invocation as Chelonia was restarted', { invocation }) + return + } + if (Array.isArray(invocation)) { + return sbp(...invocation) + } else if (typeof invocation === 'function') { + return invocation() + } else { + throw new TypeError(`[chelonia/private/invoke] Expected invocation to be an array or a function. Saw ${typeof invocation} instead.`) + } + }, + 'chelonia/private/queueEvent': function (this: CheloniaContext, queueName: string, invocation: Parameters | { (): void }) { + return sbp('okTurtles.eventQueue/queueEvent', queueName, ['chelonia/private/invoke', this._instance, invocation]) + }, + 'chelonia/private/verifyManifestSignature': function (this: CheloniaContext, contractName: string, manifestHash: string, manifest: ChelContractManifest) { + // We check that the manifest contains a 'signature' field with the correct + // shape + if (!has(manifest, 'signature') || typeof manifest.signature.keyId !== 'string' || typeof manifest.signature.value !== 'string') { + throw new Error(`Invalid or missing signature field for manifest ${manifestHash} (named ${contractName})`) + } + + // Now, start the signature verification process + const rootState = sbp(this.config.stateSelector) + if (!has(rootState, 'contractSigningKeys')) { + this.config.reactiveSet(rootState, 'contractSigningKeys', Object.create(null)) + } + // Because `contractName` comes from potentially unsafe sources (for + // instance, from `processMessage`), the key isn't used directly because + // it could overlap with current or future 'special' key names in JavaScript, + // such as `prototype`, `__proto__`, etc. 
We also can't guarantee that the + // `contractSigningKeys` always has a null prototype, and, because of the + // way we manage state, neither can we use `Map`. So, we use prefix for the + // lookup key that's unlikely to ever be part of a special JS name. + const contractNameLookupKey = `name:${contractName}` + // If the contract name has been seen before, validate its signature now + let signatureValidated = false + if (process.env.UNSAFE_TRUST_ALL_MANIFEST_SIGNING_KEYS !== 'true' && has(rootState.contractSigningKeys, contractNameLookupKey)) { + console.info(`[chelonia] verifying signature for ${manifestHash} with an existing key`) + if (!has(rootState.contractSigningKeys[contractNameLookupKey], manifest.signature.keyId)) { + console.error(`The manifest with ${manifestHash} (named ${contractName}) claims to be signed with a key with ID ${manifest.signature.keyId}, which is not trusted. The trusted key IDs for this name are:`, Object.keys(rootState.contractSigningKeys[contractNameLookupKey])) + throw new Error(`Invalid or missing signature in manifest ${manifestHash} (named ${contractName}). 
It claims to be signed with a key with ID ${manifest.signature.keyId}, which has not been authorized for this contract before.`) + } + const signingKey = rootState.contractSigningKeys[contractNameLookupKey][manifest.signature.keyId] + verifySignature(signingKey, manifest.body + manifest.head, manifest.signature.value) + console.info(`[chelonia] successful signature verification for ${manifestHash} (named ${contractName}) using the already-trusted key ${manifest.signature.keyId}.`) + signatureValidated = true + } + // Otherwise, when this is a yet-unseen contract, we parse the body to + // see its allowed signers to trust on first-use (TOFU) + const body = JSON.parse(manifest.body) as ChelContractManifestBody + // If we don't have a list of authorized signatures yet, verify this + // contract's signature and set the auhorized signing keys + if (!signatureValidated) { + console.info(`[chelonia] verifying signature for ${manifestHash} (named ${contractName}) for the first time`) + if (!has(body, 'signingKeys') || !Array.isArray(body.signingKeys)) { + throw new Error(`Invalid manifest file ${manifestHash} (named ${contractName}). Its body doesn't contain a 'signingKeys' list'`) + } + let contractSigningKeys: { [idx: string]: string} + try { + contractSigningKeys = Object.fromEntries(body.signingKeys.map((serializedKey) => { + return [ + keyId(serializedKey), + serializedKey + ] + })) + } catch (e) { + console.error(`[chelonia] Error parsing the public keys list for ${manifestHash} (named ${contractName})`, e) + throw e + } + if (!has(contractSigningKeys, manifest.signature.keyId)) { + throw new Error(`Invalid or missing signature in manifest ${manifestHash} (named ${contractName}). 
It claims to be signed with a key with ID ${manifest.signature.keyId}, which is not listed in its 'signingKeys' field.`) + } + verifySignature(contractSigningKeys[manifest.signature.keyId], manifest.body + manifest.head, manifest.signature.value) + console.info(`[chelonia] successful signature verification for ${manifestHash} (named ${contractName}) using ${manifest.signature.keyId}. The following key IDs will now be trusted for this contract name`, Object.keys(contractSigningKeys)) + signatureValidated = true + rootState.contractSigningKeys[contractNameLookupKey] = contractSigningKeys + } + + // If verification was successful, return the parsed body to make the newly- + // loaded contract available + return body + }, + 'chelonia/private/loadManifest': async function (this: CheloniaContext, contractName: string, manifestHash: string) { + if (!contractName || typeof contractName !== 'string') { + throw new Error('Invalid or missing contract name') + } + if (this.manifestToContract[manifestHash]) { + console.warn('[chelonia]: already loaded manifest', manifestHash) + return + } + const manifestSource = await sbp('chelonia/out/fetchResource', manifestHash, { code: multicodes.SHELTER_CONTRACT_MANIFEST }) + const manifest = JSON.parse(manifestSource) + const body = sbp('chelonia/private/verifyManifestSignature', contractName, manifestHash, manifest) + if (body.name !== contractName) { + throw new Error(`Mismatched contract name. 
Expected ${contractName} but got ${body.name}`) + } + const contractInfo = (this.config.contracts.defaults.preferSlim && body.contractSlim) || body.contract + console.info(`[chelonia] loading contract '${contractInfo.file}'@'${body.version}' from manifest: ${manifestHash}`) + const source = await sbp('chelonia/out/fetchResource', contractInfo.hash, { code: multicodes.SHELTER_CONTRACT_TEXT }) + const reduceAllow = (acc: Record, v: T) => { acc[v] = true; return acc } + const allowedSels: Record = ['okTurtles.events/on', 'chelonia/defineContract', 'chelonia/out/keyRequest'] + .concat(this.config.contracts.defaults.allowedSelectors) + .reduce(reduceAllow, {}) + const allowedDoms: Record = this.config.contracts.defaults.allowedDomains + .reduce(reduceAllow, {}) + const contractSBP = (selector: string, ...args: unknown[]) => { + const domain = domainFromSelector(selector) + if (selector.startsWith(contractName + '/')) { + selector = `${manifestHash}/${selector}` + } + if (allowedSels[selector] || allowedDoms[domain]) { + return sbp(selector, ...args) + } else { + console.error('[chelonia] selector not on allowlist', { selector, allowedSels, allowedDoms }) + throw new Error(`[chelonia] selector not on allowlist: '${selector}'`) + } + } + // const saferEval: Function = new FERAL_FUNCTION(` + // eslint-disable-next-line no-new-func + const saferEval = new Function(` + return function (globals) { + // almost a real sandbox + // stops (() => this)().fetch + // needs additional step of locking down Function constructor to stop: + // new (()=>{}).constructor("console.log(typeof this.fetch)")() + globals.self = globals + globals.globalThis = globals + with (new Proxy(globals, { + get (o, p) { return o[p] }, + has (o, p) { /* console.log('has', p); */ return true } + })) { + (function () { + 'use strict' + ${source} + })() + } + } + `)() + // TODO: lock down Function constructor! could just use SES lockdown() + // or do our own version of it. 
+ // https://github.com/endojs/endo/blob/master/packages/ses/src/tame-function-constructors.js + this.defContractSBP = contractSBP + this.defContractManifest = manifestHash + // contracts will also be signed, so even if sandbox breaks we still have protection + saferEval({ + // pass in globals that we want access to by default in the sandbox + // note: you can undefine these by setting them to undefined in exposedGlobals + crypto: { + getRandomValues: (v: T) => globalThis.crypto.getRandomValues(v) + }, + ...(typeof window === 'object' && window && { + alert: window.alert.bind(window), + confirm: window.confirm.bind(window), + prompt: window.prompt.bind(window) + }), + isNaN, + console, + Object, + Error, + TypeError, + RangeError, + Math, + Symbol, + Date, + Array, + BigInt, + Boolean, + String, + Number, + Int8Array, + Int16Array, + Int32Array, + Uint8Array, + Uint16Array, + Uint32Array, + Float32Array, + Float64Array, + ArrayBuffer, + JSON, + RegExp, + parseFloat, + parseInt, + Promise, + Function, + Map, + WeakMap, + ...this.config.contracts.defaults.exposedGlobals, + require: (dep: string) => { + return dep === '@sbp/sbp' + ? contractSBP + : this.config.contracts.defaults.modules[dep] + }, + sbp: contractSBP, + fetchServerTime: async (fallback: boolean = true) => { + // If contracts need the current timestamp (for example, for metadata 'createdDate') + // they must call this function so that clients are kept synchronized to the server's + // clock, for consistency, so that if one client's clock is off, it doesn't conflict + // with other client's clocks. 
+ // See: https://github.com/okTurtles/group-income/issues/531 + try { + const response = await this.config.fetch(`${this.config.connectionURL}/time`, { signal: this.abortController.signal }) + return handleFetchResult('text')(response) + } catch (e) { + console.warn('[fetchServerTime] Error', e) + if (fallback) { + return new Date(sbp('chelonia/time')).toISOString() + } + throw new ChelErrorFetchServerTimeFailed('Can not fetch server time. Please check your internet connection.') + } + } + }) + if (contractName !== this.defContract.name) { + throw new Error(`Invalid contract name for manifest ${manifestHash}. Expected ${contractName} but got ${this.defContract.name}`) + } + this.defContractSelectors.forEach(s => { allowedSels[s] = true }) + this.manifestToContract[manifestHash] = { + slim: contractInfo === body.contractSlim, + info: contractInfo, + contract: this.defContract + } + }, + // Warning: avoid using this unless you know what you're doing. Prefer using /remove. 
+ 'chelonia/private/removeImmediately': function (this: CheloniaContext, contractID: string, params?: { permanent?: boolean, resync?: boolean }) { + const state = sbp(this.config.stateSelector) + const contractName = state.contracts[contractID]?.type + if (!contractName) { + console.error('[chelonia/private/removeImmediately] Missing contract name for contract', { contractID }) + return + } + + const manifestHash = this.config.contracts.manifests[contractName] + if (manifestHash) { + const destructor = `${manifestHash}/${contractName}/_cleanup` + // Check if a destructor is defined + if (sbp('sbp/selectors/fn', destructor)) { + // And call it + try { + sbp(destructor, { contractID, resync: !!params?.resync, state: state[contractID] }) + } catch (e) { + console.error(`[chelonia/private/removeImmediately] Error at destructor for ${contractID}`, e) + } + } + } + + if (params?.resync) { + // If re-syncing, keep the reference count + Object.keys(state.contracts[contractID]) + .filter((k) => k !== 'references') + .forEach((k) => this.config.reactiveDel(state.contracts[contractID], k)) + // If re-syncing, keep state._volatile.watch + Object.keys(state[contractID]) + .filter((k) => k !== '_volatile') + .forEach((k) => this.config.reactiveDel(state[contractID], k)) + if (state[contractID]._volatile) { + Object.keys(state[contractID]._volatile) + .filter((k) => k !== 'watch') + .forEach((k) => this.config.reactiveDel(state[contractID]._volatile, k)) + } + } else { + delete this.ephemeralReferenceCount[contractID] + if (params?.permanent) { + // Keep a 'null' state to remember permanently-deleted contracts + // (e.g., when they've been removed from the server) + this.config.reactiveSet(state.contracts, contractID, null) + } else { + this.config.reactiveDel(state.contracts, contractID) + } + this.config.reactiveDel(state, contractID) + } + + this.subscriptionSet.delete(contractID) + // calling this will make pubsub unsubscribe for events on `contractID` + 
sbp('okTurtles.events/emit', CONTRACTS_MODIFIED, Array.from(this.subscriptionSet), { + added: [], + removed: [contractID], + permanent: params?.permanent, + resync: params?.resync + }) + }, + // used by, e.g. 'chelonia/contract/wait' + 'chelonia/private/noop': function () {}, + 'chelonia/private/out/sync': function (this: CheloniaContext, contractIDs: string | string[], params?: { force?: boolean, resync?: boolean }): Promise { + const listOfIds = typeof contractIDs === 'string' ? [contractIDs] : contractIDs + const forcedSync = !!params?.force + return Promise.all(listOfIds.map(contractID => { + // If this isn't a forced sync and we're already subscribed to the contract, + // only wait on the event queue (as events should come over the subscription) + if (!forcedSync && this.subscriptionSet.has(contractID)) { + const rootState = sbp(this.config.stateSelector) + // However, if the contract has been marked as dirty (meaning its state + // could be wrong due to newly received encryption keys), sync it anyhow + // (i.e., disregard the force flag and proceed to sync the contract) + if (!rootState[contractID]?._volatile?.dirty) { + return sbp('chelonia/private/queueEvent', contractID, ['chelonia/private/noop']) + } + } + // enqueue this invocation in a serial queue to ensure + // handleEvent does not get called on contractID while it's syncing, + // but after it's finished. This is used in tandem with + // queuing the 'chelonia/private/in/handleEvent' selector, defined below. + // This prevents handleEvent getting called with the wrong previousHEAD for an event. 
+ return sbp('chelonia/private/queueEvent', contractID, [ + 'chelonia/private/in/syncContract', contractID, params + ]).catch((err: unknown) => { + console.error(`[chelonia] failed to sync ${contractID}:`, err) + throw err // re-throw the error + }) + })) + }, + 'chelonia/private/out/publishEvent': function (this: CheloniaContext, entry: SPMessage, { maxAttempts = 5, headers, billableContractID, bearer }: { maxAttempts?: number, headers?: Record, billableContractID?: string, bearer?: string } = {}, hooks: SendMessageHooks) { + const contractID = entry.contractID() + const originalEntry = entry + + return sbp('chelonia/private/queueEvent', `publish:${contractID}`, async () => { + let attempt = 1 + let lastAttemptedHeight + // prepublish is asynchronous to allow for cleanly sending messages to + // different contracts + await hooks?.prepublish?.(entry) + + const onreceivedHandler = (_contractID: string, message: SPMessage) => { + if (entry.hash() === message.hash()) { + sbp('okTurtles.events/off', EVENT_HANDLED, onreceivedHandler) + hooks.onprocessed!(entry) + } + } + + if (typeof hooks?.onprocessed === 'function') { + sbp('okTurtles.events/on', EVENT_HANDLED, onreceivedHandler) + } + + // auto resend after short random delay + // https://github.com/okTurtles/group-income/issues/608 + while (true) { + // Queued event to ensure that we send the event with whatever the + // 'latest' state may be for that contract (in case we were receiving + // something over the web socket) + // This also ensures that the state doesn't change while reading it + lastAttemptedHeight = entry.height() + const newEntry = await sbp('chelonia/private/queueEvent', contractID, async () => { + const rootState = sbp(this.config.stateSelector) + const state = rootState[contractID] + const isFirstMessage = entry.isFirstMessage() + + if (!state && !isFirstMessage) { + console.info(`[chelonia] Not sending message as contract state has been removed: ${entry.description()}`) + 
return + } + + if (hooks?.preSendCheck) { + if (!await hooks.preSendCheck(entry, state)) { + console.info(`[chelonia] Not sending message as preSendCheck hook returned non-truish value: ${entry.description()}`) + return + } + } + + // Process message to ensure that it is valid. Should this throw, + // we propagate the error. Calling `processMessage` will perform + // validation by checking signatures, well-formedness and, in the case + // of actions, by also calling both the `validate` method (which + // doesn't mutate the state) and the `process` method (which could + // mutate the state). + // `SPMessage` objects have an implicit `direction` field that's set + // based on how the object was constructed. For messages that will be + // sent to the server (this case), `direction` is set to `outgoing`. + // This `direction` affects how certain errors are reported during + // processing, and is also exposed to contracts (which could then + // alter their behavior based on this) to support some features (such + // as showing users that a certain message is 'pending'). + // Validation ensures that we don't write messages known to be invalid. + // Although those invalid messages will be ignored if sent anyhow, + // sending them is wasteful. + // The only way to know for sure if a message is valid or not is using + // the same logic that would be used if the message was received, + // hence the call to `processMessage`. Validation requires having the + // state and all mutations that would be applied. For example, when + // joining a chatroom, this is usually done by sending an OP_ATOMIC + // that contains OP_KEY_ADD and OP_ACTION_ENCRYPTED. Correctly + // validating this operation requires applying the OP_KEY_ADD to the + // state in order to know whether OP_ACTION_ENCRYPTED has a valid + // signature or not. + // We also rely on this logic to keep different contracts in sync + // when there are side-effects. 
For example, the side-effect in a + // group for someone joining a chatroom can call the `join` action + // on the chatroom unconditionally, since validation will prevent + // the message from being sent. + // Because of this, 'chelonia/private/in/processMessage' SHOULD NOT + // change the global Chelonia state and it MUST NOT call any + // side-effects or change the global state in a way that affects + // the meaning of any future messages or successive invocations. + // Note: mutations to the contract state, if any, are immediately + // discarded (see the temporary object created using `cloneDeep`). + await sbp('chelonia/private/in/processMessage', entry, cloneDeep(state || {})) + + // if this isn't the first event (i.e., OP_CONTRACT), recreate and + // resend message + // This is mainly to set height and previousHEAD. For the first event, + // this doesn't need to be done because previousHEAD is always undefined + // and height is always 0. + // We always call recreateEvent because we may have received new events + // in the web socket + if (!isFirstMessage) { + return recreateEvent(entry, state, rootState.contracts[contractID]) + } + + return entry + }) + + // If there is no event to send, return + if (!newEntry) return + + await hooks?.beforeRequest?.(newEntry, entry) + entry = newEntry + + const r = await this.config.fetch(`${this.config.connectionURL}/event`, { + method: 'POST', + body: entry.serialize(), + headers: { + ...headers, + ...bearer && { + Authorization: `Bearer ${bearer}` + }, + ...billableContractID && { + Authorization: buildShelterAuthorizationHeader.call(this, billableContractID) + }, + 'Content-Type': 'text/plain' + }, + signal: this.abortController.signal + }) + if (r.ok) { + await hooks?.postpublish?.(entry) + return entry + } + try { + if (r.status === 409) { + if (attempt + 1 > maxAttempts) { + console.error(`[chelonia] failed to publish ${entry.description()} after ${attempt} attempts`, entry) + throw new Error(`publishEvent: 
${r.status} - ${r.statusText}. attempt ${attempt}`)
          }
          // create new entry
          // Wait a short random delay before retrying after a 409 conflict,
          // so that concurrent retries don't all fire at the same time.
          const randDelay = randomIntFromRange(0, 1500)
          console.warn(`[chelonia] publish attempt ${attempt} of ${maxAttempts} failed. Waiting ${randDelay} msec before resending ${entry.description()}`)
          attempt += 1
          await delay(randDelay) // wait randDelay ms before sending it again

          // TODO: The [pubsub] code seems to miss events that happened between
          // a call to sync and the subscription time. This is a temporary measure
          // to handle this until [pubsub] is updated.
          if (!entry.isFirstMessage() && entry.height() === lastAttemptedHeight) {
            await sbp('chelonia/private/out/sync', contractID, { force: true })
          }
        } else {
          // Non-409 responses are not retried: log the server-provided
          // message (if any) and rethrow.
          const message = (await r.json())?.message
          console.error(`[chelonia] ERROR: failed to publish ${entry.description()}: ${r.status} - ${r.statusText}: ${message}`, entry)
          throw new Error(`publishEvent: ${r.status} - ${r.statusText}: ${message}`)
        }
      } catch (e) {
        // Stop listening for EVENT_HANDLED before propagating the error
        sbp('okTurtles.events/off', EVENT_HANDLED, onreceivedHandler)

        throw e
      }
    }
  }).then((entry: SPMessage) => {
    sbp('okTurtles.events/emit', EVENT_PUBLISHED, { contractID, message: entry, originalMessage: originalEntry })
    return entry
  }).catch((e: unknown) => {
    sbp('okTurtles.events/emit', EVENT_PUBLISHING_ERROR, { contractID, message: entry, originalMessage: originalEntry, error: e })
    throw e
  })
},
// Fetches the latest HEAD info for a contract from the server as JSON.
// `cache: 'no-store'` ensures a fresh response rather than a cached one.
'chelonia/private/out/latestHEADinfo': function (this: CheloniaContext, contractID: string) {
  return this.config.fetch(`${this.config.connectionURL}/latestHEADinfo/${contractID}`, {
    cache: 'no-store',
    signal: this.abortController.signal
  }).then(handleFetchResult('json'))
},
// Bookkeeping after an OP_KEY_SHARE: merges back `_volatile.watch` entries
// saved before a resync and clears matching `pendingKeyRequests`.
'chelonia/private/postKeyShare': function (this: CheloniaContext, contractID: string, previousVolatileState: ChelContractState['_volatile'], signingKey: SPKey) {
  const cheloniaState = sbp(this.config.stateSelector)
  const targetState = cheloniaState[contractID] as ChelContractState

  if
(!targetState) return + + if (previousVolatileState && has(previousVolatileState, 'watch')) { + if (!targetState._volatile) this.config.reactiveSet(targetState, '_volatile', Object.create(null)) + if (!targetState._volatile!.watch) { + this.config.reactiveSet(targetState._volatile!, 'watch', previousVolatileState.watch) + } else if (targetState._volatile!.watch !== previousVolatileState.watch) { + previousVolatileState.watch!.forEach((pWatch) => { + if (!targetState._volatile!.watch!.some((tWatch) => { + return (tWatch[0] === pWatch[0]) && (tWatch[1] === pWatch[1]) + })) { + targetState._volatile!.watch!.push(pWatch) + } + }) + } + } + + if (!Array.isArray(targetState._volatile?.pendingKeyRequests)) return + + this.config.reactiveSet( + targetState._volatile, 'pendingKeyRequests', + targetState._volatile.pendingKeyRequests.filter((pkr) => + pkr?.name !== signingKey.name + ) + ) + }, + 'chelonia/private/in/processMessage': async function (this: CheloniaContext, message: SPMessage, state: ChelContractState, internalSideEffectStack?: (({ state, message }: { state: ChelContractState, message: SPMessage }) => void)[], contractName?: string) { + const [opT, opV] = message.op() + const hash = message.hash() + const height = message.height() + const contractID = message.contractID() + const manifestHash = message.manifest() + const signingKeyId = message.signingKeyId() + const direction = message.direction() + const config = this.config + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this + const opName = Object.entries(SPMessage).find(([, y]) => y === opT)?.[0] + console.debug('PROCESSING OPCODE:', opName, 'to', contractID) + if (state?._volatile?.dirty) { + console.debug('IGNORING OPCODE BECAUSE CONTRACT STATE IS MARKED AS DIRTY.', 'OPCODE:', opName, 'CONTRACT:', contractID) + return + } + if (!state._vm) state._vm = Object.create(null) + const opFns: { + [K in keyof SPOpMap]: (op: SPOpMap[K]) => void | Promise + } = { + /* + There are two 
types of "errors" that we need to consider: + 1. "Ignoring" errors + 2. "Failure" errors + Example: OP_KEY_ADD + 1. IGNORING: an error is thrown because we wanted to add a key but the key we wanted to add is already there. This is not a hard error, it's an ignoring error. We don't care that the operation failed in this case because the intent was accomplished. + 2. FAILURE: an error is thrown while attempting to add a key that doesn't exist. + Example: OP_ACTION_ENCRYPTED + 1. IGNORING: An error is thrown because we don't have the key to decrypt the action. We ignore it. + 2. FAILURE: An error is thrown by the process function during processing. + Handling these in OP_ATOMIC + • ALL errors of class "IGNORING" should be ignored. They should not impact our ability to process the rest of the operations in the OP_ATOMIC. No matter how many of these are thrown, it doesn't affect the rest of the operations. + • ANY error of class "FAILURE" will call the rest of the operations to fail and the state to be reverted to prior to the OP_ATOMIC. No side-effects should be run. Because an intention failed. 
+ */ + async [SPMessage.OP_ATOMIC] (v: SPOpAtomic) { + for (let i = 0; i < v.length; i++) { + const u = v[i] + try { + if ((u[0] as string) === SPMessage.OP_ATOMIC) throw new Error('Cannot nest OP_ATOMIC') + if (!validateKeyPermissions(message, config, state, signingKeyId, u[0], u[1])) { + throw new Error('Inside OP_ATOMIC: no matching signing key was defined') + } + await (opFns[u[0]] as (x: unknown) => Promise)(u[1]) + } catch (e_) { + const e = e_ as Error + if (e && typeof e === 'object') { + if (e.name === 'ChelErrorDecryptionKeyNotFound') { + console.warn(`[chelonia] [OP_ATOMIC] WARN '${e.name}' in processMessage for ${message.description()}: ${e.message}`, e, message.serialize()) + if (e.cause) { + const missingDecryptionKeyIds = missingDecryptionKeyIdsMap.get(message) + if (missingDecryptionKeyIds) { + missingDecryptionKeyIds.add(e.cause as unknown as string) + } else { + missingDecryptionKeyIdsMap.set(message, new Set([e.cause as unknown as string])) + } + } + continue + } else { + logEvtError(message, `[chelonia] [OP_ATOMIC] ERROR '${e.name}' in processMessage for ${message.description()}: ${e.message || e}`, e, message.serialize()) + } + console.warn(`[chelonia] [OP_ATOMIC] Error processing ${message.description()}: ${message.serialize()}. Any side effects will be skipped!`) + if (config.strictProcessing) { + throw e + } + config.hooks.processError?.(e, message, getMsgMeta.call(self, message, contractID, state)) + if (e.name === 'ChelErrorWarning') continue + } else { + logEvtError(message, 'Inside OP_ATOMIC: Non-object or null error thrown', contractID, message, i, e) + } + throw e + } + } + }, + [SPMessage.OP_CONTRACT] (v: SPOpContract) { + state._vm.type = v.type + const keys = keysToMap.call(self, v.keys, height) + state._vm.authorizedKeys = keys + // Loop through the keys in the contract and try to decrypt all of the private keys + // Example: in the identity contract you have the IEK, IPK, CSK, and CEK. 
+ // When you login you have the IEK which is derived from your password, and you + // will use it to decrypt the rest of the keys which are encrypted with that. + // Specifically, the IEK is used to decrypt the CSKs and the CEKs, which are + // the encrypted versions of the CSK and CEK. + keyAdditionProcessor.call(self, message, hash, v.keys, state, contractID, signingKey, internalSideEffectStack) + }, + [SPMessage.OP_ACTION_ENCRYPTED] (v: SPOpActionEncrypted) { + if (config.skipActionProcessing) { + if (!config.skipDecryptionAttempts) { + console.log('OP_ACTION_ENCRYPTED: skipped action processing') + } + return + } + return opFns[SPMessage.OP_ACTION_UNENCRYPTED](v.valueOf()) + }, + async [SPMessage.OP_ACTION_UNENCRYPTED] (v: SPOpActionUnencrypted) { + if (!config.skipActionProcessing) { + let innerSigningKeyId: string | undefined + if (isSignedData(v)) { + innerSigningKeyId = v.signingKeyId + v = v.valueOf() as ProtoSPOpActionUnencrypted + } + + const { data, meta, action } = v as ProtoSPOpActionUnencrypted + + if (!config.whitelisted(action)) { + throw new Error(`chelonia: action not whitelisted: '${action}'`) + } + + await sbp( + `${manifestHash}/${action}/process`, + { + data, + meta, + hash, + height, + contractID, + direction: message.direction(), + signingKeyId, + get signingContractID () { + return getContractIDfromKeyId(contractID, signingKeyId, state) + }, + innerSigningKeyId, + get innerSigningContractID () { + return getContractIDfromKeyId(contractID, innerSigningKeyId, state) + } + } as ChelContractProcessMessageObject, + state + ) + } + }, + [SPMessage.OP_KEY_SHARE] (wv: SPOpKeyShare) { + // TODO: Prompt to user if contract not in pending + + const data = config.unwrapMaybeEncryptedData(wv) + if (!data) return + const v = data.data + + for (const key of v.keys) { + if (key.id && key.meta?.private?.content) { + if (!has(state._vm, 'sharedKeyIds')) state._vm.sharedKeyIds = [] + if (!state._vm.sharedKeyIds!.some((sK) => sK.id === key.id)) 
state._vm.sharedKeyIds!.push({ id: key.id, contractID: v.contractID, height, keyRequestHash: v.keyRequestHash, keyRequestHeight: v.keyRequestHeight }) + } + } + + // If this is a response to an OP_KEY_REQUEST (marked by the + // presence of the keyRequestHash attribute), then we'll mark the + // key request as completed + // TODO: Verify that the keyRequestHash is what we expect (on the + // other contact's state, we should have a matching structure in + // state._volatile.pendingKeyRequests = [ + // { contractID: "this", name: "name of this signingKeyId", reference: "this reference", hash: "KA" }, ..., but we don't + // have a copy of the keyRequestHash (this would need a new + // message to ourselves in the KR process), so for now we trust + // that if it has keyRequestHash, it's a response to a request + // we sent. + // For similar reasons, we can't check pendingKeyRequests, because + // depending on how and in which order events are processed, it may + // not be available. + // ] + if (has(v, 'keyRequestHash') && state._vm.authorizedKeys[signingKeyId].meta?.keyRequest) { + state._vm.authorizedKeys[signingKeyId].meta!.keyRequest!.responded = hash + } + + internalSideEffectStack?.push(async () => { + delete self.postSyncOperations[contractID]?.['pending-keys-for-' + v.contractID] + + const cheloniaState = sbp(self.config.stateSelector) as ChelRootState + + const targetState = cheloniaState[v.contractID] + const missingDecryptionKeyIds = cheloniaState.contracts[v.contractID]?.missingDecryptionKeyIds + + let newestEncryptionKeyHeight = Number.POSITIVE_INFINITY + for (const key of v.keys) { + if (key.id && key.meta?.private?.content) { + // Outgoing messages' keys are always transient + const transient = direction === 'outgoing' || key.meta.private.transient + if ( + !sbp('chelonia/haveSecretKey', key.id, !transient) + ) { + try { + const decrypted = key.meta.private.content.valueOf() + sbp('chelonia/storeSecretKeys', new Secret([{ + key: deserializeKey(decrypted), 
+ transient + }])) + // If we've just received a known missing key (i.e., a key + // that previously resulted in a decryption error), we know + // our state is outdated and we need to re-sync the contract + if (missingDecryptionKeyIds?.includes(key.id)) { + newestEncryptionKeyHeight = Number.NEGATIVE_INFINITY + } else if ( + // Otherwise, we make an educated guess on whether a re-sync + // is needed based on the height. + targetState?._vm?.authorizedKeys?.[key.id]?._notBeforeHeight != null && + Array.isArray(targetState._vm.authorizedKeys[key.id].purpose) && + targetState._vm.authorizedKeys[key.id].purpose.includes('enc') + ) { + newestEncryptionKeyHeight = Math.min(newestEncryptionKeyHeight, targetState._vm.authorizedKeys[key.id]._notBeforeHeight) + } + } catch (e_) { + const e = e_ as Error | undefined + if (e?.name === 'ChelErrorDecryptionKeyNotFound') { + console.warn(`OP_KEY_SHARE (${hash} of ${contractID}) missing secret key: ${e.message}`, + e) + } else { + // Using console.error instead of logEvtError because this + // is a side-effect and not relevant for outgoing messages + console.error(`OP_KEY_SHARE (${hash} of ${contractID}) error '${e!.message || e}':`, + e) + } + } + } + } + } + + // If an encryption key has been shared with _notBefore lower than the + // current height, then the contract must be resynced. 
+ const mustResync = !!(newestEncryptionKeyHeight < cheloniaState.contracts[v.contractID]?.height) + + if (mustResync) { + if (!has(targetState, '_volatile')) config.reactiveSet(targetState, '_volatile', Object.create(null)) + config.reactiveSet(targetState._volatile!, 'dirty', true) + + if (!Object.keys(targetState).some((k) => k !== '_volatile')) { + // If the contract only has _volatile state, we don't force sync it + return + } + + // Mark contracts that have foreign keys that have been received + // as dirty + // First, we group watched keys by key and contracts + const keyDict = Object.create(null) as Record + targetState._volatile?.watch?.forEach(([keyName, contractID]) => { + if (!keyDict[keyName]) { + keyDict[keyName] = [contractID] + return + } + keyDict[keyName].push(contractID) + }) + // Then, see which of those contracts need to be updated + const contractIdsToUpdate = Array.from(new Set(Object.entries(keyDict).flatMap(([keyName, contractIDs]) => { + const keyId = findKeyIdByName(targetState, keyName) + if ( + // Does the key exist? (i.e., is it a current key) + keyId && + // Is it an encryption key? (signing keys don't build up a + // potentially invalid state because the private key isn't + // required for validation; however, missing encryption keys + // prevent message processing) + targetState._vm.authorizedKeys[keyId].purpose.includes('enc') && + // Is this a newly set key? 
(avoid re-syncing contracts that + // haven't been affected by the `OP_KEY_SHARE`) + targetState._vm.authorizedKeys[keyId]._notBeforeHeight >= newestEncryptionKeyHeight + ) { + return contractIDs + } + return [] + }))) + // Mark these contracts as dirty + contractIdsToUpdate.forEach((contractID) => { + const targetState = cheloniaState[contractID] + if (!targetState) return + if (!has(targetState, '_volatile')) config.reactiveSet(targetState, '_volatile', Object.create(null)) + config.reactiveSet(targetState._volatile!, 'dirty', true) + }) + + // Since we have received new keys, the current contract state might be wrong, so we need to remove the contract and resync + // Note: The following may be problematic when several tabs are open + // sharing the same state. This is more of a general issue in this + // situation, not limited to the following sequence of events + if (self.subscriptionSet.has(v.contractID)) { + const resync = sbp('chelonia/private/queueEvent', v.contractID, [ + 'chelonia/private/in/syncContract', v.contractID + ]).then(() => { + // Now, if we're subscribed to any of the contracts that were + // marked as dirty, re-sync them + sbp('chelonia/private/out/sync', + contractIdsToUpdate.filter((contractID) => { + return self.subscriptionSet.has(contractID) + }), + { force: true, resync: true } + ).catch((e: unknown) => { + // Using console.error instead of logEvtError because this + // is a side-effect and not relevant for outgoing messages + console.error('[chelonia] Error resyncing contracts with foreign key references after key rotation', e) + }) + }).catch((e: unknown) => { + // Using console.error instead of logEvtError because this + // is a side-effect and not relevant for outgoing messages + console.error(`[chelonia] Error during sync for ${v.contractID} during OP_KEY_SHARE for ${contractID}`) + if (v.contractID === contractID) { + throw e + } + }) + + // If the keys received were for the current contract, we can't + // use queueEvent as we're 
already on that same queue + if (v.contractID !== contractID) { + await resync + } + } + } + + const previousVolatileState = targetState?._volatile + sbp('chelonia/private/queueEvent', v.contractID, ['chelonia/private/postKeyShare', v.contractID, mustResync ? previousVolatileState : null, signingKey]) + .then(() => { + // The CONTRACT_HAS_RECEIVED_KEYS event is placed on the queue for + // the current contract so that calling + // 'chelonia/contract/waitingForKeyShareTo' will give correct results + // (i.e., the event is processed after the state is written) + sbp('chelonia/private/queueEvent', contractID, () => { + sbp('okTurtles.events/emit', CONTRACT_HAS_RECEIVED_KEYS, { contractID: v.contractID, sharedWithContractID: contractID, signingKeyId, get signingKeyName () { return state._vm?.authorizedKeys?.[signingKeyId]?.name } }) + }).catch((e: unknown) => { + // Using console.error instead of logEvtError because this + // is a side-effect and not relevant for outgoing messages + console.error(`[chelonia] Error while emitting the CONTRACT_HAS_RECEIVED_KEYS event for ${contractID}`, e) + }) + }) + }) + }, + [SPMessage.OP_KEY_REQUEST] (wv: SPOpKeyRequest) { + const data = config.unwrapMaybeEncryptedData(wv) + + // If we're unable to decrypt the OP_KEY_REQUEST, then still + // proceed to do accounting of invites + const v = data?.data || { contractID: '(private)', replyWith: { context: undefined }, request: '*' } + + const originatingContractID = v.contractID + + if (state._vm?.invites?.[signingKeyId]?.quantity != null) { + if (state._vm.invites[signingKeyId].quantity > 0) { + if ((--state._vm.invites[signingKeyId].quantity) <= 0) { + state._vm.invites[signingKeyId].status = INVITE_STATUS.USED + } + } else { + logEvtError(message, 'Ignoring OP_KEY_REQUEST because it exceeds allowed quantity: ' + originatingContractID) + return + } + } + + if (state._vm?.invites?.[signingKeyId]?.expires != null) { + if (state._vm.invites[signingKeyId].expires < Date.now()) { + 
logEvtError(message, 'Ignoring OP_KEY_REQUEST because it expired at ' + state._vm.invites[signingKeyId].expires + ': ' + originatingContractID)
          return
        }
      }

      // If skipping processing or if the message is outgoing, there isn't
      // anything else to do
      if (config.skipActionProcessing || direction === 'outgoing') {
        return
      }

      // Outgoing messages don't have a context attribute
      if (!has(v.replyWith, 'context')) {
        logEvtError(message, 'Ignoring OP_KEY_REQUEST because it is missing the context attribute')
        return
      }

      const context = v.replyWith.context

      // The first element of the context must name the originating contract.
      // This check is only meaningful when `data` is defined, i.e. when the
      // request could actually be decrypted.
      if (data && (!Array.isArray(context) || context[0] !== originatingContractID)) {
        logEvtError(message, 'Ignoring OP_KEY_REQUEST because it is signed by the wrong contract')
        return
      }

      // Only the wildcard ('*') request type is supported
      if (v.request !== '*') {
        logEvtError(message, 'Ignoring OP_KEY_REQUEST because it has an unsupported request attribute', v.request)
        return
      }

      if (!state._vm.pendingKeyshares) state._vm.pendingKeyshares = Object.create(null)

      state._vm.pendingKeyshares![message.hash()] = context
        ?
[ + // Full-encryption (i.e., KRS encryption) requires that this request + // was encrypted and that the invite is marked as private + !!data?.encryptionKeyId, + message.height(), + signingKeyId, + context + ] + : [ + !!data?.encryptionKeyId, + message.height(), + signingKeyId + ] + + // Call 'chelonia/private/respondToAllKeyRequests' after sync + if (data) { + internalSideEffectStack?.push(() => { + self.setPostSyncOp(contractID, 'respondToAllKeyRequests-' + message.contractID(), ['chelonia/private/respondToAllKeyRequests', contractID]) + }) + } + }, + [SPMessage.OP_KEY_REQUEST_SEEN] (wv: SPOpKeyRequestSeen) { + if (config.skipActionProcessing) { + return + } + // TODO: Handle boolean (success) value + + const data = config.unwrapMaybeEncryptedData(wv) + if (!data) return + const v = data.data + + if (state._vm.pendingKeyshares && v.keyRequestHash in state._vm.pendingKeyshares) { + const hash = v.keyRequestHash + const pending = state._vm.pendingKeyshares[hash] + delete state._vm.pendingKeyshares[hash] + if (pending.length !== 4) return + + // If we were able to respond, clean up responders + const keyId = pending[2] + const originatingContractID = pending[3][0] + if (Array.isArray(state._vm?.invites?.[keyId]?.responses)) { + state._vm?.invites?.[keyId]?.responses.push(originatingContractID) + } + + if (!has(state._vm, 'keyshares')) state._vm.keyshares = Object.create(null) + + const success = v.success + + state._vm.keyshares![hash] = { + contractID: originatingContractID, + height, + success, + ...(success && { + hash: v.keyShareHash + }) + } + } + }, + [SPMessage.OP_PROP_DEL]: notImplemented, + [SPMessage.OP_PROP_SET] (v: SPOpPropSet) { + if (!state._vm.props) state._vm.props = {} + state._vm.props[v.key] = v.value + }, + [SPMessage.OP_KEY_ADD] (v: SPOpKeyAdd) { + const keys = keysToMap.call(self, v, height, state._vm.authorizedKeys) + const keysArray = Object.values(v) as SPKey[] + keysArray.forEach((k) => { + if (has(state._vm.authorizedKeys, k.id) && 
state._vm.authorizedKeys[k.id]._notAfterHeight == null) { + throw new ChelErrorWarning('Cannot use OP_KEY_ADD on existing keys. Key ID: ' + k.id) + } + }) + validateKeyAddPermissions.call(self, contractID, signingKey, state, v) + state._vm.authorizedKeys = { ...state._vm.authorizedKeys, ...keys } + keyAdditionProcessor.call(self, message, hash, v, state, contractID, signingKey, internalSideEffectStack) + }, + [SPMessage.OP_KEY_DEL] (v: SPOpKeyDel) { + if (!state._vm.authorizedKeys) state._vm.authorizedKeys = Object.create(null) + if (!state._volatile) state._volatile = Object.create(null) + if (!state._volatile!.pendingKeyRevocations) state._volatile!.pendingKeyRevocations = Object.create(null) + validateKeyDelPermissions.call(self, contractID, signingKey, state, v) + const keyIds = v.map((k) => { + const data = config.unwrapMaybeEncryptedData(k) + if (!data) return undefined + return data.data + }).filter((keyId): keyId is string => { + if (!keyId || typeof keyId !== 'string') return false + if (!has(state._vm.authorizedKeys, keyId) || state._vm.authorizedKeys[keyId]._notAfterHeight != null) { + console.warn('Attempted to delete non-existent key from contract', { contractID, keyId }) + return false + } + return true + }) + + keyIds.forEach((keyId) => { + const key = state._vm.authorizedKeys[keyId] + state._vm.authorizedKeys[keyId]._notAfterHeight = height + + if (has(state._volatile!.pendingKeyRevocations, keyId)) { + delete state._volatile!.pendingKeyRevocations![keyId] + } + + // Are we deleting a foreign key? 
If so, we also need to remove + // the operation from (1) _volatile.watch (on the other contract) + // and (2) pendingWatch + if (key.foreignKey) { + const fkUrl = new URL(key.foreignKey) + const foreignContract = fkUrl.pathname + const foreignKeyName = fkUrl.searchParams.get('keyName') + + if (!foreignContract || !foreignKeyName) throw new Error('Invalid foreign key: missing contract or key name') + + internalSideEffectStack?.push(() => { + sbp('chelonia/private/queueEvent', foreignContract, () => { + const rootState = sbp(config.stateSelector) as ChelRootState + if (Array.isArray(rootState[foreignContract]?._volatile?.watch)) { + // Stop watching events for this key + const oldWatch = rootState[foreignContract]!._volatile!.watch! + rootState[foreignContract]!._volatile!.watch = oldWatch.filter(([name, cID]) => name !== foreignKeyName || cID !== contractID) + if (oldWatch.length !== rootState[foreignContract]._volatile!.watch!.length) { + // If the number of foreign keys changed, maybe there's no + // reason to remain subscribed to this contract. In this + // case, attempt to release it. 
+ sbp('chelonia/contract/release', foreignContract, { try: true }).catch((e: unknown) => { + // Using console.error instead of logEvtError because this + // is a side-effect and not relevant for outgoing messages + console.error(`[chelonia] Error at OP_KEY_DEL internalSideEffectStack while attempting to release foreign contract ${foreignContract}`, e) + }) + } + } + }).catch((e: unknown) => { + // Using console.error instead of logEvtError because this + // is a side-effect and not relevant for outgoing messages + console.error('Error stopping watching events after removing key', { contractID, foreignContract, foreignKeyName, fkUrl }, e) + }) + }) + + const pendingWatch = state._vm.pendingWatch?.[foreignContract] + if (pendingWatch) { + state._vm.pendingWatch![foreignContract] = pendingWatch.filter(([, kId]) => kId !== keyId) + } + } + + // Set the status to revoked for invite keys + if (key.name.startsWith('#inviteKey-') && state._vm.invites![key.id]) { + state._vm.invites![key.id].status = INVITE_STATUS.REVOKED + } + }) + + // Check state._volatile.watch for contracts that should be + // mirroring this operation + if (Array.isArray(state._volatile?.watch)) { + const updatedKeysMap: Record = Object.create(null) + + keyIds.forEach((keyId) => { + updatedKeysMap[state._vm.authorizedKeys[keyId!].name] = { + name: state._vm.authorizedKeys[keyId!].name, + oldKeyId: keyId + } + }) + + keyRotationHelper(contractID, state, config, updatedKeysMap, [SPMessage.OP_KEY_DEL], 'chelonia/out/keyDel', (name) => updatedKeysMap[name[0]].oldKeyId, internalSideEffectStack) + } + }, + [SPMessage.OP_KEY_UPDATE] (v: SPOpKeyUpdate) { + if (!state._volatile) state._volatile = Object.create(null) + if (!state._volatile!.pendingKeyRevocations) state._volatile!.pendingKeyRevocations = Object.create(null) + const [updatedKeys, updatedMap] = validateKeyUpdatePermissions.call(self, contractID, signingKey, state, v) + const keysToDelete = Object.values(updatedMap) + for (const keyId of 
keysToDelete) { + if (has(state._volatile!.pendingKeyRevocations, keyId)) { + delete state._volatile!.pendingKeyRevocations![keyId] + } + + state._vm.authorizedKeys[keyId]._notAfterHeight = height + } + for (const key of updatedKeys) { + if (!has(state._vm.authorizedKeys, key.id)) { + key._notBeforeHeight = height + state._vm.authorizedKeys[key.id] = cloneDeep(key) + } + } + keyAdditionProcessor.call(self, message, hash, updatedKeys, state, contractID, signingKey, internalSideEffectStack) + + // Check state._volatile.watch for contracts that should be + // mirroring this operation + if (Array.isArray(state._volatile?.watch)) { + const updatedKeysMap: Record = Object.create(null) + + updatedKeys.forEach((key) => { + if (key.data) { + updatedKeysMap[key.name] = cloneDeep(key) + updatedKeysMap[key.name].oldKeyId = updatedMap[key.id] + } + }) + + keyRotationHelper(contractID, state, config, updatedKeysMap, [SPMessage.OP_KEY_UPDATE], 'chelonia/out/keyUpdate', (name) => ({ + name: name[1], + oldKeyId: updatedKeysMap[name[0]].oldKeyId, + id: updatedKeysMap[name[0]].id, + data: updatedKeysMap[name[0]].data + }), internalSideEffectStack) + } + }, + [SPMessage.OP_PROTOCOL_UPGRADE]: notImplemented + } + if (!this.config.skipActionProcessing && !this.manifestToContract[manifestHash]) { + const rootState = sbp(this.config.stateSelector) as ChelRootState + // Having rootState.contracts[contractID] is not enough to determine we + // have previously synced this contract, as reference counts are also + // stored there. Hence, we check for the presence of 'type' + if (!contractName) { + contractName = has(rootState.contracts, contractID) && rootState.contracts[contractID] && has(rootState.contracts[contractID], 'type') + ? rootState.contracts[contractID].type + : opT === SPMessage.OP_CONTRACT + ? 
(opV as SPOpContract).type + : '' + } + if (!contractName) { + throw new Error(`Unable to determine the name for a contract and refusing to load it (contract ID was ${contractID} and its manifest hash was ${manifestHash})`) + } + await sbp('chelonia/private/loadManifest', contractName, manifestHash) + } + let processOp = true + if (config.preOp) { + processOp = config.preOp(message, state) !== false && processOp + } + + let signingKey: ChelContractKey + // Signature verification + { + // This sync code has potential issues + // The first issue is that it can deadlock if there are circular references + // The second issue is that it doesn't handle key rotation. If the key used for signing is invalidated / removed from the originating contract, we won't have it in the state + // Both of these issues can be resolved by introducing a parameter with the message ID the state is based on. This requires implementing a separate, ephemeral, state container for operations that refer to a different contract. + // The difficulty of this is how to securely determine the message ID to use. + // The server can assist with this. + + const stateForValidation = opT === SPMessage.OP_CONTRACT && !state?._vm?.authorizedKeys + ? 
{
              _vm: {
                authorizedKeys: keysToMap.call(this, (opV as SPOpContract).keys, height)
              }
            }
          : state

      // Verify that the signing key is found, has the correct purpose and is
      // allowed to sign this particular operation
      if (!validateKeyPermissions(message, config, stateForValidation, signingKeyId, opT, opV)) {
        throw new Error('No matching signing key was defined')
      }

      signingKey = stateForValidation._vm.authorizedKeys[signingKeyId]
    }

    if (config[`preOp_${opT}`]) {
      processOp = config[`preOp_${opT}`]!(message, state) !== false && processOp
    }
    if (processOp) {
      // Dispatch to the per-opcode handler, then run the post-op hooks
      await (opFns[opT] as (op: unknown) => Promise)(opV)
      config.postOp?.(message, state)
      config[`postOp_${opT}`]?.(message, state) // hack to fix syntax highlighting `
    }
  },
  'chelonia/private/in/enqueueHandleEvent': function (contractID: string, event: string) {
    // make sure handleEvent is called AFTER any currently-running invocations
    // to 'chelonia/private/out/sync', to prevent gi.db from throwing
    // "bad previousHEAD" errors
    return sbp('chelonia/private/queueEvent', contractID, async () => {
      await sbp('chelonia/private/in/handleEvent', contractID, event)
      // Before the next operation is enqueued, enqueue post sync ops. This
      // makes calling `/wait` more reliable
      sbp('chelonia/private/enqueuePostSyncOps', contractID)
    })
  },
  'chelonia/private/in/syncContract': async function (this: CheloniaContext, contractID: string, params?: { force?: boolean, resync?: boolean }): Promise {
    const state = sbp(this.config.stateSelector)
    // A `null` entry (as opposed to `undefined`) marks a permanently deleted
    // contract, which must not be synced again
    if (state.contracts[contractID] === null) {
      throw new ChelErrorResourceGone('Cannot sync permanently deleted contract ' + contractID)
    }

    try {
      this.currentSyncs[contractID] = { firstSync: !state.contracts[contractID]?.type }
      sbp('okTurtles.events/emit', CONTRACT_IS_SYNCING, contractID, true)
      const currentVolatileState = state[contractID]?._volatile || Object.create(null)
      // If the dirty flag is set (indicating that new encryption keys were received),
      // we remove the current state before syncing (this has the effect of syncing
      // from the beginning, recreating the entire state). When this is the case,
      // the _volatile state is preserved
      if (currentVolatileState?.dirty || params?.resync) {
        delete currentVolatileState.dirty
        currentVolatileState.resyncing = true
        sbp('chelonia/private/removeImmediately', contractID, { resync: true })
        this.config.reactiveSet(state, contractID, Object.create(null))
        this.config.reactiveSet(state[contractID], '_volatile', currentVolatileState)
      }

      const { HEAD: latestHEAD } = await sbp('chelonia/out/latestHEADInfo', contractID)
      console.debug(`[chelonia] syncContract: ${contractID} latestHash is: ${latestHEAD}`)
      // there is a chance two users are logged in to the same machine and must check their contracts before syncing
      const { HEAD: recentHEAD, height: recentHeight } = state.contracts[contractID] || {}
      const isSubscribed = this.subscriptionSet.has(contractID)
      if (!isSubscribed) {
        const entry = this.pending.find((entry) => entry?.contractID === contractID)
        // we're syncing a contract for the first time, make sure to add to pending
        // so that handleEvents knows to expect
events from this contract + if (!entry) { + this.pending.push({ contractID }) + } + } + this.postSyncOperations[contractID] = this.postSyncOperations[contractID] ?? Object.create(null) + + if (latestHEAD !== recentHEAD) { + console.debug(`[chelonia] Synchronizing Contract ${contractID}: our recent was ${recentHEAD || 'undefined'} but the latest is ${latestHEAD}`) + // TODO: fetch events from localStorage instead of server if we have them + const eventsStream = sbp('chelonia/out/eventsAfter', contractID, { sinceHeight: recentHeight ?? 0, sinceHash: recentHEAD ?? contractID }) + // Sanity check: verify event with latest hash exists in list of events + // TODO: using findLastIndex, it will be more clean but it needs Cypress 9.7+ which has bad performance + // https://docs.cypress.io/guides/references/changelog#9-7-0 + // https://github.com/cypress-io/cypress/issues/22868 + let latestHashFound = false + const eventReader = eventsStream.getReader() + // remove the first element in cases where we are not getting the contract for the first time + for (let skip = has(state.contracts, contractID) && has(state.contracts[contractID], 'HEAD'); ; skip = false) { + const { done, value: event } = await eventReader.read() + if (done) { + if (!latestHashFound) { + throw new ChelErrorForkedChain(`expected hash ${latestHEAD} in list of events for contract ${contractID}`) + } + break + } + if (!latestHashFound) { + latestHashFound = SPMessage.deserializeHEAD(event).hash === latestHEAD + } + if (skip) continue + // this must be called directly, instead of via enqueueHandleEvent + await sbp('chelonia/private/in/handleEvent', contractID, event) + } + } else if (!isSubscribed) { + this.subscriptionSet.add(contractID) + sbp('okTurtles.events/emit', CONTRACTS_MODIFIED, Array.from(this.subscriptionSet), { added: [contractID], removed: [] }) + const entryIndex = this.pending.findIndex((entry) => entry?.contractID === contractID) + if (entryIndex !== -1) { + 
this.pending.splice(entryIndex, 1) + } + console.debug(`[chelonia] added already synchronized ${contractID} to subscription set`) + } else { + console.debug(`[chelonia] contract ${contractID} was already synchronized`) + } + + // Do not await here as the post-sync ops might themselves might be + // waiting on the same queue, causing a deadlock + sbp('chelonia/private/enqueuePostSyncOps', contractID) + } catch (e) { + console.error(`[chelonia] syncContract error: ${(e as Error).message || e}`, e) + this.config.hooks.syncContractError?.(e, contractID) + throw e + } finally { + if (state[contractID]?._volatile?.resyncing) { + this.config.reactiveDel(state[contractID]._volatile, 'resyncing') + } + delete this.currentSyncs[contractID] + sbp('okTurtles.events/emit', CONTRACT_IS_SYNCING, contractID, false) + } + }, + 'chelonia/private/enqueuePostSyncOps': function (this: CheloniaContext, contractID: string) { + if (!has(this.postSyncOperations, contractID)) return + + // Iterate over each post-sync operation associated with the given contractID. + Object.entries(this.postSyncOperations[contractID]).forEach(([key, op]) => { + // Remove the operation which is about to be handled so that subsequent + // calls to this selector don't result in repeat calls to the post-sync op + delete this.postSyncOperations[contractID][key] + + // Queue the current operation for execution. + // Note that we do _not_ await because it could be unsafe to do so. + // If the operation fails for some reason, just log the error. 
+ sbp('chelonia/private/queueEvent', contractID, op).catch((e: unknown) => { + console.error(`Post-sync operation for ${contractID} failed`, { contractID, op, error: e }) + }) + }) + }, + 'chelonia/private/watchForeignKeys': function (this: CheloniaContext, externalContractID: string) { + const state = sbp(this.config.stateSelector) + const externalContractState = state[externalContractID] + + const pendingWatch = externalContractState?._vm?.pendingWatch + + if (!pendingWatch || !Object.keys(pendingWatch).length) return + + const signingKey = findSuitableSecretKeyId(externalContractState, [SPMessage.OP_KEY_DEL], ['sig']) + const canMirrorOperations = !!signingKey + + // Only sync contract if we are actually able to mirror key operations + // This avoids exponentially growing the number of contracts that we need + // to be subscribed to. + // Otherwise, every time there is a foreign key, we would subscribe to that + // contract, plus the contracts referenced by the foreign keys of that + // contract, plus those contracts referenced by the foreign keys of those + // other contracts and so on. 
+ if (!canMirrorOperations) { + console.info('[chelonia/private/watchForeignKeys]: Returning as operations cannot be mirrored', { externalContractID }) + return + } + + // For each pending watch operation, queue a synchronization event in the + // respective contract queue + Object.entries(pendingWatch).forEach(([contractID, keys]) => { + if ( + !Array.isArray(keys) || + // Check that the keys exist and haven't been revoked + !keys.reduce((acc, [, id]) => { + return acc || has(externalContractState._vm.authorizedKeys, id) + }, false) + ) { + console.info('[chelonia/private/watchForeignKeys]: Skipping as none of the keys to watch exist', { + externalContractID, + contractID + }) + return + } + + sbp('chelonia/private/queueEvent', contractID, ['chelonia/private/in/syncContractAndWatchKeys', contractID, externalContractID]).catch((e: unknown) => { + console.error(`Error at syncContractAndWatchKeys for contractID ${contractID} and externalContractID ${externalContractID}`, e) + }) + }) + }, + 'chelonia/private/in/syncContractAndWatchKeys': async function (this: CheloniaContext, contractID: string, externalContractID: string) { + const rootState = sbp(this.config.stateSelector) as ChelRootState + const externalContractState = rootState[externalContractID] + const pendingWatch = externalContractState?._vm?.pendingWatch?.[contractID]?.splice(0) + + // We duplicate the check in 'chelonia/private/watchForeignKeys' because + // new events may have been received in the meantime. 
This avoids + // unnecessarily subscribing to the contract + if ( + !Array.isArray(pendingWatch) || + // Check that the keys exist and haven't been revoked + !pendingWatch.reduce((acc, [, id]) => { + return acc || ( + has(externalContractState._vm.authorizedKeys, id) && + findKeyIdByName(externalContractState, externalContractState._vm.authorizedKeys[id].name) != null + ) + }, false) + ) { + console.info('[chelonia/private/syncContractAndWatchKeys]: Skipping as none of the keys to watch exist', { + externalContractID, + contractID + }) + return + } + + // We check this.subscriptionSet to see if we're already + // subscribed to the contract; if not, we call sync. + if (!this.subscriptionSet.has(contractID)) { + await sbp('chelonia/private/in/syncContract', contractID) + } + + const contractState = rootState[contractID] + const keysToDelete: string[] = [] + const keysToUpdate: string[] = [] + + pendingWatch.forEach(([keyName, externalId]) => { + // Does the key exist? If not, it has probably been removed and instead + // of waiting, we need to remove it ourselves + const keyId = findKeyIdByName(contractState, keyName) + if (!keyId) { + keysToDelete.push(externalId) + return + } else if (keyId !== externalId) { + // Or, the key has been updated and we need to update it in the external + // contract as well + keysToUpdate.push(externalId) + } + + // Add keys to watchlist as another contract is waiting on these + // operations + if (!contractState._volatile) { + this.config.reactiveSet(contractState, '_volatile', Object.create(null, { watch: { value: [[keyName, externalContractID]], configurable: true, enumerable: true, writable: true } })) + } else { + if (!contractState._volatile.watch) this.config.reactiveSet(contractState._volatile, 'watch', [[keyName, externalContractID]]) + if (Array.isArray(contractState._volatile.watch) && !contractState._volatile.watch.find((v) => v[0] === keyName && v[1] === externalContractID)) contractState._volatile.watch.push([keyName, 
externalContractID]) + } + }) + + // If there are keys that need to be revoked, queue an event to handle the + // deletion + if (keysToDelete.length || keysToUpdate.length) { + if (!externalContractState._volatile) { + this.config.reactiveSet(externalContractState, '_volatile', Object.create(null)) + } + if (!externalContractState._volatile!.pendingKeyRevocations) { + this.config.reactiveSet(externalContractState._volatile!, 'pendingKeyRevocations', Object.create(null)) + } + keysToDelete.forEach((id) => this.config.reactiveSet(externalContractState._volatile!.pendingKeyRevocations!, id, 'del')) + keysToUpdate.forEach((id) => this.config.reactiveSet(externalContractState._volatile!.pendingKeyRevocations!, id, true)) + + sbp('chelonia/private/queueEvent', externalContractID, ['chelonia/private/deleteOrRotateRevokedKeys', externalContractID]).catch((e: unknown) => { + console.error(`Error at deleteOrRotateRevokedKeys for contractID ${contractID} and externalContractID ${externalContractID}`, e) + }) + } + }, + // The following function gets called when we start watching a contract for + // foreign keys for the first time, and it ensures that, at the point the + // watching starts, keys are in sync between the two contracts (later on, + // this will be handled automatically for incoming OP_KEY_DEL and + // OP_KEY_UPDATE). + // For any given foreign key, there are three possible states: + // 1. The key is in sync with the foreign contract. In this case, there's + // nothing left to do. + // 2. The key has been rotated in the foreign contract (replaced by another + // key of the same name). We need to mirror this operation manually + // since watching only affects new messages we receive. + // 3. The key has been removed in the foreign contract. We also need to + // mirror the operation. 
+ 'chelonia/private/deleteOrRotateRevokedKeys': function (this: CheloniaContext, contractID: string) { + const rootState = sbp(this.config.stateSelector) as ChelRootState + const contractState = rootState[contractID] + const pendingKeyRevocations = contractState?._volatile?.pendingKeyRevocations + + if (!pendingKeyRevocations || Object.keys(pendingKeyRevocations).length === 0) return + + // First, we handle keys that have been rotated + const keysToUpdate: string[] = Object.entries(pendingKeyRevocations).filter(([, v]) => v === true).map(([id]) => id) + + // Aggregate the keys that we can update to send them in a single operation + const [, keyUpdateSigningKeyId, keyUpdateArgs] = keysToUpdate.reduce((acc, keyId) => { + const key = contractState._vm?.authorizedKeys?.[keyId] + if (!key || !key.foreignKey) return acc + const foreignKey = String(key.foreignKey) + const fkUrl = new URL(foreignKey) + const foreignContractID = fkUrl.pathname + const foreignKeyName = fkUrl.searchParams.get('keyName') + if (!foreignKeyName) throw new Error('Missing foreign key name') + const foreignState = rootState[foreignContractID] + if (!foreignState) return acc + const fKeyId = findKeyIdByName(foreignState, foreignKeyName) + if (!fKeyId) { + // Key was deleted; mark it for deletion + if (pendingKeyRevocations[keyId] === true) { + this.config.reactiveSet(pendingKeyRevocations, keyId, 'del') + } + return acc + } + + const [currentRingLevel, currentSigningKeyId, currentKeyArgs] = acc + const ringLevel = Math.min(currentRingLevel, key.ringLevel ?? 
Number.POSITIVE_INFINITY) + if (ringLevel >= currentRingLevel) { + currentKeyArgs.push({ + name: key.name, + oldKeyId: keyId, + id: fKeyId, + data: foreignState._vm.authorizedKeys[fKeyId].data + }) + return [currentRingLevel, currentSigningKeyId, currentKeyArgs] + } else if (Number.isFinite(ringLevel)) { + const signingKeyId = findSuitableSecretKeyId(contractState, [SPMessage.OP_KEY_UPDATE], ['sig'], ringLevel) + if (signingKeyId) { + currentKeyArgs.push({ + name: key.name, + oldKeyId: keyId, + id: fKeyId, + data: foreignState._vm.authorizedKeys[fKeyId].data + }) + return [ringLevel, signingKeyId, currentKeyArgs] + } + } + return acc + }, [Number.POSITIVE_INFINITY, '', [] as { name: string, oldKeyId: string, id: string, data: string }[]]) + + if (keyUpdateArgs.length !== 0) { + const contractName = contractState._vm.type + + // This is safe to do without await because it's sending an operation + // Using await could deadlock when retrying to send the message + sbp('chelonia/out/keyUpdate', { contractID, contractName, data: keyUpdateArgs, signingKeyId: keyUpdateSigningKeyId }).catch((e: unknown) => { + console.error(`[chelonia/private/deleteOrRotateRevokedKeys] Error sending OP_KEY_UPDATE for ${contractID}`, (e as Error).message) + }) + } + + // And then, we handle keys that have been deleted + const keysToDelete = Object.entries(pendingKeyRevocations).filter(([, v]) => v === 'del').map(([id]) => id) + + // Aggregate the keys that we can delete to send them in a single operation + const [, keyDelSigningKeyId, keyIdsToDelete] = keysToDelete.reduce((acc, keyId) => { + const [currentRingLevel, currentSigningKeyId, currentKeyIds] = acc + const ringLevel = Math.min(currentRingLevel, contractState._vm?.authorizedKeys?.[keyId]?.ringLevel ?? 
Number.POSITIVE_INFINITY) + if (ringLevel >= currentRingLevel) { + currentKeyIds.push(keyId) + return [currentRingLevel, currentSigningKeyId, currentKeyIds] + } else if (Number.isFinite(ringLevel)) { + const signingKeyId = findSuitableSecretKeyId(contractState, [SPMessage.OP_KEY_DEL], ['sig'], ringLevel) + if (signingKeyId) { + currentKeyIds.push(keyId) + return [ringLevel, signingKeyId, currentKeyIds] + } + } + return acc + }, [Number.POSITIVE_INFINITY, '', [] as string[]]) + + if (keyIdsToDelete.length !== 0) { + const contractName = contractState._vm.type + + // This is safe to do without await because it's sending an operation + // Using await could deadlock when retrying to send the message + sbp('chelonia/out/keyDel', { contractID, contractName, data: keyIdsToDelete, signingKeyId: keyDelSigningKeyId }).catch((e: unknown) => { + console.error(`[chelonia/private/deleteRevokedKeys] Error sending OP_KEY_DEL for ${contractID}`, (e as Error).message) + }) + } + }, + 'chelonia/private/respondToAllKeyRequests': function (this: CheloniaContext, contractID: string) { + const state = sbp(this.config.stateSelector) + const contractState = state[contractID] ?? 
{} + + const pending = contractState?._vm?.pendingKeyshares + if (!pending) return + + const signingKeyId = findSuitableSecretKeyId(contractState, [SPMessage.OP_ATOMIC, SPMessage.OP_KEY_REQUEST_SEEN, SPMessage.OP_KEY_SHARE], ['sig']) + + if (!signingKeyId) { + console.log('Unable to respond to key request because there is no suitable secret key with OP_KEY_REQUEST_SEEN permission') + return + } + + Object.entries(pending).map(([hash, entry]) => { + if (!Array.isArray(entry) || entry.length !== 4) { + return undefined + } + + const [,,, [originatingContractID]] = entry as [boolean, number, string, [string, object, number, string]] + + return sbp('chelonia/private/queueEvent', originatingContractID, ['chelonia/private/respondToKeyRequest', contractID, signingKeyId, hash]).catch((e: unknown) => { + console.error(`respondToAllKeyRequests: Error responding to key request ${hash} from ${originatingContractID} to ${contractID}`, e) + }) + }) + }, + 'chelonia/private/respondToKeyRequest': async function (this: CheloniaContext, contractID: string, signingKeyId: string, hash: string) { + const state = sbp(this.config.stateSelector) as ChelRootState + const contractState = state[contractID] + const entry = contractState?._vm?.pendingKeyshares?.[hash] + const instance = this._instance + + if (!Array.isArray(entry) || entry.length !== 4) { + return + } + + const [keyShareEncryption, height, , [originatingContractID, rv, originatingContractHeight, headJSON]] = entry as [boolean, number, string, [string, object, number, string]] + entry.pop() + + const krsEncryption = !!contractState._vm.authorizedKeys?.[signingKeyId]?._private + + // 1. 
Sync (originating) identity contract + + await sbp('chelonia/private/in/syncContract', originatingContractID) + if (instance !== this._instance) return + + const originatingState = state[originatingContractID] + const contractName = state.contracts[contractID].type + const originatingContractName = originatingState._vm.type + + const v = signedIncomingData<{ encryptionKeyId: string, responseKey: [string, string] }>(originatingContractID, originatingState, rv as unknown as { _signedData: [string, string, string] }, originatingContractHeight, headJSON).valueOf() + + // 2. Verify 'data' + const { encryptionKeyId } = v + + const responseKey = encryptedIncomingData(contractID, contractState, v.responseKey, height, this.transientSecretKeys, headJSON).valueOf() + + const deserializedResponseKey = deserializeKey(responseKey) + const responseKeyId = keyId(deserializedResponseKey) + + // This is safe to do without await because it's sending actions + // If we had await it could deadlock when retrying to send the event + Promise.resolve().then(() => { + if (instance !== this._instance) return + if (!has(originatingState._vm.authorizedKeys, responseKeyId) || originatingState._vm.authorizedKeys[responseKeyId]._notAfterHeight != null) { + throw new Error(`Unable to respond to key request for ${originatingContractID}. Key ${responseKeyId} is not valid.`) + } + + // We don't need to worry about persistence (if it was an outgoing + // message) here as this is done from an internal side-effect. 
+ sbp('chelonia/storeSecretKeys', new Secret([ + { key: deserializedResponseKey } + ])) + + const keys = pick( + state.secretKeys, + Object.entries(contractState._vm.authorizedKeys) + .filter(([, key]) => !!key.meta?.private?.shareable) + .map(([kId]) => kId) + ) + + if (!keys || Object.keys(keys).length === 0) { + console.info('respondToAllKeyRequests: no keys to share', { contractID, originatingContractID }) + return + } + + const keySharePayload = { + contractID, + keys: Object.entries(keys).map(([keyId, key]: [string, unknown]) => ({ + id: keyId, + meta: { + private: { + content: encryptedOutgoingData(originatingContractID, encryptionKeyId, key), + shareable: true + } + } + })), + keyRequestHash: hash, + keyRequestHeight: height + } + + // 3. Send OP_KEY_SHARE to identity contract + if (!contractState?._vm?.pendingKeyshares?.[hash]) { + // While we were getting ready, another client may have shared the keys + return + } + + return keySharePayload + }).then((keySharePayload) => { + if (instance !== this._instance || !keySharePayload) return + + return sbp('chelonia/out/keyShare', { + contractID: originatingContractID, + contractName: originatingContractName, + data: keyShareEncryption + ? encryptedOutgoingData( + originatingContractID, + findSuitablePublicKeyIds(originatingState, [SPMessage.OP_KEY_SHARE], ['enc'])?.[0] || '', + keySharePayload + ) + : keySharePayload, + signingKeyId: responseKeyId + }).then((msg: SPMessage) => { + if (instance !== this._instance) return + + // 4(i). 
Remove originating contract and update current contract with information + const payload = { keyRequestHash: hash, keyShareHash: msg.hash(), success: true } + const connectionKeyPayload = { + contractID: originatingContractID, + keys: [ + { + id: responseKeyId, + meta: { + private: { + content: encryptedOutgoingData(contractID, findSuitablePublicKeyIds(contractState, [SPMessage.OP_KEY_REQUEST_SEEN], ['enc'])?.[0] || '', responseKey), + shareable: true + } + } + } + ] + } + + // This is safe to do without await because it's sending an action + // If we had await it could deadlock when retrying to send the event + sbp('chelonia/out/atomic', { + contractID, + contractName, + signingKeyId, + data: [ + [ + 'chelonia/out/keyRequestResponse', + { + data: + krsEncryption + ? encryptedOutgoingData( + contractID, + findSuitablePublicKeyIds(contractState, [SPMessage.OP_KEY_REQUEST_SEEN], ['enc'])?.[0] || '', + payload + ) + : payload + } + ], + [ + // Upon successful key share, we want to share deserializedResponseKey + // with ourselves + 'chelonia/out/keyShare', + { + data: keyShareEncryption + ? encryptedOutgoingData( + contractID, + findSuitablePublicKeyIds(contractState, [SPMessage.OP_KEY_SHARE], ['enc'])?.[0] || '', + connectionKeyPayload + ) + : connectionKeyPayload + } + ] + ] + }).catch((e: unknown) => { + console.error('Error at respondToKeyRequest while sending keyRequestResponse', e) + }) + }) + }).catch((e: unknown) => { + console.error('Error at respondToKeyRequest', e) + const payload = { keyRequestHash: hash, success: false } + + // 4(ii). 
Remove originating contract and update current contract with information + if (!contractState?._vm?.pendingKeyshares?.[hash]) { + // While we were getting ready, another client may have shared the keys + return + } + + // This is safe to do without await because it's sending an action + // If we had await it could deadlock when retrying to send the event + sbp('chelonia/out/keyRequestResponse', { + contractID, + contractName, + signingKeyId, + data: krsEncryption + ? encryptedOutgoingData(contractID, findSuitablePublicKeyIds(contractState, [SPMessage.OP_KEY_REQUEST_SEEN], ['enc'])?.[0] || '', payload) + : payload + }).catch((e: unknown) => { + console.error('Error at respondToKeyRequest while sending keyRequestResponse in error handler', e) + }) + }) + }, + 'chelonia/private/in/handleEvent': async function (this: CheloniaContext, contractID: string, rawMessage: string) { + const state = sbp(this.config.stateSelector) + const { preHandleEvent, postHandleEvent, handleEventError } = this.config.hooks + let processingErrored = false + let message: SPMessage | undefined + // Errors in mutations result in ignored messages + // Errors in side effects result in dropped messages to be reprocessed + try { + // verify we're expecting to hear from this contract + if (!this.config.acceptAllMessages && !this.pending.some((entry) => entry?.contractID === contractID) && !this.subscriptionSet.has(contractID)) { + console.warn(`[chelonia] WARN: ignoring unexpected event for ${contractID}:`, rawMessage) + return + } + // contractStateCopy has a copy of the current contract state, or an empty + // object if the state doesn't exist. This copy will be used to apply + // any changes from processing the current event as well as when calling + // side-effects and, once everything is processed, it will be applied + // to the global state. 
Important note: because the state change is + // applied to the Vuex state only if process is successful (and after both + // process and the sideEffect finish), any sideEffects that need to the + // access the state should do so only through the state that is passed in + // to the call to the sideEffect, or through a call though queueInvocation + // (so that the side effect runs after the changes are applied) + const contractStateCopy = state[contractID] ? cloneDeep(state[contractID]) : Object.create(null) + // Now, deserialize the messsage + // The message is deserialized *here* and not earlier because deserialize + // constructs objects of signedIncomingData and encryptedIncomingData + // which are bound to the state. For some opcodes (such as OP_ATOMIC), the + // state could change in ways that are significant for further processing, + // so those objects need to be bound to the state copy (which is mutated) + // as opposed to the the root state (which is mutated only after + // processing is done). + // For instance, let's say the message contains an OP_ATOMIC comprising + // two operations: OP_KEY_ADD (adding a signing key) and OP_ACTION_ENCRYPTED + // (with an inner signature using this key in OP_KEY_ADD). If the state + // is bound to the copy (as below), then by the time OP_ACTION_ENCRYPTED + // is processed, the result of OP_KEY_ADD has been applied to the state + // copy. If we didn't specify a state or instead grabbed it from the root + // state, then we wouldn't be able to process OP_ACTION_ENCRYPTED correctly, + // as we wouldn't know that the key is valid from that state, and the + // state copy (contractStateCopy) is only written to the root state after + // all processing has completed. + message = SPMessage.deserialize(rawMessage, this.transientSecretKeys, contractStateCopy, this.config.unwrapMaybeEncryptedData) + if (message.contractID() !== contractID) { + throw new Error(`[chelonia] Wrong contract ID. 
Expected ${contractID} but got ${message.contractID()}`) + } + if (!message.isFirstMessage() && (!has(state.contracts, contractID) || !has(state, contractID))) { + throw new ChelErrorUnrecoverable('The event is not for a first message but the contract state is missing') + } + preHandleEvent?.(message) + // the order the following actions are done is critically important! + // first we make sure we can save this message to the db + // if an exception is thrown here we do not need to revert the state + // because nothing has been processed yet + const proceed = handleEvent.checkMessageOrdering.call(this, message) + if (proceed === false) return + + // If the contract was marked as dirty, we stop processing + // The 'dirty' flag is set, possibly *by another contract*, indicating + // that a previously unknown encryption key has been received. This means + // that the current state is invalid (because it could changed based on + // this new information) and we must re-sync the contract. When this + // happens, we stop processing because the state will be regenerated. + if (state[contractID]?._volatile?.dirty) { + console.info(`[chelonia] Ignoring message ${message.description()} as the contract is marked as dirty`) + return + } + + const internalSideEffectStack = !this.config.skipSideEffects ? [] as (({ state, message }: { state: ChelContractState, message: SPMessage }) => void)[] : undefined + + // process the mutation on the state + // IMPORTANT: even though we 'await' processMutation, everything in your + // contract's 'process' function must be synchronous! 
The only + // reason we 'await' here is to dynamically load any new contract + // source / definitions specified by the SPMessage + missingDecryptionKeyIdsMap.delete(message) + try { + await handleEvent.processMutation.call(this, message, contractStateCopy, internalSideEffectStack) + } catch (e_) { + const e = e_ as Error + if (e?.name === 'ChelErrorDecryptionKeyNotFound') { + console.warn(`[chelonia] WARN '${e.name}' in processMutation for ${message.description()}: ${e.message}`, e, message.serialize()) + if (e.cause) { + const missingDecryptionKeyIds = missingDecryptionKeyIdsMap.get(message) + if (missingDecryptionKeyIds) { + missingDecryptionKeyIds.add(e.cause as string) + } else { + missingDecryptionKeyIdsMap.set(message, new Set([e.cause as string])) + } + } + } else { + console.error(`[chelonia] ERROR '${e.name}' in processMutation for ${message.description()}: ${e.message || e}`, e, message.serialize()) + } + // we revert any changes to the contract state that occurred, ignoring this mutation + console.warn(`[chelonia] Error processing ${message.description()}: ${message.serialize()}. Any side effects will be skipped!`) + if (this.config.strictProcessing) { + throw e + } + processingErrored = e?.name !== 'ChelErrorWarning' + this.config.hooks.processError?.(e, message, getMsgMeta.call(this, message, contractID, contractStateCopy)) + // special error that prevents the head from being updated, effectively killing the contract + if ( + e.name === 'ChelErrorUnrecoverable' || + e.name === 'ChelErrorForkedChain' || + message.isFirstMessage() + ) { + throw e + } + } + + // process any side-effects (these must never result in any mutation to the contract state!) + if (!processingErrored) { + // Gets run get when skipSideEffects is false + if (Array.isArray(internalSideEffectStack) && internalSideEffectStack.length > 0) { + await Promise.all(internalSideEffectStack.map(fn => Promise.resolve(fn({ state: contractStateCopy, message: message! 
})).catch((e_: unknown) => { + const e = e_ as Error + console.error(`[chelonia] ERROR '${e.name}' in internal side effect for ${message!.description()}: ${e.message}`, e, { message: message!.serialize() }) + }))) + } + + if (!this.config.skipActionProcessing && !this.config.skipSideEffects) { + await handleEvent.processSideEffects.call(this, message, contractStateCopy)?.catch((e_: unknown) => { + const e = e_ as Error + console.error(`[chelonia] ERROR '${e.name}' in sideEffect for ${message!.description()}: ${e.message}`, e, { message: message!.serialize() }) + // We used to revert the state and rethrow the error here, but we no longer do that + // see this issue for why: https://github.com/okTurtles/group-income/issues/1544 + this.config.hooks.sideEffectError?.(e, message!) + }) + } + } + + // We keep changes to the contract state and state.contracts as close as + // possible in the code to reduce the chances of still ending up with + // an inconsistent state if a sudden failure happens while this code + // is executing. In particular, everything in between should be synchronous. + // This block will apply all the changes related to modifying the state + // after an event has been processed: + // 1. Adding the messge to the DB + // 2. Applying changes to the contract state + // 3. 
Applying changes to rootState.contracts + try { + const state = sbp(this.config.stateSelector) + await handleEvent.applyProcessResult.call(this, { message, state, contractState: contractStateCopy, processingErrored, postHandleEvent }) + } catch (e_) { + const e = e_ as Error + console.error(`[chelonia] ERROR '${e.name}' for ${message.description()} marking the event as processed: ${e.message}`, e, { message: message.serialize() }) + } + } catch (e_) { + const e = e_ as Error + console.error(`[chelonia] ERROR in handleEvent: ${e.message || e}`, e) + try { + handleEventError?.(e, message) + } catch (e2) { + console.error('[chelonia] Ignoring user error in handleEventError hook:', e2) + } + throw e + } finally { + if (message) { + missingDecryptionKeyIdsMap.delete(message) + } + } + } +}) as string[] + +const eventsToReingest: string[] = [] +const reprocessDebounced = debounce((contractID) => sbp('chelonia/private/out/sync', contractID, { force: true }).catch((e: unknown) => { + console.error(`[chelonia] Error at reprocessDebounced for ${contractID}`, e) +}), 1000) + +const handleEvent = { + checkMessageOrdering (this: CheloniaContext, message: SPMessage) { + const contractID = message.contractID() + const hash = message.hash() + const height = message.height() + const state = sbp(this.config.stateSelector) + // The latest height we want to use is the one from `state.contracts` and + // not the one from the DB. The height in the state reflects the latest + // message that's been processed, which is desired here. On the other hand, + // the DB function includes the latest known message for that contract, + // which can be ahead of the latest message processed. 
+ const latestProcessedHeight = state.contracts[contractID]?.height + if (!Number.isSafeInteger(height)) { + throw new ChelErrorDBBadPreviousHEAD(`Message ${hash} in contract ${contractID} has an invalid height.`) + } + // Avoid re-processing already processed messages + if ( + message.isFirstMessage() + // If this is the first message, the height is is expected not to exist + ? latestProcessedHeight != null + // If this isn't the first message, the height must not be lower than the + // current's message height. The check is negated to handle NaN values + : !(latestProcessedHeight < height) + ) { + // The web client may sometimes get repeated messages. If strict ordering + // isn't enabled, instead of throwing we return false. + // On the other hand, the server must enforce strict ordering. + if (!this.config.strictOrdering) { + return false + } + throw new ChelErrorAlreadyProcessed(`Message ${hash} with height ${height} in contract ${contractID} has already been processed. Current height: ${latestProcessedHeight}.`) + } + // If the message is from the future, add it to eventsToReingest + if ((latestProcessedHeight + 1) < height) { + if (this.config.strictOrdering) { + throw new ChelErrorDBBadPreviousHEAD(`Unexpected message ${hash} with height ${height} in contract ${contractID}: height is too high. Current height: ${latestProcessedHeight}.`) + } + // sometimes we simply miss messages, it's not clear why, but it happens + // in rare cases. 
So we attempt to re-sync this contract once + if (eventsToReingest.length > 100) { + throw new ChelErrorUnrecoverable('more than 100 different bad previousHEAD errors') + } + if (!eventsToReingest.includes(hash)) { + console.warn(`[chelonia] WARN bad previousHEAD for ${message.description()}, will attempt to re-sync contract to reingest message`) + eventsToReingest.push(hash) + reprocessDebounced(contractID) + return false // ignore the error for now + } else { + console.error(`[chelonia] ERROR already attempted to reingest ${message.description()}, will not attempt again!`) + throw new ChelErrorDBBadPreviousHEAD(`Already attempted to reingest ${hash}`) + } + } + const reprocessIdx = eventsToReingest.indexOf(hash) + if (reprocessIdx !== -1) { + console.warn(`[chelonia] WARN: successfully reingested ${message.description()}`) + eventsToReingest.splice(reprocessIdx, 1) + } + }, + async processMutation (this: CheloniaContext, message: SPMessage, state: ChelContractState, internalSideEffectStack?: (({ state, message }: { state: ChelContractState, message: SPMessage }) => void)[]) { + const contractID = message.contractID() + if (message.isFirstMessage()) { + // Allow having _volatile but nothing else if this is the first message, + // as we should be starting off with a clean state + if (Object.keys(state).some(k => k !== '_volatile')) { + throw new ChelErrorUnrecoverable(`state for ${contractID} is already set`) + } + } + await sbp('chelonia/private/in/processMessage', message, state, internalSideEffectStack) + }, + processSideEffects (this: CheloniaContext, message: SPMessage, state: ChelContractState) { + const opT = message.opType() + if (!([SPMessage.OP_ATOMIC, SPMessage.OP_ACTION_ENCRYPTED, SPMessage.OP_ACTION_UNENCRYPTED] as SPOpType[]).includes(opT)) { + return + } + + const contractID = message.contractID() + const manifestHash = message.manifest() + const hash = message.hash() + const height = message.height() + const signingKeyId = message.signingKeyId() + + 
const callSideEffect = async (field: SPOpActionEncrypted | SPOpActionUnencrypted) => { + const wv = this.config.unwrapMaybeEncryptedData(field) + if (!wv) return + let v = wv.data + let innerSigningKeyId: string | typeof undefined + if (isSignedData(v)) { + innerSigningKeyId = v.signingKeyId + v = v.valueOf() as ProtoSPOpActionUnencrypted + } + + const { action, data, meta } = v as ProtoSPOpActionUnencrypted + const mutation = { + data, + meta, + hash, + height, + contractID, + description: message.description(), + direction: message.direction(), + signingKeyId, + get signingContractID () { + return getContractIDfromKeyId(contractID, signingKeyId, state) + }, + innerSigningKeyId, + get innerSigningContractID () { + return getContractIDfromKeyId(contractID, innerSigningKeyId, state) + } + } as ChelContractSideeffectMutationObject + return await sbp(`${manifestHash}/${action}/sideEffect`, mutation, state) + } + const msg = Object(message.message()) + + if (opT !== SPMessage.OP_ATOMIC) { + return callSideEffect(msg) + } + + const reducer = (acc: (SPOpActionEncrypted | SPOpActionUnencrypted)[], [opT, opV]: SPOp) => { + if (([SPMessage.OP_ACTION_ENCRYPTED, SPMessage.OP_ACTION_UNENCRYPTED] as SPOpType[]).includes(opT)) { + acc.push(Object(opV)) + } + return acc + } + + const actionsOpV = msg.reduce(reducer, []) as (SPOpActionEncrypted | SPOpActionUnencrypted)[] + + return Promise.allSettled(actionsOpV.map((action) => callSideEffect(action))).then((results) => { + const errors = results.filter((r): r is PromiseRejectedResult => r.status === 'rejected').map((r) => r.reason) + if (errors.length > 0) { + console.error('Side-effect errors', contractID, errors) + throw new AggregateError(errors, `Error at side effects for ${contractID}`) + } + }) + }, + async applyProcessResult (this: CheloniaContext, { message, state, contractState, processingErrored, postHandleEvent }: { message: SPMessage, state: ChelRootState, contractState: ChelContractState, processingErrored: boolean, 
postHandleEvent?: { (x: SPMessage): void } | null | undefined }) { + const contractID = message.contractID() + const hash = message.hash() + const height = message.height() + + await sbp('chelonia/db/addEntry', message) + if (!processingErrored) { + // Once side-effects are called, we apply changes to the state. + // This means, as mentioned above, that retrieving the contract state + // via the global state will yield incorrect results. Doing things in + // this order ensures that incomplete processing of events (i.e., process + // + side-effects), e.g., due to sudden failures (like power outages, + // Internet being disconnected, etc.) aren't persisted. This allows + // us to recover by re-processing the event when these sudden failures + // happen + this.config.reactiveSet(state, contractID, contractState) + + try { + postHandleEvent?.(message) + } catch (e) { + console.error(`[chelonia] ERROR '${(e as Error).name}' for ${message.description()} in event post-handling: ${(e as Error).message}`, e, { message: message.serialize() }) + } + } + // whether or not there was an exception, we proceed ahead with updating the head + // you can prevent this by throwing an exception in the processError hook + if (message.isFirstMessage()) { + const { type } = message.opValue() as SPOpContract + if (!has(state.contracts, contractID)) { + this.config.reactiveSet(state.contracts, contractID, Object.create(null)) + } + this.config.reactiveSet(state.contracts[contractID], 'type', type) + console.debug(`contract ${type} registered for ${contractID}`) + } + if (message.isKeyOp()) { + this.config.reactiveSet(state.contracts[contractID], 'previousKeyOp', hash) + } + this.config.reactiveSet(state.contracts[contractID], 'HEAD', hash) + this.config.reactiveSet(state.contracts[contractID], 'height', height) + // If there were decryption errors due to missing encryption keys, we store + // those key IDs. If those key IDs are later shared with us, we can re-sync + // the contract. 
Without this information, we can only guess whether a + re-sync is needed or not. + // We do it here because the property is stored under `.contracts` instead + // of in the contract state itself, and this is where `.contracts` gets + // updated after handling a message. + const missingDecryptionKeyIdsForMessage = missingDecryptionKeyIdsMap.get(message) + if (missingDecryptionKeyIdsForMessage) { + let missingDecryptionKeyIds = state.contracts[contractID].missingDecryptionKeyIds + if (!missingDecryptionKeyIds) { + missingDecryptionKeyIds = [] + this.config.reactiveSet(state.contracts[contractID], 'missingDecryptionKeyIds', missingDecryptionKeyIds) + } + missingDecryptionKeyIdsForMessage.forEach(keyId => { + if (missingDecryptionKeyIds!.includes(keyId)) return + missingDecryptionKeyIds!.push(keyId) + }) + } + + if (!this.subscriptionSet.has(contractID)) { + const entry = this.pending.find((entry) => entry?.contractID === contractID) + // we've successfully received it back, so remove it from expectation pending + if (entry) { + const index = this.pending.indexOf(entry) + if (index !== -1) { + this.pending.splice(index, 1) + } + } + this.subscriptionSet.add(contractID) + sbp('okTurtles.events/emit', CONTRACTS_MODIFIED, Array.from(this.subscriptionSet), { added: [contractID], removed: [] }) + } + + if (!processingErrored) { + sbp('okTurtles.events/emit', hash, contractID, message) + sbp('okTurtles.events/emit', EVENT_HANDLED, contractID, message) + } + } +} + +const notImplemented = (v: unknown) => { + throw new Error(`chelonia: action not implemented to handle: ${JSON.stringify(v)}.`) +} + +// The code below represents different ways to dynamically load code at runtime, +// and the SES example shows how to sandbox runtime loaded code (although it doesn't +// work, see https://github.com/endojs/endo/issues/1207 for details). It's also not +// super important since we're loading signed contracts. 
+/* +// https://2ality.com/2019/10/eval-via-import.html +// Example: await import(esm`${source}`) +// const esm = ({ raw }, ...vals) => { +// return URL.createObjectURL(new Blob([String.raw({ raw }, ...vals)], { type: 'text/javascript' })) +// } + +// await loadScript.call(this, contractInfo.file, source, contractInfo.hash) +// .then(x => { +// console.debug(`loaded ${contractInfo.file}`) +// return x +// }) +// eslint-disable-next-line no-unused-vars +function loadScript (file, source, hash) { + return new Promise((resolve, reject) => { + const script = document.createElement('script') + // script.type = 'application/javascript' + script.type = 'module' + // problem with this is that scripts will step on each other's feet + script.text = source + // NOTE: this will work if the file route adds .header('Content-Type', 'application/javascript') + // script.src = `${this.config.connectionURL}/file/${hash}` + // this results in: "SyntaxError: import declarations may only appear at top level of a module" + // script.text = `(function () { + // ${source} + // })()` + script.onload = () => resolve(script) + script.onerror = (err) => reject(new Error(`${err || 'Error'} trying to load: ${file}`)) + document.getElementsByTagName('head')[0].appendChild(script) + }) +} + +// This code is cobbled together based on: +// https://github.com/endojs/endo/blob/master/packages/ses/test/test-import-cjs.js +// https://github.com/endojs/endo/blob/master/packages/ses/test/test-import.js +// const vm = await sesImportVM.call(this, `${this.config.connectionURL}/file/${contractInfo.hash}`) +// eslint-disable-next-line no-unused-vars +function sesImportVM (url): Promise { + // eslint-disable-next-line no-undef + const vm = new Compartment( + { + ...this.config.contracts.defaults.exposedGlobals, + console + }, + {}, // module map + { + resolveHook (spec, referrer) { + console.debug('resolveHook', { spec, referrer }) + return spec + }, + // 
eslint-disable-next-line require-await + async importHook (moduleSpecifier: string, ...args) { + const source = await this.config.fetch(moduleSpecifier).then(handleFetchResult('text')) + console.debug('importHook', { fetch: moduleSpecifier, args, source }) + const execute = (moduleExports, compartment, resolvedImports) => { + console.debug('execute called with:', { moduleExports, resolvedImports }) + const functor = compartment.evaluate( + `(function (require, exports, module, __filename, __dirname) { ${source} })` + // this doesn't seem to help with: https://github.com/endojs/endo/issues/1207 + // { __evadeHtmlCommentTest__: false, __rejectSomeDirectEvalExpressions__: false } + ) + const require_ = (importSpecifier) => { + console.debug('in-source require called with:', importSpecifier, 'keying:', resolvedImports) + const namespace = compartment.importNow(resolvedImports[importSpecifier]) + console.debug('got namespace:', namespace) + return namespace.default === undefined ? 
namespace : namespace.default + } + const module_ = { + get exports () { + return moduleExports + }, + set exports (newModuleExports) { + moduleExports.default = newModuleExports + } + } + functor(require_, moduleExports, module_, moduleSpecifier) + } + if (moduleSpecifier === '@common/common.js') { + return { + imports: [], + exports: ['Vue', 'L'], + execute + } + } else { + return { + imports: ['@common/common.js'], + exports: [], + execute + } + } + } + } + ) + // vm.evaluate(source) + return vm.import(url) +} +*/ diff --git a/src/local-selectors/index.ts b/src/local-selectors/index.ts new file mode 100644 index 0000000..b016e9a --- /dev/null +++ b/src/local-selectors/index.ts @@ -0,0 +1,129 @@ +// This file provides utility functions that are local regardless of whether +// Chelonia is running in a different context and calls are being forwarded +// using `chelonia/*` +import sbp from '@sbp/sbp' +import { cloneDeep } from 'turtledash' +import { CONTRACTS_MODIFIED, CONTRACTS_MODIFIED_READY, EVENT_HANDLED, EVENT_HANDLED_READY } from '../events.js' + +type Context = { + stateSelector: string; +} + +export default sbp('sbp/selectors/register', { + // This selector sets up event listeners on EVENT_HANDLED and CONTRACTS_MODIFIED + // to keep Chelonia state in sync with some external state (e.g., Vuex). + // This needs to be called from the context that owns this external state + // (e.g., the tab in which the app is running) and because 'full' Chelonia may + // be available in this context, we cannot use `chelonia/configure`. + // _If there is no external state to be kept in sync with Chelonia, this selector doesn't need to be called_ + // + // For example, **if Chelonia is running on a service worker**, the following + // would be done. + // 1. The service worker calls `chelonia/configure` and forwards EVENT_HANDLED + // and CONTRACTS_MODIFIED events to all clients (tabs) + // Note: `chelonia/configure` is called by the context running Chelonia + // 2. 
Each tab uses `chelonia/*` to forward calls to Chelonia to the SW. + // Note: Except selectors defined in this file + // 3. Each tab calls this selector once to set up event listeners on EVENT_HANDLED + // and CONTRACTS_MODIFIED, which will keep each tab's state updated every + // time Chelonia handles an event. + 'chelonia/externalStateSetup': function (this: Context, { stateSelector, reactiveSet = Reflect.set.bind(Reflect), reactiveDel = Reflect.deleteProperty.bind(Reflect) }: { + stateSelector: string, + reactiveSet: (target: object, propertyKey: PropertyKey, value: unknown) => void, + reactiveDel: (target: object, propertyKey: PropertyKey) => void + }) { + this.stateSelector = stateSelector + sbp('okTurtles.events/on', EVENT_HANDLED, (contractID: string, message: never) => { + // The purpose of putting things immediately into a queue is to have + // state mutations happen in a well-defined order. This is done for two + // purposes: + // 1. It avoids race conditions + // 2. It allows the app to use the EVENT_HANDLED queue to ensure that + // the SW state has been copied over to the local state. This is + // useful in the same sense that `chelonia/contract/wait` is useful + // (i.e., set up a barrier / sync checkpoint). 
+ sbp('okTurtles.eventQueue/queueEvent', EVENT_HANDLED, async () => { + const { contractState, cheloniaState } = await sbp('chelonia/contract/fullState', contractID) + const externalState = sbp(stateSelector) + if (cheloniaState) { + if (!externalState.contracts) { + reactiveSet(externalState, 'contracts', Object.create(null)) + } + reactiveSet(externalState.contracts, contractID, cloneDeep(cheloniaState)) + } else if (externalState.contracts) { + reactiveDel(externalState.contracts, contractID) + } + if (contractState) { + reactiveSet(externalState, contractID, cloneDeep(contractState)) + } else { + reactiveDel(externalState, contractID) + } + + // This EVENT_HANDLED_READY event lets the current context (e.g., tab) + // know that an event has been processed _and_ committed to the state + // (as opposed to EVENT_HANDLED, which means the event was processed by + // _Chelonia_ but state changes may not be reflected in the current tab + // yet). + sbp('okTurtles.events/emit', EVENT_HANDLED_READY, contractID, message) + }) + }) + + sbp('okTurtles.events/on', CONTRACTS_MODIFIED, (subscriptionSet: never, { added, removed, permanent }: { added: Array, removed: Array, permanent: boolean }) => { + sbp('okTurtles.eventQueue/queueEvent', EVENT_HANDLED, async () => { + const states = added.length + ? 
await sbp('chelonia/contract/fullState', added) + : {} + const vuexState = sbp('state/vuex/state') + + if (!vuexState.contracts) { + reactiveSet(vuexState, 'contracts', Object.create(null)) + } + + removed.forEach((contractID: string) => { + if (permanent) { + reactiveSet(vuexState.contracts, contractID, null) + } else { + reactiveDel(vuexState.contracts, contractID) + } + reactiveDel(vuexState, contractID) + }) + for (const contractID of added) { + const { contractState, cheloniaState } = states[contractID] + if (cheloniaState) { + reactiveSet(vuexState.contracts, contractID, cloneDeep(cheloniaState)) + } + if (contractState) { + reactiveSet(vuexState, contractID, cloneDeep(contractState)) + } + } + sbp('okTurtles.events/emit', CONTRACTS_MODIFIED_READY, subscriptionSet, { added, removed }) + }) + }) + }, + // This function is similar in purpose to `chelonia/contract/wait`, except + // that it's also designed to take into account delays copying Chelonia state + // to an external state (e.g., when using `chelonia/externalStateSetup`). + 'chelonia/externalStateWait': async function (this: Context, contractID: string) { + await sbp('chelonia/contract/wait', contractID) + const { cheloniaState } = await sbp('chelonia/contract/fullState', contractID) + const localState = sbp(this.stateSelector) + // If the current 'local' state has a height higher than or equal to the + // Chelonia height, we've processed all events and don't need to wait any + // longer. + if (!cheloniaState || cheloniaState.height <= localState.contracts[contractID]?.height) return + + // Otherwise, listen for `EVENT_HANDLED_READY` events till we have reached + // the necessary height. 
+ return new Promise((resolve) => { + const removeListener = sbp('okTurtles.events/on', EVENT_HANDLED_READY, (cID: string) => { + if (cID !== contractID) return + + const localState = sbp(this.stateSelector) + if (cheloniaState.height <= localState.contracts[contractID]?.height) { + resolve() + removeListener() + } + }) + }) + } +}) as string[] diff --git a/src/persistent-actions.test.ts b/src/persistent-actions.test.ts new file mode 100644 index 0000000..ebec1ca --- /dev/null +++ b/src/persistent-actions.test.ts @@ -0,0 +1,218 @@ +// FIXME: `Error: unsafe must be called before registering selector` when Mocha reloads the file. + +import sbp from '@sbp/sbp' +import assert from 'node:assert' +import { test } from 'node:test' + +import './db.js' + +import { PERSISTENT_ACTION_FAILURE, PERSISTENT_ACTION_SUCCESS, PERSISTENT_ACTION_TOTAL_FAILURE } from './events.js' +import './persistent-actions.js' +import type { PersistentActionError, PersistentActionSbpStatus, PersistentActionSuccess, UUIDV4 } from './persistent-actions.js' + +// Necessary to avoid 'JSON.stringify' errors since Node timeouts are circular objects, whereas browser timeouts are just integers. 
+setTimeout(() => {}).constructor.prototype.toJSON = () => undefined + +sbp('sbp/selectors/register', { + call R>(fn: T, ...args: A[]) { + return fn(...args) + }, + log (msg: T) { + console.log(msg) + }, + rejectAfter100ms (arg: T) { + return new Promise((resolve, reject) => { + setTimeout(() => reject(arg), 100) + }) + }, + resolveAfter100ms (arg: T) { + return new Promise((resolve) => { + setTimeout(() => resolve(arg), 100) + }) + }, + returnImmediately (arg: T) { + return arg + }, + throwImmediately (arg: T) { + throw arg + } +}) + +const createRandomError = () => new Error(`Bad number: ${String(Math.random())}`) +const getActionStatus = (id: string): PersistentActionSbpStatus => sbp('chelonia.persistentActions/status').find((obj: PersistentActionSbpStatus) => obj.id === id) +const isActionRemoved = (id: string) => !sbp('chelonia.persistentActions/status').find((obj: PersistentActionSbpStatus) => obj.id === id) + +// Custom `configure` options for tests. +// Mocha has a default 2000ms test timeout, therefore we'll use short delays. +const testOptions = { + maxAttempts: 3, + retrySeconds: 0.5 +} + +test('Test persistent actions', async (t) => { + const spies = { + returnImmediately: t.mock.fn(sbp('sbp/selectors/fn', 'returnImmediately')) + } + + await test('should configure', function () { + sbp('chelonia.persistentActions/configure', { + databaseKey: 'test-key', + options: testOptions + }) + }) + + await test('should enqueue without immediately attempting', function () { + // Prepare actions to enqueue. Random numbers are used to make invocations different. + const args = [ + // Basic syntax. + ['returnImmediately', Math.random()], + // Minimal option syntax. + { + invocation: ['returnImmediately', Math.random()] + }, + // Full option syntax. 
+ { + errorInvocation: ['log', 'Action n°3 failed'], + invocation: ['returnImmediately', Math.random()], + maxAttempts: 4, + retrySeconds: 5, + skipCondition: ['test'], + totalFailureInvocation: ['log', 'Action n°3 totally failed'] + } + ] + const ids = sbp('chelonia.persistentActions/enqueue', ...args) + assert(Array.isArray(ids)) + assert(ids.length === args.length) + // Check the actions have been correctly queued. + ids.forEach((id, index) => { + const arg = args[index] + const status = getActionStatus(id) + assert.strictEqual(status.id, id) + assert.deepEqual(status.invocation, Array.isArray(arg) ? arg : arg.invocation) + assert.strictEqual(status.attempting, false) + assert.strictEqual(status.failedAttemptsSoFar, 0) + assert.strictEqual(status.lastError, '') + assert.strictEqual(status.nextRetry, '') + assert.strictEqual(status.resolved, false) + }) + // Check the actions have NOT been tried yet. + assert.strictEqual(spies.returnImmediately.mock.callCount(), 0) + }) + + await test('should emit a success event and remove the action', async () => { + // Prepare actions using both sync and async invocations. + // TODO: maybe the async case is enough, which would make the code simpler. + const randomNumbers = [Math.random(), Math.random()] + const invocations = [ + ['resolveAfter100ms', randomNumbers[0]], + ['returnImmediately', randomNumbers[1]] + ] + const ids = sbp('chelonia.persistentActions/enqueue', ...invocations) as UUIDV4[] + await Promise.all(ids.map((id, index) => new Promise((resolve, reject) => { + // Registers a success handler for each received id. + sbp('okTurtles.events/on', PERSISTENT_ACTION_SUCCESS, function handler (details: PersistentActionSuccess) { + if (details.id !== id) return + try { + // Check the action has actually been called and its result is correct. + assert.strictEqual(details.result, randomNumbers[index]) + // Check the action has been correctly removed. 
+ assert(isActionRemoved(id)) + // Wait a little to make sure the action isn't going to be retried. + setTimeout(resolve, (testOptions.retrySeconds + 1) * 1e3) + } catch (err) { + reject(err) + } finally { + sbp('okTurtles.events/off', PERSISTENT_ACTION_SUCCESS, handler) + } + }) + }))) + }) + + await test('should emit a failure event and schedule a retry', function () { + const ourError = createRandomError() + const invocation = ['rejectAfter100ms', ourError] + const [id] = sbp('chelonia.persistentActions/enqueue', invocation) + return new Promise((resolve, reject) => { + sbp('okTurtles.events/once', PERSISTENT_ACTION_FAILURE, (details: PersistentActionError) => { + try { + assert.strictEqual(details.id, id) + assert.strictEqual(details.error, ourError) + // Check the action status. + const status = getActionStatus(id) + assert.strictEqual(status.failedAttemptsSoFar, 1) + assert.strictEqual(status.lastError, ourError.message) + assert.strictEqual(status.resolved, false) + // Check a retry has been scheduled. 
+ assert(new Date(status.nextRetry).getTime() - Date.now() <= testOptions.retrySeconds * 1e3) + resolve() + } catch (err) { + reject(err) + } + }) + }) + }) + + await test('should emit N failure events, then a total failure event and remove the action (sync)', () => { + const ourError = createRandomError() + const invocation = ['throwImmediately', ourError] + return e2eFailureTest(invocation, ourError) + }) + + await test('should emit N failure events, then a total failure event and remove the action (async)', () => { + const ourError = createRandomError() + const invocation = ['rejectAfter100ms', ourError] + return e2eFailureTest(invocation, ourError) + }) + + await test('should handle non-Error failures gracefully', () => { + const ourError = 'not a real error' + const invocation = ['rejectAfter100ms', ourError] + return e2eFailureTest(invocation, ourError) + }) + + function e2eFailureTest (invocation: unknown, ourError: unknown) { + const errorInvocationSpy = t.mock.fn() + const errorInvocation = ['call', errorInvocationSpy] + + const [id] = sbp('chelonia.persistentActions/enqueue', { invocation, errorInvocation }) + + return new Promise((resolve, reject) => { + let failureEventCounter = 0 + sbp('okTurtles.events/on', PERSISTENT_ACTION_FAILURE, (details: { error: Error, id: string }) => { + if (details.id !== id) return + failureEventCounter++ + try { + assert(failureEventCounter <= testOptions.maxAttempts, '1') + // Check the event handler was called before the corresponding SBP invocation. + assert.strictEqual(failureEventCounter, errorInvocationSpy.mock.callCount() + 1, '2') + assert.strictEqual(details.error.message, (ourError as Error)?.message ?? 
ourError, '3') + } catch (err) { + reject(err) + } + }) + sbp('okTurtles.events/on', PERSISTENT_ACTION_TOTAL_FAILURE, (details: { error: Error, id: string }) => { + if (details.id !== id) return + try { + assert.strictEqual(failureEventCounter, testOptions.maxAttempts, '3') + assert.strictEqual(errorInvocationSpy.mock.callCount(), testOptions.maxAttempts, '4') + assert.strictEqual(details.error.message, (ourError as Error)?.message ?? ourError, '5') + assert(isActionRemoved(id), '6') + resolve() + } catch (err) { + reject(err) + } + }) + }) + } + + await test('should cancel and remove the given action', function () { + return new Promise((resolve, reject) => { + // This action will reject the promise and fail the test if it ever gets tried. + const [id] = sbp('chelonia.persistentActions/enqueue', ['call', reject]) + sbp('chelonia.persistentActions/cancel', id) + assert(isActionRemoved(id)) + // Wait half a second to be sure the action isn't going to be tried despite being removed. + setTimeout(resolve, 500) + }) + }) +}) diff --git a/src/persistent-actions.ts b/src/persistent-actions.ts new file mode 100644 index 0000000..aedbbb6 --- /dev/null +++ b/src/persistent-actions.ts @@ -0,0 +1,277 @@ +import '@sbp/okturtles.events' +import sbp from '@sbp/sbp' +import { PERSISTENT_ACTION_FAILURE, PERSISTENT_ACTION_SUCCESS, PERSISTENT_ACTION_TOTAL_FAILURE } from './events.js' + +// Using `Symbol` to prevent enumeration; this avoids JSON serialization. +const timer = Symbol('timer') + +type SbpInvocation = Parameters +export type UUIDV4 = `${string}-${string}-${string}-${string}-${string}` + +type PersistentActionOptions = { + errorInvocation?: SbpInvocation, + // Maximum number of tries, default: Infinity. + maxAttempts: number, + // How many seconds to wait between retries. 
+ retrySeconds: number, + skipCondition?: SbpInvocation, + totalFailureInvocation?: SbpInvocation +} + +export type PersistentActionStatus = { + attempting: boolean, + failedAttemptsSoFar: number, + lastError: string, + nextRetry: string, + resolved: boolean +} + +export type PersistentActionError = { + id: UUIDV4, + error: Error +} + +export type PersistentActionSuccess = { + id: UUIDV4, + result: unknown +} + +export type PersistentActionSbpStatus = { + id: UUIDV4, + invocation: SbpInvocation, + attempting: boolean, + failedAttemptsSoFar: number, + lastError: string, + nextRetry: string, + resolved: boolean +} + +const coerceToError = (arg: unknown): Error => { + if (arg && arg instanceof Error) return arg + console.warn(tag, 'Please use Error objects when throwing or rejecting') + return new Error((typeof arg === 'string' ? arg : JSON.stringify(arg)) ?? 'undefined') +} + +const defaultOptions: PersistentActionOptions = { + maxAttempts: Number.POSITIVE_INFINITY, + retrySeconds: 30 +} +const tag = '[chelonia.persistentActions]' + +export class PersistentAction { + id: UUIDV4 + invocation: SbpInvocation + options: PersistentActionOptions + status: PersistentActionStatus + [timer]?: ReturnType + + constructor (invocation: SbpInvocation, options: Partial = {}) { + this.id = crypto.randomUUID() + this.invocation = invocation + this.options = { ...defaultOptions, ...options } + this.status = { + attempting: false, + failedAttemptsSoFar: 0, + lastError: '', + nextRetry: '', + resolved: false + } + } + + async attempt (): Promise { + // Bail out if the action is already attempting or resolved. + // TODO: should we also check whether the skipCondition call is pending? + if (this.status.attempting || this.status.resolved) return + if (await this.trySBP(this.options.skipCondition)) this.cancel() + // We need to check this again because cancel() could have been called while awaiting the trySBP call. 
+ if (this.status.resolved) return + try { + this.status.attempting = true + const result = await sbp(...this.invocation) + this.status.attempting = false + this.handleSuccess(result) + } catch (error) { + this.status.attempting = false + await this.handleError(coerceToError(error)) + } + } + + cancel (): void { + if (this[timer]) clearTimeout(this[timer]) + this.status.nextRetry = '' + this.status.resolved = true + } + + async handleError (error: Error): Promise { + const { id, options, status } = this + // Update relevant status fields before calling any optional code. + status.failedAttemptsSoFar++ + status.lastError = error.message + const anyAttemptLeft = options.maxAttempts > status.failedAttemptsSoFar + if (!anyAttemptLeft) status.resolved = true + status.nextRetry = anyAttemptLeft && !status.resolved + ? new Date(Date.now() + options.retrySeconds * 1e3).toISOString() + : '' + // Perform any optional SBP invocation. + // The event has to be fired first for the action to be immediately removed from the list. + sbp('okTurtles.events/emit', PERSISTENT_ACTION_FAILURE, { error, id }) + await this.trySBP(options.errorInvocation) + if (!anyAttemptLeft) { + sbp('okTurtles.events/emit', PERSISTENT_ACTION_TOTAL_FAILURE, { error, id }) + await this.trySBP(options.totalFailureInvocation) + } + // Schedule a retry if appropriate. + if (status.nextRetry) { + // Note: there should be no older active timeout to clear. + this[timer] = setTimeout(() => { + this.attempt().catch((e) => { + console.error('Error attempting persistent action', id, e) + }) + }, this.options.retrySeconds * 1e3) + } + } + + handleSuccess (result: unknown): void { + const { id, status } = this + status.lastError = '' + status.nextRetry = '' + status.resolved = true + sbp('okTurtles.events/emit', PERSISTENT_ACTION_SUCCESS, { id, result }) + } + + async trySBP (invocation: SbpInvocation | void): Promise { + try { + return invocation ? 
 + await sbp(...invocation) : undefined + } catch (error) { + console.error(tag, coerceToError(error).message) + } + } +} + +// SBP API + +type PersistentActionContext = { + actionsByID: Record + checkDatabaseKey: () => void + databaseKey: string +} + +export default sbp('sbp/selectors/register', { + 'chelonia.persistentActions/_init' (this: PersistentActionContext): void { + this.actionsByID = Object.create(null) + this.checkDatabaseKey = () => { + if (!this.databaseKey) throw new TypeError(`${tag} No database key configured`) + } + sbp('okTurtles.events/on', PERSISTENT_ACTION_SUCCESS, ({ id }: { id: UUIDV4 }) => { + sbp('chelonia.persistentActions/cancel', id) + }) + sbp('okTurtles.events/on', PERSISTENT_ACTION_TOTAL_FAILURE, ({ id }: { id: UUIDV4 }) => { + sbp('chelonia.persistentActions/cancel', id) + }) + }, + + // Cancels a specific action by its ID. + // The action won't be retried again, but an async action cannot be aborted if its promise is still attempting. + async 'chelonia.persistentActions/cancel' (this: PersistentActionContext, id: UUIDV4): Promise { + if (id in this.actionsByID) { + this.actionsByID[id].cancel() + // Note: this renders the `.status` update in `.cancel()` meaningless, as + // the action will be immediately removed. TODO: Implement as periodic + // prune action so that actions are removed some time after completion. + // This way, one could implement action status reporting to clients. 
+ delete this.actionsByID[id] + return await sbp('chelonia.persistentActions/save') + } + }, + + // TODO: validation + 'chelonia.persistentActions/configure' (this: PersistentActionContext, { databaseKey, options = {} }: { databaseKey: string; options: Partial }): void { + this.databaseKey = databaseKey + for (const key in options) { + if (key in defaultOptions) { + (defaultOptions as Record)[key] = options[key as keyof PersistentActionOptions] + } else { + throw new TypeError(`${tag} Unknown option: ${key}`) + } + } + }, + + 'chelonia.persistentActions/enqueue' (this: PersistentActionContext, ...args: (SbpInvocation | { invocation: SbpInvocation } & PersistentActionOptions)[]): UUIDV4[] { + const ids: UUIDV4[] = [] + for (const arg of args) { + const action = Array.isArray(arg) + ? new PersistentAction(arg) + : new PersistentAction(arg.invocation, arg) + this.actionsByID[action.id] = action + ids.push(action.id) + } + sbp('chelonia.persistentActions/save').catch((e: unknown) => { + console.error('Error saving persistent actions', e) + }) + for (const id of ids) { + this.actionsByID[id].attempt().catch((e) => { + console.error('Error attempting persistent action', id, e) + }) + } + return ids + }, + + // Forces retrying a given persisted action immediately, rather than waiting for the scheduled retry. + // - 'status.failedAttemptsSoFar' will still be increased upon failure. + // - Does nothing if a retry is already running. + // - Does nothing if the action has already been resolved, rejected or cancelled. + 'chelonia.persistentActions/forceRetry' (this: PersistentActionContext, id: UUIDV4): void | Promise { + if (id in this.actionsByID) { + return this.actionsByID[id].attempt() + } + }, + + // Loads and tries every stored persistent action under the configured database key. 
+ async 'chelonia.persistentActions/load' (this: PersistentActionContext): Promise { + this.checkDatabaseKey() + const storedActions = JSON.parse((await sbp('chelonia.db/get', this.databaseKey)) ?? '[]') + for (const { id, invocation, options } of storedActions) { + this.actionsByID[id] = new PersistentAction(invocation, options) + // Use the stored ID instead of the autogenerated one. + // TODO: find a cleaner alternative. + this.actionsByID[id].id = id + } + return sbp('chelonia.persistentActions/retryAll') + }, + + // Retry all existing persisted actions. + // TODO: add some delay between actions so as not to spam the server, + // or have a way to issue them all at once in a single network call. + 'chelonia.persistentActions/retryAll' (this: PersistentActionContext) { + return Promise.allSettled( + Object.keys(this.actionsByID).map(id => sbp('chelonia.persistentActions/forceRetry', id)) + ) + }, + + // Updates the database version of the attempting action list. + 'chelonia.persistentActions/save' (this: PersistentActionContext): Promise { + this.checkDatabaseKey() + return sbp( + 'chelonia.db/set', + this.databaseKey, + JSON.stringify(Object.values(this.actionsByID)) + ) + }, + + 'chelonia.persistentActions/status' (this: PersistentActionContext): PersistentActionSbpStatus[] { + return Object.values(this.actionsByID) + .map((action: PersistentAction) => ({ id: action.id, invocation: action.invocation, ...action.status })) + }, + + // Pauses every currently loaded action, and removes them from memory. + // Note: persistent storage is not affected, so that these actions can be later loaded again and retried. + 'chelonia.persistentActions/unload' (this: PersistentActionContext): void { + for (const id in this.actionsByID) { + // Clear the action's timeout, but don't cancel it so that it can later resumed. 
+ if (this.actionsByID[id as UUIDV4][timer]) { + clearTimeout(this.actionsByID[id as UUIDV4][timer]) + } + delete this.actionsByID[id as UUIDV4] + } + } +}) as string[] diff --git a/src/presets.ts b/src/presets.ts new file mode 100644 index 0000000..a600e24 --- /dev/null +++ b/src/presets.ts @@ -0,0 +1,23 @@ +// Right now, we only have a single preset, for the server. If this remains the +// case and only the server is special regarding configuration, consider +// introducing a `server: true` key to `chelonia/confgure` instead. + +export const SERVER = { + // We don't check the subscriptionSet in the server because we accpt new + // contract registrations, and are also not subcribed to contracts the same + // way clients are + acceptAllMessages: true, + // The server also doesn't process actions + skipActionProcessing: true, + // The previous setting implies this one, which we set to be on the safe side + skipSideEffects: true, + // Changes the behaviour of unwrapMaybeEncryptedData so that it never decrypts. + // Mostly useful for the server, to avoid filling up the logs and for faster + // execution. 
+ skipDecryptionAttempts: true, + // If an error occurs during processing, the message is rejected rather than + // ignored + strictProcessing: true, + // The server expects events to be received in order (no past or future events) + strictOrdering: true +} diff --git a/src/pubsub/index.test.ts b/src/pubsub/index.test.ts new file mode 100644 index 0000000..6c1dc70 --- /dev/null +++ b/src/pubsub/index.test.ts @@ -0,0 +1,49 @@ +import * as assert from 'node:assert/strict' +import { describe, it } from 'node:test' +import { createClient } from './index.js' + +const client = createClient('ws://localhost:8080', { + manual: true, + reconnectOnDisconnection: false, + reconnectOnOnline: false, + reconnectOnTimeout: false +}) +const { + maxReconnectionDelay, + minReconnectionDelay +} = client.options + +const createRandomDelays = (number: number) => { + return [...new Array(number)].map((_, i) => { + client.failedConnectionAttempts = i + return client.getNextRandomDelay() + }) +} +const delays1 = createRandomDelays(10) +const delays2 = createRandomDelays(10) + +describe('Test getNextRandomDelay()', function () { + it('every delay should be longer than the previous one', function () { + // In other words, the delays should be sorted in ascending numerical order. 
+ assert.deepEqual(delays1, [...delays1].sort((a, b) => a - b)) + assert.deepEqual(delays2, [...delays2].sort((a, b) => a - b)) + }) + + it('no delay should be shorter than the minimal reconnection delay', function () { + delays1.forEach((delay) => { + assert.ok(delay >= minReconnectionDelay) + }) + delays2.forEach((delay) => { + assert.ok(delay >= minReconnectionDelay) + }) + }) + + it('no delay should be longer than the maximal reconnection delay', function () { + delays1.forEach((delay) => { + assert.ok(delay <= maxReconnectionDelay) + }) + delays2.forEach((delay) => { + assert.ok(delay <= maxReconnectionDelay) + }) + }) +}) diff --git a/src/pubsub/index.ts b/src/pubsub/index.ts new file mode 100644 index 0000000..dbf1088 --- /dev/null +++ b/src/pubsub/index.ts @@ -0,0 +1,833 @@ +/* eslint-disable @typescript-eslint/no-this-alias */ +import '@sbp/okturtles.events' +import sbp from '@sbp/sbp' +import type { JSONObject, JSONType } from '../types.js' + +// ====== Enums ====== // + +export const NOTIFICATION_TYPE = Object.freeze({ + ENTRY: 'entry', + DELETION: 'deletion', + KV: 'kv', + KV_FILTER: 'kv_filter', + PING: 'ping', + PONG: 'pong', + PUB: 'pub', + SUB: 'sub', + UNSUB: 'unsub', + VERSION_INFO: 'version_info' +}) + +export const REQUEST_TYPE = Object.freeze({ + PUB: 'pub', + SUB: 'sub', + UNSUB: 'unsub', + PUSH_ACTION: 'push_action', + KV_FILTER: 'kv_filter' +}) + +export const RESPONSE_TYPE = Object.freeze({ + ERROR: 'error', + OK: 'ok' +}) + +export const PUSH_SERVER_ACTION_TYPE = Object.freeze({ + SEND_PUBLIC_KEY: 'send-public-key', + STORE_SUBSCRIPTION: 'store-subscription', + DELETE_SUBSCRIPTION: 'delete-subscription', + SEND_PUSH_NOTIFICATION: 'send-push-notification' +}) + +export type NotificationTypeEnum = typeof NOTIFICATION_TYPE[keyof typeof NOTIFICATION_TYPE] +export type RequestTypeEnum = typeof REQUEST_TYPE[keyof typeof REQUEST_TYPE] +export type ResponseTypeEnum = typeof RESPONSE_TYPE[keyof typeof RESPONSE_TYPE] + +// ====== Types ====== // + 
+type TimeoutID = ReturnType + +export type Options = { + logPingMessages: boolean; + pingTimeout: number; + maxReconnectionDelay: number; + maxRetries: number; + minReconnectionDelay: number; + reconnectOnDisconnection: boolean; + reconnectOnOnline: boolean; + reconnectOnTimeout: boolean; + reconnectionDelayGrowFactor: number; + timeout: number; + manual?: boolean; + // eslint-disable-next-line no-use-before-define + handlers?: Partial; + // eslint-disable-next-line no-use-before-define + messageHandlers?: Partial; +} + +export type Message = { + [key: string]: JSONType, + type: string +} + +export type PubSubClient = { + connectionTimeoutID: TimeoutID | undefined, + connectionTimeUsed?: number, + // eslint-disable-next-line no-use-before-define + customEventHandlers: Partial, + failedConnectionAttempts: number, + isLocal: boolean, + isNew: boolean, + // eslint-disable-next-line no-use-before-define + listeners: ClientEventHandlers, + // eslint-disable-next-line no-use-before-define + messageHandlers: MessageHandlers, + nextConnectionAttemptDelayID: TimeoutID | undefined, + options: Options, + pendingSubscriptionSet: Set, + pendingUnsubscriptionSet: Set, + pingTimeoutID: TimeoutID | undefined, + shouldReconnect: boolean, + socket: WebSocket | null, + subscriptionSet: Set, + kvFilter: Map, + url: string, + // Methods + clearAllTimers(this: PubSubClient): void, + connect(this: PubSubClient): void, + destroy(this: PubSubClient): void, + pub(this: PubSubClient, channelID: string, data: JSONType): void, + scheduleConnectionAttempt(this: PubSubClient): void, + sub(this: PubSubClient, channelID: string): void, + unsub(this: PubSubClient, channelID: string): void, + getNextRandomDelay(this: PubSubClient): number, + setKvFilter(this: PubSubClient, channelID: string, kvFilter?: string[]): void +} + +type ClientEventHandlers = { + close (this: PubSubClient, event: CloseEvent): void, + error (this: PubSubClient, event: Event): void, + message (this: PubSubClient, event: 
MessageEvent): void, + offline (this: PubSubClient, event: Event): void, + online (this: PubSubClient, event: Event): void, + open (this: PubSubClient, event: Event): void, + 'reconnection-attempt' (this: PubSubClient, event: CustomEvent): void, + 'reconnection-succeeded' (this: PubSubClient, event: CustomEvent): void, + 'reconnection-failed' (this: PubSubClient, event: CustomEvent): void, + 'reconnection-scheduled' (this: PubSubClient, event: CustomEvent): void, + 'subscription-succeeded' (this: PubSubClient, event: CustomEvent): void +} + +type MessageHandlers = { + [NOTIFICATION_TYPE.ENTRY](this: PubSubClient, msg: { data: JSONType, type: string, [x: string]: unknown }): void, + [NOTIFICATION_TYPE.PING](this: PubSubClient, msg: { data: JSONType }): void, + [NOTIFICATION_TYPE.PUB](this: PubSubClient, msg: { channelID: string, data: JSONType }): void, + [NOTIFICATION_TYPE.KV](this: PubSubClient, msg: { channelID: string, key: string, data: JSONType }): void, + [NOTIFICATION_TYPE.SUB](this: PubSubClient, msg: { channelID: string, type: string, data: JSONType }): void, + [NOTIFICATION_TYPE.UNSUB](this: PubSubClient, msg: { channelID: string, type: string, data: JSONType }): void, + [RESPONSE_TYPE.ERROR](this: PubSubClient, msg: { data: { type: string, channelID: string, data: JSONType, reason: string, actionType?: string, message?: string } }): void, + [RESPONSE_TYPE.OK](this: PubSubClient, msg: { data: { type: string, channelID: string } }): void +} + +export type PubMessage = { + type: 'pub', + channelID: string, + data: JSONType +} + +export type SubMessage = { + [key: string]: JSONType, + type: 'sub', + channelID: string +} & { kvFilter?: Array } + +export type UnsubMessage = { + [key: string]: JSONType, + type: 'unsub', + channelID: string +} + +// TODO: verify these are good defaults +const defaultOptions: Options = { + logPingMessages: process.env.NODE_ENV === 'development' && !process.env.CI, + pingTimeout: 45000, + maxReconnectionDelay: 60000, + maxRetries: 
10,
  minReconnectionDelay: 500,
  reconnectOnDisconnection: true,
  reconnectOnOnline: true,
  // Defaults to false to avoid reconnection attempts in case the server doesn't
  // respond because of a failed authentication.
  reconnectOnTimeout: false,
  reconnectionDelayGrowFactor: 2,
  timeout: 60000
}

// ====== Event name constants ====== //

export const PUBSUB_ERROR = 'pubsub-error'
export const PUBSUB_RECONNECTION_ATTEMPT = 'pubsub-reconnection-attempt'
export const PUBSUB_RECONNECTION_FAILED = 'pubsub-reconnection-failed'
export const PUBSUB_RECONNECTION_SCHEDULED = 'pubsub-reconnection-scheduled'
export const PUBSUB_RECONNECTION_SUCCEEDED = 'pubsub-reconnection-succeeded'
export const PUBSUB_SUBSCRIPTION_SUCCEEDED = 'pubsub-subscription-succeeded'

// ====== API ====== //

/**
 * Creates a pubsub client instance.
 *
 * @param {string} url - A WebSocket URL to connect to.
 * @param {Object?} options
 * {object?} handlers - Custom handlers for WebSocket events.
 * {boolean?} logPingMessages - Whether to log received pings.
 * {boolean?} manual - Whether the factory should call 'connect()' automatically.
 *   Also named 'autoConnect' or 'startClosed' in other libraries.
 * {object?} messageHandlers - Custom handlers for different message types.
 * {number?} pingTimeout=45_000 - How long to wait for the server to send a ping, in milliseconds.
 * {boolean?} reconnectOnDisconnection=true - Whether to reconnect after a server-side disconnection.
 * {boolean?} reconnectOnOnline=true - Whether to reconnect after coming back online.
 * {boolean?} reconnectOnTimeout=false - Whether to reconnect after a connection timeout.
 * {number?} timeout=60_000 - Connection timeout duration in milliseconds.
+ * @returns {PubSubClient} + */ +export function createClient (url: string, options: Partial = {}): PubSubClient { + const client: PubSubClient = { + customEventHandlers: options.handlers || {}, + // The current number of connection attempts that failed. + // Reset to 0 upon successful connection. + // Used to compute how long to wait before the next reconnection attempt. + failedConnectionAttempts: 0, + isLocal: /\/\/(localhost|127\.0\.0\.1)([:?/]|$)/.test(url), + // True if this client has never been connected yet. + isNew: true, + listeners: Object.create(null), + messageHandlers: { ...defaultMessageHandlers, ...options.messageHandlers }, + nextConnectionAttemptDelayID: undefined, + options: { ...defaultOptions, ...options }, + // Requested subscriptions for which we didn't receive a response yet. + pendingSubscriptionSet: new Set(), + pendingUnsubscriptionSet: new Set(), + pingTimeoutID: undefined, + shouldReconnect: true, + // The underlying WebSocket object. + // A new one is necessary for every connection or reconnection attempt. + socket: null, + subscriptionSet: new Set(), + kvFilter: new Map(), + connectionTimeoutID: undefined, + url: url.replace(/^http/, 'ws'), + ...publicMethods + } + // Create and save references to reusable event listeners. + // Every time a new underlying WebSocket object will be created for this + // client instance, these event listeners will be detached from the older + // socket then attached to the new one, hereby avoiding both unnecessary + // allocations and garbage collections of a bunch of functions every time. + // Another benefit is the ability to patch the client protocol at runtime by + // updating the client's custom event handler map. + for (const name of Object.keys(defaultClientEventHandlers) as (keyof typeof defaultClientEventHandlers)[]) { + client.listeners[name] = (event) => { + try { + // Use `.call()` to pass the client via the 'this' binding. 
+ ;(defaultClientEventHandlers[name] as (this: PubSubClient, ev: typeof event) => void).call(client, event) + ;(client.customEventHandlers[name] as (this: PubSubClient, ev: typeof event) => void)?.call(client, event) + } catch (error) { + // Do not throw any error but emit an `error` event instead. + sbp('okTurtles.events/emit', PUBSUB_ERROR, client, (error as Error)?.message) + } + } + } + // Add global event listeners before the first connection. + if (typeof self === 'object' && self instanceof EventTarget) { + for (const name of globalEventNames) { + globalEventMap.set(name, client.listeners[name]) + } + } + if (!client.options.manual) { + client.connect() + } + return client +} + +export function createMessage (type: string, data: JSONType, meta?: object | null | undefined): { type: string, data: JSONType, [x: string]: unknown } { + const message = { ...meta, type, data } + let string: string + const stringify = function (this: typeof message) { + if (!string) string = JSON.stringify(this) + return string + } + Object.defineProperties(message, { + [Symbol.toPrimitive]: { + value: stringify + } + }) + return message +} + +export function createKvMessage (channelID: string, key: string, data: JSONType): string { + return JSON.stringify({ type: NOTIFICATION_TYPE.KV, channelID, key, data }) +} + +export function createPubMessage (channelID: string, data: JSONType): string { + return JSON.stringify({ type: NOTIFICATION_TYPE.PUB, channelID, data }) +} + +export function createRequest (type: RequestTypeEnum, data: JSONObject): string { + // Had to use Object.assign() instead of object spreading to make Flow happy. + return JSON.stringify(Object.assign({ type }, data)) +} + +// These handlers receive the PubSubClient instance through the `this` binding. +const defaultClientEventHandlers: ClientEventHandlers = { + // Emitted when the connection is closed. 
  close (event) {
    const client = this

    console.debug('[pubsub] Event: close', event.code, event.reason)
    // Count this close as a failed attempt. The 'open' handler resets this
    // counter (to -1, so that it becomes 0 on the close that follows).
    client.failedConnectionAttempts++

    if (client.socket) {
      // Remove event listeners to avoid memory leaks.
      for (const name of socketEventNames) {
        client.socket.removeEventListener(name, client.listeners[name] as () => void)
      }
    }
    client.socket = null
    client.clearAllTimers()

    // This has been commented out to make the client always try to reconnect.
    // See https://github.com/okTurtles/group-income/issues/1246
    /*
    // See "Status Codes" https://tools.ietf.org/html/rfc6455#section-7.4
    switch (event.code) {
      // TODO: verify that this list of codes is correct.
      case 1000: case 1002: case 1003: case 1007: case 1008: {
        client.shouldReconnect = false
        break
      }
      default: break
    }
    */
    // If we should reconnect then consider our current subscriptions as pending again,
    // waiting to be restored upon reconnection.
    if (client.shouldReconnect) {
      client.subscriptionSet.forEach((channelID) => {
        // Skip contracts from which we had to unsubscribe anyway.
        if (!client.pendingUnsubscriptionSet.has(channelID)) {
          client.pendingSubscriptionSet.add(channelID)
        }
      })
    }
    // We are no longer subscribed to any contracts since we are now disconnected.
    client.subscriptionSet.clear()
    client.pendingUnsubscriptionSet.clear()

    if (client.shouldReconnect && client.options.reconnectOnDisconnection) {
      if (client.failedConnectionAttempts > client.options.maxRetries) {
        sbp('okTurtles.events/emit', PUBSUB_RECONNECTION_FAILED, client)
      } else {
        // If we are definitely offline then do not try to reconnect now,
        // unless the server is local.
        // (The helper deliberately keeps its historical 'Definetely' spelling.)
        if (!isDefinetelyOffline() || client.isLocal) {
          client.scheduleConnectionAttempt()
        }
      }
    }
  },

  // Emitted when an error has occurred.
  // The socket will be closed automatically by the engine if necessary.
+ error (event) { + const client = this + // Not all error events should be logged with console.error, for example every + // failed connection attempt generates one such event. + console.warn('[pubsub] Event: error', event) + clearTimeout(client.pingTimeoutID) + }, + + // Emitted when a message is received. + // The connection will be terminated if the message is malformed or has an + // unexpected data type (e.g. binary instead of text). + message (event: MessageEvent) { + const client = this + const { data } = event + + if (typeof data !== 'string') { + sbp('okTurtles.events/emit', PUBSUB_ERROR, client, { + message: `Wrong data type: ${typeof data}` + }) + return client.destroy() + } + let msg: Message = { type: '' } + + try { + msg = messageParser(data) + } catch (error) { + sbp('okTurtles.events/emit', PUBSUB_ERROR, client, { + message: `Malformed message: ${(error as Error)?.message}` + }) + return client.destroy() + } + const handler = client.messageHandlers[msg.type as keyof typeof client.messageHandlers] + + if (handler) { + (handler as (msg: Message) => void).call(client, msg) + } else { + throw new Error(`Unhandled message type: ${msg.type}`) + } + }, + + offline () { + console.info('[pubsub] Event: offline') + const client = this + + client.clearAllTimers() + // Reset the connection attempt counter so that we'll start a new + // reconnection loop when we are back online. + client.failedConnectionAttempts = 0 + client.socket?.close() + }, + + online () { + console.info('[pubsub] Event: online') + const client = this + + if (client.options.reconnectOnOnline && client.shouldReconnect) { + if (!client.socket) { + client.failedConnectionAttempts = 0 + client.scheduleConnectionAttempt() + } + } + }, + + // Emitted when the connection is established. 
+ open () { + console.debug('[pubsub] Event: open') + const client = this + const { options } = this + + client.connectionTimeUsed = undefined + client.clearAllTimers() + sbp('okTurtles.events/emit', PUBSUB_RECONNECTION_SUCCEEDED, client) + + // Set it to -1 so that it becomes 0 on the next `close` event. + client.failedConnectionAttempts = -1 + client.isNew = false + // Setup a ping timeout if required. + // It will close the connection if we don't get any message from the server. + if (options.pingTimeout > 0 && options.pingTimeout < Infinity) { + client.pingTimeoutID = setTimeout(() => { + client.socket?.close() + }, options.pingTimeout) + } + // Send any pending subscription request. + client.pendingSubscriptionSet.forEach((channelID) => { + const kvFilter = this.kvFilter.get(channelID) + client.socket?.send(createRequest(REQUEST_TYPE.SUB, kvFilter ? { channelID, kvFilter } : { channelID })) + }) + // There should be no pending unsubscription since we just got connected. + }, + + 'reconnection-attempt' () { + console.info('[pubsub] Trying to reconnect...') + }, + + 'reconnection-succeeded' () { + console.info('[pubsub] Connection re-established') + }, + + 'reconnection-failed' () { + console.warn('[pubsub] Reconnection failed') + const client = this + + client.destroy() + }, + + 'reconnection-scheduled' (event) { + const { delay, nth } = event.detail + console.info(`[pubsub] Scheduled connection attempt ${nth} in ~${delay} ms`) + }, + + 'subscription-succeeded' (event) { + const { channelID } = event.detail + console.debug(`[pubsub] Subscribed to channel ${channelID}`) + } +} + +// These handlers receive the PubSubClient instance through the `this` binding. 
const defaultMessageHandlers: MessageHandlers = {
  // Log-only handler for contract log entries.
  [NOTIFICATION_TYPE.ENTRY] (msg) {
    console.debug('[pubsub] Received ENTRY:', msg)
  },

  // Server liveness check: reply with a PONG and re-arm the ping timeout.
  [NOTIFICATION_TYPE.PING] ({ data }) {
    const client = this

    if (client.options.logPingMessages) {
      console.debug(`[pubsub] Ping received in ${Date.now() - Number(data)} ms`)
    }
    // Reply with a pong message using the same data.
    // TODO: Type coercion to string because we actually support passing this
    // object type, but the correct TypeScript type hasn't been written.
    client.socket?.send(createMessage(NOTIFICATION_TYPE.PONG, data) as unknown as string)
    // Refresh the ping timer, waiting for the next ping.
    clearTimeout(client.pingTimeoutID)
    client.pingTimeoutID = setTimeout(() => {
      client.socket?.close()
    }, client.options.pingTimeout)
  },

  [NOTIFICATION_TYPE.PUB] ({ channelID, data }) {
    console.log(`[pubsub] Received data from channel ${channelID}:`, data)
    // No need to reply.
  },

  [NOTIFICATION_TYPE.KV] ({ channelID, key, data }) {
    console.log(`[pubsub] Received KV update from channel ${channelID} ${key}:`, data)
    // No need to reply.
  },

  [NOTIFICATION_TYPE.SUB] (msg) {
    console.debug(`[pubsub] Ignoring ${msg.type} message:`, msg.data)
  },

  [NOTIFICATION_TYPE.UNSUB] (msg) {
    console.debug(`[pubsub] Ignoring ${msg.type} message:`, msg.data)
  },

  // A request of ours was rejected: undo the corresponding pending bookkeeping
  // so the request can be re-issued later.
  [RESPONSE_TYPE.ERROR] ({ data }) {
    const { type, channelID, reason } = data
    console.warn(`[pubsub] Received ERROR response for ${type} request to ${channelID}`)
    const client = this

    switch (type) {
      case REQUEST_TYPE.SUB: {
        console.warn(`[pubsub] Could not subscribe to ${channelID}: ${reason}`)
        client.pendingSubscriptionSet.delete(channelID)
        break
      }
      case REQUEST_TYPE.UNSUB: {
        console.warn(`[pubsub] Could not unsubscribe from ${channelID}: ${reason}`)
        client.pendingUnsubscriptionSet.delete(channelID)
        break
      }
      case REQUEST_TYPE.PUSH_ACTION: {
        const { actionType, message } = data
        console.warn(`[pubsub] Received ERROR for PUSH_ACTION request with the action type '${actionType}' and the following message: ${message}`)
        break
      }
      default: {
        console.error(`[pubsub] Malformed response: invalid request type ${type}`)
      }
    }
  },

  // A request of ours was accepted: move the channel between the pending and
  // confirmed sets accordingly.
  [RESPONSE_TYPE.OK] ({ data: { type, channelID } }) {
    const client = this

    switch (type) {
      case REQUEST_TYPE.SUB: {
        client.pendingSubscriptionSet.delete(channelID)
        client.subscriptionSet.add(channelID)
        sbp('okTurtles.events/emit', PUBSUB_SUBSCRIPTION_SUCCEEDED, client, { channelID })
        break
      }
      case REQUEST_TYPE.UNSUB: {
        console.debug(`[pubsub] Unsubscribed from ${channelID}`)
        client.pendingUnsubscriptionSet.delete(channelID)
        client.subscriptionSet.delete(channelID)
        // Also drop any KV filter for the channel we just left.
        client.kvFilter.delete(channelID)
        break
      }
      case REQUEST_TYPE.KV_FILTER: {
        console.debug(`[pubsub] Set KV filter for ${channelID}`)
        break
      }
      default: {
        console.error(`[pubsub] Malformed response: invalid request type ${type}`)
      }
    }
  }
}

// Browser/worker-global events the client reacts to, and WebSocket events it
// attaches listeners for.
const globalEventNames = ['offline', 'online'] as const
const socketEventNames = ['close', 'error', 'message', 'open'] as const
// 
eslint-disable-next-line func-call-spacing +const globalEventMap = new Map void>() + +if (typeof self === 'object' && self instanceof EventTarget) { + // We need to do things in this roundabout way because Chrome doesn't like + // these events handlers not being top-level. + // `Event handler of 'online' event must be added on the initial evaluation of worker script.` + for (const name of globalEventNames) { + const handler = (ev: Event) => { + const h = globalEventMap.get(name) + return h?.(ev) + } + self.addEventListener(name, handler, false) + } +} + +// `navigator.onLine` can give confusing false positives when `true`, +// so we'll define `isDefinetelyOffline()` rather than `isOnline()` or `isOffline()`. +// See https://developer.mozilla.org/en-US/docs/Web/API/Navigator/onLine +const isDefinetelyOffline = () => typeof navigator === 'object' && navigator.onLine === false + +// Parses and validates a received message. +export const messageParser = (data: string): Message => { + const msg = JSON.parse(data) + + if (typeof msg !== 'object' || msg === null) { + throw new TypeError('Message is null or not an object') + } + const { type } = msg + + if (typeof type !== 'string' || type === '') { + throw new TypeError('Message type must be a non-empty string') + } + return msg +} + +const publicMethods: { + clearAllTimers(this: PubSubClient): void, + connect(this: PubSubClient): void, + destroy(this: PubSubClient): void, + pub(this: PubSubClient, channelID: string, data: JSONType): void, + scheduleConnectionAttempt(this: PubSubClient): void, + sub(this: PubSubClient, channelID: string): void, + setKvFilter(this: PubSubClient, channelID: string, kvFilter?: string[]): void, + unsub(this: PubSubClient, channelID: string): void, + getNextRandomDelay(this: PubSubClient): number +} = { + clearAllTimers () { + const client = this + + clearTimeout(client.connectionTimeoutID) + clearTimeout(client.nextConnectionAttemptDelayID) + clearTimeout(client.pingTimeoutID) + 
client.connectionTimeoutID = undefined + client.nextConnectionAttemptDelayID = undefined + client.pingTimeoutID = undefined + }, + + // Performs a connection or reconnection attempt. + connect () { + const client = this + + if (client.socket !== null) { + throw new Error('connect() can only be called if there is no current socket.') + } + if (client.nextConnectionAttemptDelayID) { + throw new Error('connect() must not be called during a reconnection delay.') + } + if (!client.shouldReconnect) { + throw new Error('connect() should no longer be called on this instance.') + } + client.socket = new WebSocket(client.url) + // Sometimes (like when using `createMessage`), we want to send objects that + // are serialized as strings. Native web sockets don't support objects, so + // we use this workaround. + client.socket.send = function (data) { + const send = WebSocket.prototype.send.bind(this) + if ( + typeof data === 'object' && + typeof (data as object as { [Symbol.toPrimitive]?: unknown })[Symbol.toPrimitive] === 'function' + ) { + return send( + (data as object as { [Symbol.toPrimitive]: () => string })[Symbol.toPrimitive]() + ) + } + return send(data) + } + + if (client.options.timeout) { + const start = performance.now() + client.connectionTimeoutID = setTimeout(() => { + client.connectionTimeoutID = undefined + if (client.options.reconnectOnTimeout) { + client.connectionTimeUsed = performance.now() - start + } + client.socket?.close(4000, 'timeout') + }, client.options.timeout) + } + // Attach WebSocket event listeners. + for (const name of socketEventNames) { + client.socket.addEventListener(name, client.listeners[name] as () => void) + } + }, + + /** + * Immediately close the socket, stop listening for events and clear any cache. + * + * This method is used in unit tests. + * - In particular, no 'close' event handler will be called. + * - Any incoming or outgoing buffered data will be discarded. + * - Any pending messages will be discarded. 
   */
  destroy () {
    const client = this

    client.clearAllTimers()
    // Update property values.
    // Note: do not clear 'client.options'.
    client.pendingSubscriptionSet.clear()
    client.pendingUnsubscriptionSet.clear()
    client.subscriptionSet.clear()
    // Remove global event listeners.
    if (typeof self === 'object' && self instanceof EventTarget) {
      for (const name of globalEventNames) {
        globalEventMap.delete(name)
      }
    }
    // Remove WebSocket event listeners.
    if (client.socket) {
      for (const name of socketEventNames) {
        client.socket.removeEventListener(name, client.listeners[name] as () => void)
      }
      client.socket.close()
    }
    client.listeners = Object.create(null)
    client.socket = null
    // Prevent any further connect() / scheduleConnectionAttempt() calls.
    client.shouldReconnect = false
  },

  // Computes how long (in ms) to wait before the next connection attempt,
  // using a randomized exponential backoff:
  // - the candidate window [minDelay, maxDelay] grows by
  //   reconnectionDelayGrowFactor with each failed attempt;
  // - a uniformly random point in that window is picked;
  // - the result is capped by maxReconnectionDelay, reduced by any connection
  //   time a timed-out attempt already consumed (see connect() and #1943).
  getNextRandomDelay (): number {
    const client = this

    const {
      maxReconnectionDelay,
      minReconnectionDelay,
      reconnectionDelayGrowFactor
    } = client.options

    const minDelay = minReconnectionDelay * reconnectionDelayGrowFactor ** client.failedConnectionAttempts
    const maxDelay = minDelay * reconnectionDelayGrowFactor
    // connectionTimeUsed is set by connect() when an attempt times out (with
    // reconnectOnTimeout enabled); consume it so it affects one delay only.
    const connectionTimeUsed = client.connectionTimeUsed
    client.connectionTimeUsed = undefined

    return Math.min(
      // See issue #1943: Have the connection time used 'eat into' the
      // reconnection time used
      Math.max(
        minReconnectionDelay,
        connectionTimeUsed ? maxReconnectionDelay - connectionTimeUsed : maxReconnectionDelay
      ),
      // '(0, Math.random)()' is an indirect call, equivalent to Math.random().
      Math.round(minDelay + (0, Math.random)() * (maxDelay - minDelay))
    )
  },

  // Schedules a connection attempt to happen after a delay computed according to
  // a randomized exponential backoff algorithm variant.
+ scheduleConnectionAttempt () { + const client = this + + if (!client.shouldReconnect) { + throw new Error('Cannot call `scheduleConnectionAttempt()` when `shouldReconnect` is false.') + } + if (client.nextConnectionAttemptDelayID) { + return console.warn('[pubsub] A reconnection attempt is already scheduled.') + } + const delay = client.getNextRandomDelay() + const nth = client.failedConnectionAttempts + 1 + + client.nextConnectionAttemptDelayID = setTimeout(() => { + sbp('okTurtles.events/emit', PUBSUB_RECONNECTION_ATTEMPT, client) + client.nextConnectionAttemptDelayID = undefined + client.connect() + }, delay) + sbp('okTurtles.events/emit', PUBSUB_RECONNECTION_SCHEDULED, client, { delay, nth }) + }, + + // Can be used to send ephemeral messages outside of any contract log. + // Does nothing if the socket is not in the OPEN state. + pub (channelID: string, data: JSONType) { + if (this.socket?.readyState === WebSocket.OPEN) { + this.socket.send(createPubMessage(channelID, data)) + } + }, + /** + * Sends a SUB request to the server as soon as possible. + * + * - The given channel ID will be cached until we get a relevant server + * response, allowing us to resend the same request if necessary. + * - Any identical UNSUB request that has not been sent yet will be cancelled. + * - Calling this method again before the server has responded has no effect. + * @param channelID - The ID of the channel whose updates we want to subscribe to. + */ + sub (channelID: string) { + const client = this + const { socket } = this + + if (!client.pendingSubscriptionSet.has(channelID)) { + client.pendingSubscriptionSet.add(channelID) + client.pendingUnsubscriptionSet.delete(channelID) + + if (socket?.readyState === WebSocket.OPEN) { + const kvFilter = client.kvFilter.get(channelID) + socket.send(createRequest(REQUEST_TYPE.SUB, kvFilter ? { channelID, kvFilter } : { channelID })) + } + } + }, + + /** + * Sends a KV_FILTER request to the server as soon as possible. 
+ */ + setKvFilter (channelID: string, kvFilter?: string[]) { + const client = this + const { socket } = this + + if (kvFilter) { + client.kvFilter.set(channelID, kvFilter) + } else { + client.kvFilter.delete(channelID) + } + + if (client.subscriptionSet.has(channelID)) { + if (socket?.readyState === WebSocket.OPEN) { + socket.send(createRequest(REQUEST_TYPE.KV_FILTER, kvFilter ? { channelID, kvFilter } : { channelID })) + } + } + }, + + /** + * Sends an UNSUB request to the server as soon as possible. + * + * - The given channel ID will be cached until we get a relevant server + * response, allowing us to resend the same request if necessary. + * - Any identical SUB request that has not been sent yet will be cancelled. + * - Calling this method again before the server has responded has no effect. + * @param channelID - The ID of the channel whose updates we want to unsubscribe from. + */ + unsub (channelID: string) { + const client = this + const { socket } = this + + if (!client.pendingUnsubscriptionSet.has(channelID)) { + client.pendingSubscriptionSet.delete(channelID) + client.pendingUnsubscriptionSet.add(channelID) + + if (socket?.readyState === WebSocket.OPEN) { + socket.send(createRequest(REQUEST_TYPE.UNSUB, { channelID })) + } + } + } +} + +// Register custom SBP event listeners before the first connection. 
+for (const name of Object.keys(defaultClientEventHandlers)) { + if (name === 'error' || !(socketEventNames as readonly string[]).includes(name)) { + sbp('okTurtles.events/on', `pubsub-${name}`, (target: PubSubClient, detail?: object) => { + const ev = new CustomEvent(name, { detail }) + ;(target.listeners[name as keyof ClientEventHandlers] as (this: typeof target, e: typeof ev) => void).call(target, ev) + }) + } +} + +export default { + NOTIFICATION_TYPE, + REQUEST_TYPE, + RESPONSE_TYPE, + createClient, + createMessage, + createRequest +} diff --git a/src/signedData.ts b/src/signedData.ts new file mode 100644 index 0000000..e5e17ae --- /dev/null +++ b/src/signedData.ts @@ -0,0 +1,344 @@ +import type { Key } from '@chelonia/crypto' +import { deserializeKey, keyId, serializeKey, sign, verifySignature } from '@chelonia/crypto' +import sbp from '@sbp/sbp' +import { has } from 'turtledash' +import { ChelErrorSignatureError, ChelErrorSignatureKeyNotFound, ChelErrorSignatureKeyUnauthorized } from './errors.js' +import { blake32Hash } from './functions.js' +import type { ChelContractState } from './types.js' + +const rootStateFn = () => sbp('chelonia/rootState') + +export interface SignedData { + // The ID of the signing key used + signingKeyId: string, + // The unsigned data. For outgoing data, this is the original data given + // as input. For incoming data, signature verification will be attempted. + valueOf: () => T, + // The serialized _signed_ data. For outgoing data, signing will be + // attempted. For incoming data, this is the original data given as input. + // The `additionalData` parameter is only used for outgoing data, and binds + // the signed payload to additional information. + serialize: (additionalData?: string) => U & { _signedData: [string, string, string] }, + // Data needed to recreate signed data. 
+ // [contractID, data, height, additionalData] + context?: [string, U & { _signedData: [string, string, string] }, number, string], + // A string version of the serialized signed data (i.e., `JSON.stringify()`) + toString: (additionalData?: string) => string, + // For outgoing data, recreate SignedData using different data and the same + // parameters + recreate?: (data: T) => SignedData, + // For incoming data, this is an alias of `serialize`. Undefined for outgoing + // data. + toJSON?: () => U & { _signedData: [string, string, string] }, + // `get` and `set` can set additional (unsigned) fields within `SignedData` + get: (k: keyof U) => U[typeof k] | undefined, + set?: (k: keyof U, v: U[typeof k]) => void +} + +// `proto` & `wrapper` are utilities for `isSignedData` +const proto = Object.create(null, { + _isSignedData: { + value: true + } +}) as object + +const wrapper = (o: T): T => { + return Object.setPrototypeOf(o, proto) +} + +// `isSignedData` will return true for objects created by the various +// `signed*Data` functions. It's meant to implement functionality equivalent +// to `o instanceof SignedData` +export const isSignedData = (o: unknown): o is SignedData => { + return !!o && !!Object.getPrototypeOf(o)?._isSignedData +} + +// TODO: Check for permissions and allowedActions; this requires passing some +// additional context +const signData = function (stateOrContractID: string | ChelContractState, sKeyId: string, data: T, extraFields: U, additionalKeys: Record, additionalData: string): U & { + _signedData: [string, string, string] +} { + const state = typeof stateOrContractID === 'string' ? rootStateFn()[stateOrContractID] as ChelContractState : stateOrContractID + if (!additionalData) { + throw new ChelErrorSignatureError('Signature additional data must be provided') + } + // Has the key been revoked? 
If so, attempt to find an authorized key by the same name + const designatedKey = state?._vm?.authorizedKeys?.[sKeyId] + if (!designatedKey?.purpose.includes( + 'sig' + )) { + throw new ChelErrorSignatureKeyNotFound(`Signing key ID ${sKeyId} is missing or is missing signing purpose`) + } + if (designatedKey._notAfterHeight != null) { + const name = state._vm.authorizedKeys[sKeyId].name + const newKeyId = Object.values(state._vm?.authorizedKeys).find((v) => v._notAfterHeight == null && v.name === name && v.purpose.includes('sig'))?.id + + if (!newKeyId) { + throw new ChelErrorSignatureKeyNotFound(`Signing key ID ${sKeyId} has been revoked and no new key exists by the same name (${name})`) + } + + sKeyId = newKeyId + } + + const key = additionalKeys[sKeyId] + + if (!key) { + throw new ChelErrorSignatureKeyNotFound(`Missing signing key ${sKeyId}`) + } + + const deserializedKey = typeof key === 'string' ? deserializeKey(key) : key + + const serializedData = JSON.stringify(data, (_, v) => { + if (v && has(v, 'serialize') && typeof v.serialize === 'function') { + if (v.serialize.length === 1) { + return v.serialize(additionalData) + } else { + return v.serialize() + } + } + return v + }) + + const payloadToSign = blake32Hash(`${blake32Hash(additionalData)}${blake32Hash(serializedData)}`) + + return { + ...extraFields, + _signedData: [ + serializedData, + keyId(deserializedKey), + sign(deserializedKey, payloadToSign) + ] + } +} + +// TODO: Check for permissions and allowedActions; this requires passing the +// entire SPMessage +const verifySignatureData = function (state: ChelContractState, height: number, data: U & { _signedData: [string, string, string] }, additionalData: string): [string, T] { + if (!state) { + throw new ChelErrorSignatureError('Missing contract state') + } + + if (!isRawSignedData(data)) { + throw new ChelErrorSignatureError('Invalid message format') + } + + if (!Number.isSafeInteger(height) || height < 0) { + throw new ChelErrorSignatureError(`Height 
${height} is invalid or out of range`) + } + + const [serializedMessage, sKeyId, signature] = data._signedData + const designatedKey = state._vm?.authorizedKeys?.[sKeyId] + + if (!designatedKey || (height > designatedKey._notAfterHeight!) || (height < designatedKey._notBeforeHeight) || !designatedKey.purpose.includes( + 'sig' + )) { + // These errors (ChelErrorSignatureKeyUnauthorized) are serious and + // indicate a bug. Make them fatal when running integration tests + // (otherwise, they get swallowed and shown as a notification) + if (process.env.CI) { + console.error(`Key ${sKeyId} is unauthorized or expired for the current contract`, { designatedKey, height, state: JSON.parse(JSON.stringify(sbp('state/vuex/state'))) }) + // An unhandled promise rejection will cause Cypress to fail + Promise.reject(new ChelErrorSignatureKeyUnauthorized( + `Key ${sKeyId} is unauthorized or expired for the current contract` + )) + } + throw new ChelErrorSignatureKeyUnauthorized( + `Key ${sKeyId} is unauthorized or expired for the current contract` + ) + } + + // TODO + const deserializedKey = designatedKey.data + + const payloadToSign = blake32Hash(`${blake32Hash(additionalData)}${blake32Hash(serializedMessage)}`) + + try { + verifySignature(deserializedKey, payloadToSign, signature) + + const message = JSON.parse(serializedMessage) + + return [sKeyId, message] + } catch (e) { + throw new ChelErrorSignatureError((e as Error)?.message || e as string) + } +} + +export const signedOutgoingData = (stateOrContractID: string | ChelContractState, sKeyId: string, data: T, additionalKeys?: Record): SignedData => { + if (!stateOrContractID || data === undefined || !sKeyId) throw new TypeError('Invalid invocation') + + if (!additionalKeys) { + additionalKeys = rootStateFn().secretKeys + } + + const extraFields = Object.create(null) as U + + const boundStringValueFn = signData.bind(null, stateOrContractID, sKeyId, data, extraFields, additionalKeys!) 
+ const serializefn = (additionalData?: string) => boundStringValueFn(additionalData || '') as U & { _signedData: [string, string, string] } + + return wrapper({ + get signingKeyId () { + return sKeyId + }, + get serialize () { + return serializefn + }, + get toString () { + return (additionalData?: string) => JSON.stringify(this.serialize(additionalData)) + }, + get valueOf () { + return () => data + }, + get recreate () { + return (data: T) => signedOutgoingData(stateOrContractID, sKeyId, data, additionalKeys) + }, + get get () { + return (k: keyof U) => extraFields[k] + }, + get set () { + return (k: keyof U, v: U[typeof k]) => { + extraFields[k] = v + } + } + }) +} + +// Used for OP_CONTRACT as a state does not yet exist +export const signedOutgoingDataWithRawKey = (key: Key, data: T): SignedData => { + const sKeyId = keyId(key) + const state = { + _vm: { + authorizedKeys: { + [sKeyId]: { + purpose: ['sig'], + data: serializeKey(key, false), + _notBeforeHeight: 0, + _notAfterHeight: undefined + } + } + } + } as ChelContractState + + const extraFields = Object.create(null) + + const boundStringValueFn = signData.bind(null, state, sKeyId, data, extraFields, { [sKeyId]: key }) + const serializefn = (additionalData?: string) => boundStringValueFn(additionalData || '') as U & { _signedData: [string, string, string] } + + return wrapper({ + get signingKeyId () { + return sKeyId + }, + get serialize () { + return serializefn + }, + get toString () { + return (additionalData?: string) => JSON.stringify(this.serialize(additionalData)) + }, + get valueOf () { + return () => data + }, + get recreate () { + return (data: T) => signedOutgoingDataWithRawKey(key, data) + }, + get get () { + return (k: keyof U) => extraFields[k] + }, + get set () { + return (k: keyof U, v: U[typeof k]) => { + extraFields[k] = v + } + } + }) +} + +export const signedIncomingData = (contractID: string, state: object | null | undefined, data: U & { _signedData: [string, string, string] }, height: 
number, additionalData: string, mapperFn?: (value: V) => T): SignedData => { + const stringValueFn = () => data + let verifySignedValue: [string, T] + const verifySignedValueFn = () => { + if (verifySignedValue) { + return verifySignedValue[1] + } + verifySignedValue = verifySignatureData(state || rootStateFn()[contractID], height, data, additionalData) as [string, T] + if (mapperFn) verifySignedValue[1] = mapperFn(verifySignedValue[1] as unknown as V) + return verifySignedValue[1] + } + + return wrapper({ + get signingKeyId () { + if (verifySignedValue) return verifySignedValue[0] + return signedDataKeyId(data) + }, + get serialize () { + return stringValueFn + }, + get context (): [string, U & { _signedData: [string, string, string] }, number, string] { + return [contractID, data, height, additionalData] + }, + get toString () { + return () => JSON.stringify(this.serialize()) + }, + get valueOf () { + return verifySignedValueFn + }, + get toJSON () { + return this.serialize + }, + get get () { + return (k: keyof U) => k !== '_signedData' ? 
data[k] : undefined + } + }) +} + +export const signedDataKeyId = (data: unknown): string => { + if (!isRawSignedData(data)) { + throw new ChelErrorSignatureError('Invalid message format') + } + + return data._signedData[1] +} + +export const isRawSignedData = (data: unknown): data is { _signedData: [string, string, string ] } => { + if (!data || typeof data !== 'object' || !has(data, '_signedData') || !Array.isArray((data as { _signedData: unknown })._signedData) || (data as { _signedData: unknown[] })._signedData.length !== 3 || (data as { _signedData: unknown[] })._signedData.map(v => typeof v).filter(v => v !== 'string').length !== 0) { + return false + } + + return true +} + +// WARNING: The following function (rawSignedIncomingData) will not check signatures +export const rawSignedIncomingData = (data: U & { _signedData: [string, string, string] }): SignedData => { + if (!isRawSignedData(data)) { + throw new ChelErrorSignatureError('Invalid message format') + } + + const stringValueFn = () => data + let verifySignedValue: [string, T] + const verifySignedValueFn = () => { + if (verifySignedValue) { + return verifySignedValue[1] + } + verifySignedValue = [data._signedData[1], JSON.parse(data._signedData[0])] + return verifySignedValue[1] + } + + return wrapper({ + get signingKeyId () { + if (verifySignedValue) return verifySignedValue[0] + return signedDataKeyId(data) + }, + get serialize () { + return stringValueFn + }, + get toString () { + return () => JSON.stringify(this.serialize()) + }, + get valueOf () { + return verifySignedValueFn + }, + get toJSON () { + return this.serialize + }, + get get () { + return (k: keyof U) => k !== '_signedData' ? 
data[k] : undefined + } + }) +} diff --git a/src/time-sync.ts b/src/time-sync.ts new file mode 100644 index 0000000..cb9bcaf --- /dev/null +++ b/src/time-sync.ts @@ -0,0 +1,122 @@ +import sbp from '@sbp/sbp' +import type { CheloniaContext } from './types.js' + +// `wallBase` is the base used to calculate wall time (i.e., time elapsed as one +// would get from, e.g., looking a clock hanging from a wall). +// Although optimistically +// it has a default value to local time, it'll be updated to the server's time +// once `chelonia/private/startClockSync` is called +// From Wikipedia: 'walltime is the actual time taken from the start of a +// computer program to the end. In other words, it is the difference between +// the time at which a task finishes and the time at which the task started.' +let wallBase = Date.now() +// `monotonicBase` is the base used to calculate an offset to apply to `wallBase` +// to estimate the server's current wall time. +let monotonicBase = performance.now() +// `undefined` means the sync process has been stopped, `null` that the current +// request has finished +let resyncTimeout: ReturnType | null | undefined +let watchdog: ReturnType | null | undefined + +const syncServerTime = async function (this: CheloniaContext) { + // Get our current monotonic time + const startTime = performance.now() + // Now, ask the server for the time + const time = await this.config.fetch(`${this.config.connectionURL}/time`, { signal: this.abortController.signal }) + const requestTimeElapsed = performance.now() + if (requestTimeElapsed - startTime > 8000) { + throw new Error('Error fetching server time: request took too long') + } + // If the request didn't succeed, report it + if (!time.ok) throw new Error('Error fetching server time') + const serverTime = (new Date(await time.text())).valueOf() + // If the value could not be parsed, report that as well + if (Number.isNaN(serverTime)) throw new Error('Unable to parse server time') + // Adjust `wallBase` based 
on the elapsed request time. We can't know + // how long it took for the server to respond, but we can estimate that it's + // about half the time from the moment we made the request. + const newMonotonicBase = performance.now() + wallBase = + serverTime + + (requestTimeElapsed - startTime) / 2 + + // Also take into account the time elapsed between `requestTimeElapsed` + // and this line (which should be very little) + (newMonotonicBase - requestTimeElapsed) + monotonicBase = newMonotonicBase +} + +export default sbp('sbp/selectors/register', { + 'chelonia/private/startClockSync': function (this: CheloniaContext) { + if (resyncTimeout !== undefined) { + throw new Error('chelonia/private/startClockSync has already been called') + } + // Default re-sync every 5 minutes + const resync = (delay: number = 300000) => { + // If there's another time sync process in progress, don't do anything + if (resyncTimeout !== null) return + const timeout = setTimeout(() => { + // Get the server time + syncServerTime.call(this).then(() => { + // Mark the process as finished + if (resyncTimeout === timeout) resyncTimeout = null + // And then restart the listener + resync() + }).catch(e => { + // If there was an error, log it and possibly attempt again + if (resyncTimeout === timeout) { + // In this case, it was the current task that failed + resyncTimeout = null + console.error('Error re-syncing server time; will re-attempt in 5s', e) + // Call resync again, with a shorter delay + setTimeout(() => resync(0), 5000) + } else { + // If there is already another attempt, just log it + console.error('Error re-syncing server time; another attempt is in progress', e) + } + }) + }, delay) + resyncTimeout = timeout + } + + let wallLast = Date.now() + let monotonicLast = performance.now() + + // Watchdog to ensure our time doesn't drift. 
Periodically check for + // differences between the elapsed wall time and the elapsed monotonic + // time + watchdog = setInterval(() => { + const wallNow = Date.now() + const monotonicNow = performance.now() + const difference = Math.abs(Math.abs((wallNow - wallLast)) - Math.abs((monotonicNow - monotonicLast))) + // Tolerate up to a 10ms difference + if (difference > 10) { + if (resyncTimeout != null) clearTimeout(resyncTimeout) + resyncTimeout = null + resync(0) + } + wallLast = wallNow + monotonicLast = monotonicNow + }, 10000) + + // Start the sync process + resyncTimeout = null + resync(0) + }, + 'chelonia/private/stopClockSync': () => { + if (resyncTimeout !== undefined) { + if (watchdog != null) clearInterval(watchdog) + if (resyncTimeout != null) clearTimeout(resyncTimeout) + watchdog = undefined + resyncTimeout = undefined + } + }, + // Get an estimate of the server's current time based on the time elapsed as + // measured locally (using a monotonic clock), which is used as an offset, and + // a previously retrieved server time. 
The time value is returned as a UNIX + // _millisecond_ timestamp (milliseconds since 1 Jan 1970 00:00:00 UTC) + 'chelonia/time': function (): number { + const monotonicNow = performance.now() + const wallNow = wallBase - monotonicBase + monotonicNow + return Math.round(wallNow) + } +}) as string[] diff --git a/src/types.ts b/src/types.ts new file mode 100644 index 0000000..c28ad44 --- /dev/null +++ b/src/types.ts @@ -0,0 +1,289 @@ +/* eslint-disable no-use-before-define */ + +import type { Key } from '@chelonia/crypto' +import type sbp from '@sbp/sbp' +import type { SPMessage, SPMsgDirection, SPOpType } from './SPMessage.js' +import type { EncryptedData } from './encryptedData.js' +import type { PubSubClient } from './pubsub/index.js' + +export type JSONType = + | null + | string + | number + | boolean + | JSONObject + | JSONArray; +export interface JSONObject { + [x: string]: JSONType; +} +export type JSONArray = Array; + +export type ResType = + | ResTypeErr | ResTypeOK | ResTypeAlready + | ResTypeSub | ResTypeUnsub | ResTypeEntry | ResTypePub +export type ResTypeErr = 'error' +export type ResTypeOK = 'success' +export type ResTypeAlready = 'already' +export type ResTypeSub = 'sub' +export type ResTypeUnsub = 'unsub' +export type ResTypePub = 'pub' +export type ResTypeEntry = 'entry' + +export type CheloniaConfig = { + // eslint-disable-next-line no-unused-vars + [_ in `preOp_${SPOpType}`]?: (message: SPMessage, state: ChelContractState) => boolean; +} & { + // eslint-disable-next-line no-unused-vars + [_ in `postOp_${SPOpType}`]?: (message: SPMessage, state: ChelContractState) => boolean; +} & { + connectionURL: string; + stateSelector: string; + contracts: { + defaults: { + // '' => resolved module import + modules: Record; + exposedGlobals: object; + allowedDomains: string[]; + allowedSelectors: string[]; + preferSlim: boolean; + }; + // TODO: Currently not used + overrides: object; + manifests: Record; + }; + whitelisted: (action: string) => boolean; + 
reactiveSet: (obj: T, key: keyof T, value: T[typeof key]) => void; + fetch: typeof fetch; + reactiveDel: (obj: T, key: keyof T) => void; + acceptAllMessages: boolean; + skipActionProcessing: boolean; + skipSideEffects: boolean; + strictProcessing: boolean; + // Strict ordering will throw on past events with ChelErrorAlreadyProcessed + // Similarly, future events will not be reingested and will throw + // with ChelErrorDBBadPreviousHEAD + strictOrdering: boolean; + connectionOptions: { + maxRetries: number; + reconnectOnTimeout: boolean; + }; preOp?: (message: SPMessage, state: ChelContractState) => boolean; + postOp?: (message: SPMessage, state: ChelContractState) => boolean; + hooks: Partial<{ + preHandleEvent: { (message: SPMessage): Promise; } | null; + postHandleEvent: { (message: SPMessage): Promise; } | null; + processError: { (e: unknown, message: SPMessage | null | undefined, meta: object | null | undefined): void; } | null; + sideEffectError: { (e: unknown, message?: SPMessage): void; } | null; + handleEventError: { (e: unknown, message?: SPMessage): void; } | null; + syncContractError: { (e: unknown, contractID: string): void; } | null; + pubsubError: { (e: unknown, socket: PubSubClient): void; } | null; + }>; + skipDecryptionAttempts: boolean; + unwrapMaybeEncryptedData: (data: T | EncryptedData) => { + encryptionKeyId: string | null; + data: T; + } | undefined; +}; + +export type SendMessageHooks = Partial<{ + prepublish: (entry: SPMessage) => void | Promise, + onprocessed: (entry: SPMessage) => void, + preSendCheck: (entry: SPMessage, state: ChelContractState) => boolean | Promise, + beforeRequest: (newEntry: SPMessage, oldEntry: SPMessage) => void | Promise, + postpublish: (entry: SPMessage) => void | Promise, +}> + +export type ChelContractProcessMessageObject = Readonly<{ + data: object, + meta: object, + hash: string, + height: number, + contractID: string, + direction: SPMsgDirection, + signingKeyId: string, + signingContractID: string, + 
innerSigningKeyId?: string | null | undefined, + innerSigningContractID?: string | null | undefined +}> +export type ChelContractSideeffectMutationObject = Readonly<{ + data: object, + meta: object, + hash: string, + height: number, + contractID: string, + description: string, + direction: SPMsgDirection, + signingKeyId: string, + signingContractID: string, + innerSigningKeyId?: string | null | undefined, + innerSigningContractID?: string | null | undefined +}> + +export type CheloniaContractCtx = { + getters: Record (state: ChelContractState, obj: T) => T[K]>, + name: string, + manifest: string, + metadata: { + create: () => object | Promise + validate: (meta: object, { state, contractID, ...gProxy }: { state: ChelContractState, contractID: string }) => void | Promise, + } + sbp: typeof sbp + state: (contractID: string) => ChelContractState, + actions: Record void | Promise + process: (message: ChelContractProcessMessageObject, { state, ...gProxy }: { state: ChelContractState }) => void | Promise + sideEffect?: (mutation: ChelContractSideeffectMutationObject, { state, ...gProxy }: { state: ChelContractState }) => void | Promise + }>, + methods: Record +} +export type CheloniaContext = { + config: CheloniaConfig, + _instance: object, + abortController: AbortController, + state: { + contracts: Record, + pending: string[], + [x: string]: unknown + }, + manifestToContract: Record, + whitelistedActions: Record, + currentSyncs: Record, + postSyncOperations: Record>>, + sideEffectStacks: Record[]>, + sideEffectStack: (contractID: string) => Array>, + setPostSyncOp: (contractID: string, key: string, op: Parameters) => void, + transientSecretKeys: Record, + ephemeralReferenceCount: Record, + subscriptionSet: Set, + pending: { contractID: string }[], + pubsub: import('./pubsub/index.js').PubSubClient, + contractsModifiedListener: (contracts: Set, { added, removed }: { added: string[], removed: string[] }) => void, + defContractSelectors: string[], + defContractManifest: 
string, + defContractSBP: typeof sbp, + defContract: CheloniaContractCtx +} + +export type ChelContractManifestBody = { + name: string, + version: string, + contract: { hash: string, file: string }, + contractSlim: { hash: string, file: string }, + signingKeys: string[] +} + +export type ChelContractManifest = { + head: string, // '{ manifestVersion : 1.0.0" }' + body: string // 'ChelContractManifestBody' + signature: { + keyId: string, + value: string + } +} + +export type ChelFileManifest = { + version: '1.0.0', + type?: string, + meta?: unknown, + cipher: string, + 'cipher-params'?: unknown, + size: number, + chunks: [number, string][], + 'name-map'?: Record, + alternatives?: Record +} + +export type ChelContractKey = { + id: string, + name: string, + purpose: string[], + ringLevel: number, + permissions: '*' | string[], + allowedActions?: '*' | string[], + _notBeforeHeight: number, + _notAfterHeight?: number | undefined, + _private?: string, + foreignKey?: string, + meta?: { + quantity?: number, + expires?: number, + private?: { + transient?: boolean, + content?: string, + shareable?: boolean, + oldKeys?: string, + }, + keyRequest?: { + contractID: string, + reference: string, + responded: string + } + } + data: string +} + +export type ChelContractState = { + _vm: { + authorizedKeys: Record, + invites?: Record, + type: string, + pendingWatch?: Record, + keyshares?: Record, + sharedKeyIds?: { id: string, contractID: string, height: number, keyRequestHash?: string, keyRequestHeight?: number }[], + pendingKeyshares?: Record, + props?: Record + }, + _volatile?: { + pendingKeyRequests?: { + contractID: string, + hash: string, + name: string, + reference?: string + }[], + pendingKeyRevocations?: Record, + watch?: [fkName: string, fkId: string][], + dirty?: boolean, + resyncing?: boolean, + } +} + +export type ChelRootState = { + [x: string]: ChelContractState +} & { + contracts: Record +} + +export type Response = { + type: ResType; + err?: string; + data?: JSONType 
+} + +export type ParsedEncryptedOrUnencryptedMessage = Readonly<{ + contractID: string, + innerSigningKeyId?: string | null | undefined, + encryptionKeyId?: string | null | undefined, + signingKeyId: string, + data: T, + signingContractID?: string | null | undefined, + innerSigningContractID?: string | null | undefined, +}> + +export type ChelKvOnConflictCallback = ( + args: { contractID: string, key: string, failedData?: JSONType, status: number, etag: string | null | undefined, currentData: JSONType | undefined, currentValue: ParsedEncryptedOrUnencryptedMessage | undefined } +) => Promise<[JSONType, string]> diff --git a/src/utils.ts b/src/utils.ts new file mode 100644 index 0000000..0b3ca0e --- /dev/null +++ b/src/utils.ts @@ -0,0 +1,885 @@ +import type { Key } from '@chelonia/crypto' +import { deserializeKey, serializeKey, sign, verifySignature } from '@chelonia/crypto' +import sbp from '@sbp/sbp' +import { Buffer } from 'buffer' +import { has, omit } from 'turtledash' +import type { ProtoSPOpActionUnencrypted, SPKey, SPKeyPurpose, SPKeyUpdate, SPOpActionUnencrypted, SPOpAtomic, SPOpKeyAdd, SPOpKeyDel, SPOpKeyUpdate, SPOpRaw, SPOpValue } from './SPMessage.js' +import { SPMessage } from './SPMessage.js' +import { Secret } from './Secret.js' +import { INVITE_STATUS } from './constants.js' +import type { EncryptedData } from './encryptedData.js' +import { ChelErrorForkedChain, ChelErrorResourceGone, ChelErrorUnexpectedHttpResponseCode, ChelErrorWarning } from './errors.js' +import { CONTRACT_IS_PENDING_KEY_REQUESTS } from './events.js' +import { b64ToStr } from './functions.js' +import type { SignedData } from './signedData.js' +import { isSignedData } from './signedData.js' +import { ChelContractKey, ChelContractState, ChelRootState, CheloniaConfig, CheloniaContext, JSONType } from './types.js' + +const MAX_EVENTS_AFTER = Number.parseInt(process.env.MAX_EVENTS_AFTER || '', 10) || Infinity + +export const findKeyIdByName = (state: ChelContractState, name: 
string): string | null | undefined => state._vm?.authorizedKeys && Object.values((state._vm.authorizedKeys)).find((k) => k.name === name && k._notAfterHeight == null)?.id + +export const findForeignKeysByContractID = (state: ChelContractState, contractID: string): string[] | undefined => state._vm?.authorizedKeys && ((Object.values((state._vm.authorizedKeys)))).filter((k) => k._notAfterHeight == null && k.foreignKey?.includes(contractID)).map(k => k.id) + +export const findRevokedKeyIdsByName = (state: ChelContractState, name: string): string[] => state._vm?.authorizedKeys && ((Object.values((state._vm.authorizedKeys) || {}))).filter((k) => k.name === name && k._notAfterHeight != null).map(k => k.id) + +export const findSuitableSecretKeyId = (state: ChelContractState, permissions: '*' | string[], purposes: SPKeyPurpose[], ringLevel?: number, allowedActions?: '*' | string[]): string | null | undefined => { + return state._vm?.authorizedKeys && + Object.values((state._vm.authorizedKeys)) + .filter((k) => { + return k._notAfterHeight == null && + (k.ringLevel <= (ringLevel ?? Number.POSITIVE_INFINITY)) && + sbp('chelonia/haveSecretKey', k.id) && + (Array.isArray(permissions) + ? permissions.reduce((acc, permission) => + acc && (k.permissions === '*' || k.permissions.includes(permission)), true + ) + : permissions === k.permissions + ) && + purposes.reduce((acc, purpose) => acc && k.purpose.includes(purpose), true) && + (Array.isArray(allowedActions) + ? allowedActions.reduce((acc, action) => + acc && (k.allowedActions === '*' || !!k.allowedActions?.includes(action)), true + ) + : allowedActions ? 
allowedActions === k.allowedActions : true + ) + }) + .sort((a, b) => b.ringLevel - a.ringLevel)[0]?.id +} + +export const findContractIDByForeignKeyId = (state: ChelContractState, keyId: string): string | null | undefined => { + let fk: string | undefined + if (!keyId || !(fk = state?._vm?.authorizedKeys?.[keyId]?.foreignKey)) return + + try { + const fkUrl = new URL(fk) + return fkUrl.pathname + } catch {} +} + +// TODO: Resolve inviteKey being added (doesn't have krs permission) +export const findSuitablePublicKeyIds = (state: ChelContractState, permissions: '*' | string[], purposes: SPKeyPurpose[], ringLevel?: number): string[] | null | undefined => { + return state._vm?.authorizedKeys && + Object.values((state._vm.authorizedKeys)).filter((k) => + (k._notAfterHeight == null) && + (k.ringLevel <= (ringLevel ?? Number.POSITIVE_INFINITY)) && + (Array.isArray(permissions) + ? permissions.reduce((acc, permission) => acc && (k.permissions === '*' || k.permissions.includes(permission)), true) + : permissions === k.permissions + ) && + purposes.reduce((acc, purpose) => acc && k.purpose.includes(purpose), true)) + .sort((a, b) => b.ringLevel - a.ringLevel) + .map((k) => k.id) +} + +const validateActionPermissions = (msg: SPMessage, signingKey: SPKey | ChelContractKey, state: { _vm: { authorizedKeys: ChelContractState['_vm']['authorizedKeys'] } }, opT: string, opV: SPOpActionUnencrypted) => { + const data = isSignedData(opV) + ? 
opV.valueOf() as ProtoSPOpActionUnencrypted + : opV as ProtoSPOpActionUnencrypted + + if ( + signingKey.allowedActions !== '*' && ( + !Array.isArray(signingKey.allowedActions) || + !signingKey.allowedActions.includes(data.action) + ) + ) { + logEvtError(msg, `Signing key ${signingKey.id} is not allowed for action ${data.action}`) + return false + } + + if (isSignedData(opV)) { + const s = opV as SignedData + const innerSigningKey = state._vm?.authorizedKeys?.[s.signingKeyId] + + // For outgoing messages, we may be using an inner signing key that isn't + // available for us to see. In this case, we ignore the missing key. + // For incoming messages, we must check permissions and a missing + // key means no permissions. + if (!innerSigningKey && msg._direction === 'outgoing') return true + + if ( + !innerSigningKey || + !Array.isArray(innerSigningKey.purpose) || + !innerSigningKey.purpose.includes('sig') || + (innerSigningKey.permissions !== '*' && + ( + !Array.isArray(innerSigningKey.permissions) || + !innerSigningKey.permissions.includes(opT + '#inner') + ) + ) + ) { + logEvtError(msg, `Signing key ${s.signingKeyId} is missing permissions for operation ${opT}`) + return false + } + + if ( + innerSigningKey.allowedActions !== '*' && ( + !Array.isArray(innerSigningKey.allowedActions) || + !innerSigningKey.allowedActions.includes(data.action + '#inner') + ) + ) { + logEvtError(msg, `Signing key ${innerSigningKey.id} is not allowed for action ${data.action}`) + return false + } + } + + return true +} + +export const validateKeyPermissions = (msg: SPMessage, config: CheloniaConfig, state: { _vm: { authorizedKeys: ChelContractState['_vm']['authorizedKeys'] } }, signingKeyId: string, opT: string, opV: SPOpValue): boolean => { + const signingKey = state._vm?.authorizedKeys?.[signingKeyId] + if ( + !signingKey || + !Array.isArray(signingKey.purpose) || + !signingKey.purpose.includes('sig') || + (signingKey.permissions !== '*' && + ( + !Array.isArray(signingKey.permissions) 
 ||
        !signingKey.permissions.includes(opT)
      )
    )
  ) {
    logEvtError(msg, `Signing key ${signingKeyId} is missing permissions for operation ${opT}`)
    return false
  }

  // OP_ACTION_UNENCRYPTED payloads can be inspected directly.
  if (
    opT === SPMessage.OP_ACTION_UNENCRYPTED &&
    !validateActionPermissions(msg, signingKey, state, opT, opV as SPOpActionUnencrypted)
  ) {
    return false
  }

  // Encrypted actions are only validated when action processing is enabled;
  // `.valueOf()` yields the decrypted inner action payload.
  if (
    !config.skipActionProcessing &&
    opT === SPMessage.OP_ACTION_ENCRYPTED &&
    !validateActionPermissions(msg, signingKey, state, opT, opV.valueOf() as SPOpActionUnencrypted)
  ) {
    return false
  }

  return true
}

// NOTE(review): generic type parameters appear to have been stripped from this
// chunk by the extraction (e.g. bare `Record`, `SignedData`, `Promise`,
// `ReadableStreamDefaultReader`); restore them from upstream before compiling.

/**
 * Validates that `signingKey` is allowed to add (or, via
 * `validateKeyUpdatePermissions`, update) each key in `v` on the contract.
 * Enforces three invariants per key: a private signing key may only add
 * encrypted (private) keys; the added key's ringLevel must not be lower
 * (more privileged) than the signing key's; and the added key's
 * `permissions`/`allowedActions` must be a subset of the signing key's
 * (unless the signing key holds the `'*'` wildcard).
 * Throws on any violation; silently skips entries that cannot be decrypted.
 */
export const validateKeyAddPermissions = function (this: CheloniaContext, contractID: string, signingKey: ChelContractKey, state: ChelContractState, v: (ChelContractKey | SPKey | EncryptedData)[], skipPrivateCheck?: boolean) {
  // Normalize to Set for O(1) subset checks; `'*'` (wildcard) is kept as-is.
  const signingKeyPermissions = Array.isArray(signingKey.permissions) ? new Set(signingKey.permissions) : signingKey.permissions
  const signingKeyAllowedActions = Array.isArray(signingKey.allowedActions) ? new Set(signingKey.allowedActions) : signingKey.allowedActions
  // NOTE(review): 'Singing' is a typo for 'Signing' in this runtime string;
  // left unchanged here in case it is matched elsewhere.
  if (!state._vm?.authorizedKeys?.[signingKey.id]) throw new Error('Singing key for OP_KEY_ADD or OP_KEY_UPDATE must exist in _vm.authorizedKeys. contractID=' + contractID + ' signingKeyId=' + signingKey.id)
  const localSigningKey = state._vm.authorizedKeys[signingKey.id]
  v.forEach(wk => {
    const data = this.config.unwrapMaybeEncryptedData(wk)
    // Entries that can't be unwrapped/decrypted are ignored, not rejected.
    if (!data) return
    const k = data.data as SPKey
    if (!skipPrivateCheck && signingKey._private && !data.encryptionKeyId) {
      throw new Error('Signing key is private but it tried adding a public key')
    }
    // Lower ringLevel = more privileged; a key may not mint keys above its own rank.
    if (!Number.isSafeInteger(k.ringLevel) || k.ringLevel < localSigningKey.ringLevel) {
      throw new Error('Signing key has ringLevel ' + localSigningKey.ringLevel + ' but attempted to add or update a key with ringLevel ' + k.ringLevel)
    }
    if (signingKeyPermissions !== '*') {
      // k.permissions must be a subset of the signing key's permissions.
      if (!Array.isArray(k.permissions) || !k.permissions.reduce((acc, cv) => acc && signingKeyPermissions.has(cv), true)) {
        throw new Error('Unable to add or update a key with more permissions than the signing key. signingKey permissions: ' + String(signingKey?.permissions) + '; key add permissions: ' + String(k.permissions))
      }
    }
    if (signingKeyAllowedActions !== '*' && k.allowedActions) {
      // Same subset rule for allowedActions.
      if (!signingKeyAllowedActions || !Array.isArray(k.allowedActions) || !k.allowedActions.reduce((acc, cv) => acc && signingKeyAllowedActions.has(cv), true)) {
        throw new Error('Unable to add or update a key with more allowed actions than the signing key. signingKey allowed actions: ' + String(signingKey?.allowedActions) + '; key add allowed actions: ' + String(k.allowedActions))
      }
    }
  })
}

/**
 * Validates that `signingKey` may delete each key ID in `v` (OP_KEY_DEL).
 * Rules: every referenced key must exist; a private signing key may not
 * delete keys; the `_private` flag of the target must match whether the
 * deletion entry itself is encrypted; and the target's ringLevel must not
 * be lower (more privileged) than the signing key's. Throws on violation;
 * undecryptable entries are skipped.
 */
export const validateKeyDelPermissions = function (this: CheloniaContext, contractID: string, signingKey: ChelContractKey, state: ChelContractState, v: (string | EncryptedData)[]) {
  // NOTE(review): 'Singing' typo in the runtime string, left unchanged.
  if (!state._vm?.authorizedKeys?.[signingKey.id]) throw new Error('Singing key for OP_KEY_DEL must exist in _vm.authorizedKeys. contractID=' + contractID + ' signingKeyId=' + signingKey.id)
  const localSigningKey = state._vm.authorizedKeys[signingKey.id]
  v
    .forEach((wid) => {
      const data = this.config.unwrapMaybeEncryptedData(wid)
      if (!data) return
      const id = data.data
      const k = state._vm.authorizedKeys[id]
      if (!k) {
        throw new Error('Nonexisting key ID ' + id)
      }
      if (signingKey._private) {
        throw new Error('Signing key is private')
      }
      // A private key must be deleted via an encrypted entry and vice versa.
      if (!k._private !== !data.encryptionKeyId) {
        throw new Error('_private attribute must be preserved')
      }
      if (!Number.isSafeInteger(k.ringLevel) || k.ringLevel < localSigningKey.ringLevel) {
        throw new Error('Signing key has ringLevel ' + localSigningKey.ringLevel + ' but attempted to remove a key with ringLevel ' + k.ringLevel)
      }
    })
}

/**
 * Validates OP_KEY_UPDATE entries and builds the resulting updated keys.
 * Each entry must reference an existing key (`oldKeyId`), preserve its
 * `_private` flag and `name`, and either provide both `id` and `data` or
 * neither. Returns a tuple of [updated keys, map of new key ID -> old key ID].
 * The produced keys are re-checked with `validateKeyAddPermissions`
 * (with the private-key check skipped, since updates may legitimately be
 * issued by the same key material).
 * Throws `ChelErrorWarning` for a missing old key and `Error` for other
 * violations.
 */
export const validateKeyUpdatePermissions = function (this: CheloniaContext, contractID: string, signingKey: ChelContractKey, state: ChelContractState, v: (SPKeyUpdate | EncryptedData)[]): [ChelContractKey[], Record] {
  const updatedMap = Object.create(null) as Record
  const keys = v.map((wuk): ChelContractKey | undefined => {
    const data = this.config.unwrapMaybeEncryptedData(wuk)
    if (!data) return undefined
    const uk = data.data

    const existingKey = state._vm.authorizedKeys[uk.oldKeyId]
    if (!existingKey) {
      throw new ChelErrorWarning('Missing old key ID ' + uk.oldKeyId)
    }
    if (!existingKey._private !== !data.encryptionKeyId) {
      throw new Error('_private attribute must be preserved')
    }
    if (uk.name !== existingKey.name) {
      throw new Error('Name cannot be updated')
    }
    // Rotating key material requires a new id AND new data together.
    if (!uk.id !== !uk.data) {
      throw new Error('Both or none of the id and data attributes must be provided. Old key ID: ' + uk.oldKeyId)
    }
    // If the old key carried private material, the replacement must too.
    if (uk.data && existingKey.meta?.private && !(uk.meta?.private)) {
      throw new Error('Missing private key. Old key ID: ' + uk.oldKeyId)
    }
    if (uk.id && uk.id !== uk.oldKeyId) {
      updatedMap[uk.id] = uk.oldKeyId
    }
    // Discard `_notAfterHeight` and `_notBeforeHeight`, since retaining them
    // can cause issues reprocessing messages.
    // An example is reprocessing old messages in a chatroom using
    // `chelonia/in/processMessage`: cloning `_notAfterHeight` will break key
    // rotations, since the new key will have the same expiration value as the
    // old key (the new key is supposed to have no expiration height).
    const updatedKey = omit(existingKey, ['_notAfterHeight', '_notBeforeHeight']) as ChelContractKey
    // Set the corresponding updated attributes
    if (uk.permissions) {
      updatedKey.permissions = uk.permissions
    }
    if (uk.allowedActions) {
      updatedKey.allowedActions = uk.allowedActions
    }
    if (uk.purpose) {
      updatedKey.purpose = uk.purpose as SPKeyPurpose[]
    }
    if (uk.meta) {
      updatedKey.meta = uk.meta
    }
    if (uk.id) {
      updatedKey.id = uk.id
    }
    if (uk.data) {
      updatedKey.data = uk.data
    }
    return updatedKey
    // eslint-disable-next-line no-use-before-define
  }).filter(Boolean as unknown as (key: unknown) => key is ChelContractKey)
  validateKeyAddPermissions.call(this, contractID, signingKey, state, keys, true)
  return [keys, updatedMap]
}

/**
 * Processes keys being added to a contract (OP_KEY_ADD / key sections of
 * other ops): decrypts and stores secret key material, applies special rules
 * for `#sak` and `#inviteKey-*` keys, and registers pending key requests for
 * other contracts. Side effects that must run outside message processing are
 * deferred onto `internalSideEffectStack`.
 */
export const keyAdditionProcessor = function (this: CheloniaContext, _msg: SPMessage, hash: string, keys: (ChelContractKey | SPKey | EncryptedData)[], state: ChelContractState, contractID: string, _signingKey: ChelContractKey, internalSideEffectStack?: (({ state, message }: { state: ChelContractState, message: SPMessage }) => void)[]) {
  const decryptedKeys = []
  const keysToPersist: { key: Key, transient: boolean }[] = []

  // Stores a decrypted secret key transiently now; non-transient keys are
  // additionally queued in `keysToPersist` for a later persistent store.
  const storeSecretKey = (key: SPKey | ChelContractKey, decryptedKey: string) => {
    const decryptedDeserializedKey = deserializeKey(decryptedKey)
    const transient = !!key.meta?.private?.transient
    sbp('chelonia/storeSecretKeys', new Secret([{
      key: decryptedDeserializedKey,
      // We always set this to true because this could be done from
      // an outgoing message
      transient: true
    }]))
    if (!transient) {
      keysToPersist.push({ key: decryptedDeserializedKey, transient })
    }
  }

  for (const wkey of keys) {
    const data = this.config.unwrapMaybeEncryptedData(wkey)
    if (!data) continue
    const key = data.data
    let decryptedKey: string | null | undefined
    // Does the key have key.meta?.private? If so, attempt to decrypt it
    if (key.meta?.private && key.meta.private.content) {
      if (
        key.id &&
        key.meta.private.content &&
        !sbp('chelonia/haveSecretKey', key.id, !key.meta.private.transient)
      ) {
        const decryptedKeyResult = this.config.unwrapMaybeEncryptedData(key.meta.private.content)
        // Ignore data that couldn't be decrypted
        if (decryptedKeyResult) {
          // Data aren't encrypted
          if (decryptedKeyResult.encryptionKeyId == null) {
            throw new Error('Expected encrypted data but got unencrypted data for key with ID: ' + key.id)
          }
          decryptedKey = decryptedKeyResult.data
          decryptedKeys.push([key.id, decryptedKey])
          storeSecretKey(key, decryptedKey)
        }
      }
    }

    // Is this a #sak (shelter authorization key)? Enforce its invariants:
    // unencrypted, no permissions, single 'sak' purpose, ringLevel 0.
    if (key.name === '#sak') {
      if (data.encryptionKeyId) {
        throw new Error('#sak may not be encrypted')
      }
      if (key.permissions && (!Array.isArray(key.permissions) || key.permissions.length !== 0)) {
        throw new Error('#sak may not have permissions')
      }
      if (!Array.isArray(key.purpose) || key.purpose.length !== 1 || key.purpose[0] !== 'sak') {
        throw new Error("#sak must have exactly one purpose: 'sak'")
      }
      if (key.ringLevel !== 0) {
        throw new Error('#sak must have ringLevel 0')
      }
    }

    // Is this a an invite key? If so, run logic for invite keys and invitation
    // accounting
    if (key.name.startsWith('#inviteKey-')) {
      if (!state._vm.invites) state._vm.invites = Object.create(null)
      // Prefer the freshly decrypted secret; fall back to a transient copy.
      const inviteSecret = decryptedKey || (
        has(this.transientSecretKeys, key.id)
          ? serializeKey(this.transientSecretKeys[key.id], true)
          : undefined
      )
      state._vm.invites![key.id] = {
        status: INVITE_STATUS.VALID,
        initialQuantity: key.meta!.quantity!,
        quantity: key.meta!.quantity!,
        expires: key.meta!.expires!,
        inviteSecret: inviteSecret!,
        responses: []
      }
    }

    // Is this KEY operation the result of requesting keys for another contract?
    if (key.meta?.keyRequest?.contractID && findSuitableSecretKeyId(state, [SPMessage.OP_KEY_ADD], ['sig'])) {
      const data = this.config.unwrapMaybeEncryptedData(key.meta.keyRequest.contractID)

      // Are we subscribed to this contract?
      // If we are not subscribed to the contract, we don't set pendingKeyRequests because we don't need that contract's state
      // Setting pendingKeyRequests in these cases could result in issues
      // when a corresponding OP_KEY_SHARE is received, which could trigger subscribing to this previously unsubscribed to contract
      if (data && internalSideEffectStack) {
        const keyRequestContractID = data.data
        const reference = this.config.unwrapMaybeEncryptedData(key.meta.keyRequest.reference)

        // Since now we'll make changes to keyRequestContractID, we need to
        // do this while no other operations are running for that
        // contract
        internalSideEffectStack.push(() => {
          sbp('chelonia/private/queueEvent', keyRequestContractID, () => {
            const rootState = sbp(this.config.stateSelector) as ChelRootState

            const originatingContractState = rootState[contractID] as ChelContractState
            if (sbp('chelonia/contract/hasKeyShareBeenRespondedBy', originatingContractState, keyRequestContractID, reference)) {
              // In the meantime, our key request has been responded, so we
              // don't need to set pendingKeyRequests.
              return
            }

            if (!has(rootState, keyRequestContractID)) this.config.reactiveSet(rootState, keyRequestContractID, Object.create(null))
            const targetState = rootState[keyRequestContractID] as ChelContractState

            if (!targetState._volatile) {
              this.config.reactiveSet(targetState, '_volatile', Object.create(null))
            }
            if (!targetState._volatile!.pendingKeyRequests) {
              this.config.reactiveSet(rootState[keyRequestContractID]._volatile!, 'pendingKeyRequests', [])
            }

            if (targetState._volatile!.pendingKeyRequests!.some((pkr) => {
              return pkr && pkr.contractID === contractID && pkr.hash === hash
            })) {
              // This pending key request has already been registered.
              // Nothing left to do.
              return
            }

            // Mark the contract for which keys were requested as pending keys
            // The hash (of the current message) is added to this dictionary
            // for cross-referencing purposes.
            targetState._volatile!.pendingKeyRequests!.push({ contractID, name: key.name, hash, reference: reference?.data })

            this.setPostSyncOp(contractID, 'pending-keys-for-' + keyRequestContractID, ['okTurtles.events/emit', CONTRACT_IS_PENDING_KEY_REQUESTS, { contractID: keyRequestContractID }])
          }).catch((e: unknown) => {
            // Using console.error instead of logEvtError because this
            // is a side-effect and not relevant for outgoing messages
            console.error('Error while setting or updating pendingKeyRequests', { contractID, keyRequestContractID, reference }, e)
          })
        })
      }
    }
  }

  // Any persistent keys are stored as a side-effect
  if (keysToPersist.length) {
    internalSideEffectStack?.push(() => {
      sbp('chelonia/storeSecretKeys', new Secret(keysToPersist))
    })
  }
  internalSideEffectStack?.push(() => subscribeToForeignKeyContracts.call(this, contractID, state))
}

/**
 * For every authorized key that mirrors a key from another contract
 * (`foreignKey` is a URL of the form `<contract>?keyName=<name>`), registers
 * a pending watch so that rotations/deletions on the foreign contract can be
 * mirrored here. Requires a local signing key capable of OP_KEY_DEL at the
 * foreign key's ringLevel; otherwise the key is skipped. Errors are logged
 * (warn) and swallowed — this is best-effort.
 */
export const subscribeToForeignKeyContracts = function (this: CheloniaContext, contractID: string, state: ChelContractState) {
  try {
    Object.values(state._vm.authorizedKeys).filter((key) => !!((key)).foreignKey && findKeyIdByName(state, ((key)).name) != null).forEach((key) => {
      const foreignKey = String(key.foreignKey)
      const fkUrl = new URL(foreignKey)
      const foreignContract = fkUrl.pathname
      const foreignKeyName = fkUrl.searchParams.get('keyName')

      if (!foreignContract || !foreignKeyName) {
        console.warn('Invalid foreign key: missing contract or key name', { contractID, keyId: key.id })
        return
      }

      const rootState = sbp(this.config.stateSelector)

      const signingKey = findSuitableSecretKeyId(state, [SPMessage.OP_KEY_DEL], ['sig'], key.ringLevel)
      const canMirrorOperations = !!signingKey

      // If we cannot mirror operations, then there is nothing left to do
      if (!canMirrorOperations) return

      // If the key is already being watched, do nothing
      if (Array.isArray(rootState?.[foreignContract]?._volatile?.watch)) {
        if ((rootState[foreignContract] as ChelContractState)._volatile!.watch!.find((v) =>
          v[0] === key.name && v[1] === contractID
        )) return
      }

      if (!has(state._vm, 'pendingWatch')) this.config.reactiveSet(state._vm, 'pendingWatch', Object.create(null))
      if (!has(state._vm.pendingWatch, foreignContract)) this.config.reactiveSet(state._vm.pendingWatch!, foreignContract, [])
      if (!state._vm.pendingWatch![foreignContract].find(([n]) => n === foreignKeyName)) {
        state._vm.pendingWatch![foreignContract].push([foreignKeyName, key.id])
      }

      this.setPostSyncOp(contractID, `watchForeignKeys-${contractID}`, ['chelonia/private/watchForeignKeys', contractID])
    })
  } catch (e: unknown) {
    console.warn('Error at subscribeToForeignKeyContracts: ' + ((e as Error).message || e))
  }
}

// Messages might be sent before receiving already posted messages, which will
// result in a conflict
// When resending a message, race conditions might also occur (for example, if
// key rotation is required and there are many clients simultaneously online, it
// may be performed by all connected clients at once).
// The following function handles re-signing of messages when a conflict
// occurs (required because the message's previousHEAD will change) as well as
// duplicate operations. For operations involving keys, the payload will be
// rewritten to eliminate no-longer-relevant keys. In most cases, this would
// result in an empty payload, in which case the message is omitted entirely.
export const recreateEvent = (entry: SPMessage, state: ChelContractState, contractsState: ChelRootState['contracts'][string]): undefined | SPMessage => {
  const { HEAD: previousHEAD, height: previousHeight, previousKeyOp } = contractsState || {}
  if (!previousHEAD) {
    throw new Error('recreateEvent: Giving up because the contract has been removed')
  }
  const head = entry.head()

  const [opT, rawOpV] = entry.rawOp()

  // Returns the (possibly re-signed) raw operation, or undefined if the whole
  // message should be dropped.
  const recreateOperation = (opT: string, rawOpV: SignedData) => {
    const opV = rawOpV.valueOf()
    // Returns the original opV when nothing changed, a filtered payload when
    // some entries became irrelevant, or undefined when nothing remains.
    const recreateOperationInternal = (opT: string, opV: SPOpValue): SPOpValue | typeof undefined => {
      let newOpV: SPOpValue
      if (opT === SPMessage.OP_KEY_ADD) {
        if (!Array.isArray(opV)) throw new Error('Invalid message format')
        newOpV = (opV as SPOpKeyAdd).filter((k) => {
          const kId = (k.valueOf() as SPKey).id
          return !has(state._vm.authorizedKeys, kId) || state._vm.authorizedKeys[kId]._notAfterHeight != null
        })
        // Has this key already been added? (i.e., present in authorizedKeys)
        if (newOpV.length === 0) {
          console.info('Omitting empty OP_KEY_ADD', { head })
        } else if (newOpV.length === opV.length) {
          return opV
        }
        // NOTE(review): when 0 < newOpV.length < opV.length this branch falls
        // through and the function implicitly returns undefined, dropping the
        // remaining (still-relevant) keys instead of returning the filtered
        // payload (same for OP_KEY_DEL / OP_KEY_UPDATE below, whereas
        // OP_ATOMIC explicitly returns newOpV). Verify against upstream —
        // this looks like a missing `return newOpV`.
      } else if (opT === SPMessage.OP_KEY_DEL) {
        if (!Array.isArray(opV)) throw new Error('Invalid message format')
        // Has this key already been removed? (i.e., no longer in authorizedKeys)
        newOpV = (opV as SPOpKeyDel).filter((keyId) => {
          const kId = Object(keyId).valueOf()
          return has(state._vm.authorizedKeys, kId) && state._vm.authorizedKeys[kId]._notAfterHeight == null
        })
        if (newOpV.length === 0) {
          console.info('Omitting empty OP_KEY_DEL', { head })
        } else if (newOpV.length === opV.length) {
          return opV
        }
      } else if (opT === SPMessage.OP_KEY_UPDATE) {
        if (!Array.isArray(opV)) throw new Error('Invalid message format')
        // Has this key already been replaced? (i.e., no longer in authorizedKeys)
        newOpV = (opV as SPOpKeyUpdate).filter((k) => {
          const oKId = (k.valueOf() as SPKeyUpdate).oldKeyId
          const nKId = (k.valueOf() as SPKeyUpdate).id
          return nKId == null || (has(state._vm.authorizedKeys, oKId) && state._vm.authorizedKeys[oKId]._notAfterHeight == null)
        })
        if (newOpV.length === 0) {
          console.info('Omitting empty OP_KEY_UPDATE', { head })
        } else if (newOpV.length === opV.length) {
          return opV
        }
      } else if (opT === SPMessage.OP_ATOMIC) {
        if (!Array.isArray(opV)) throw new Error('Invalid message format')
        // Recurse into each inner operation and drop the ones that vanish.
        newOpV = (opV as SPOpAtomic).map(([t, v]) => [t, recreateOperationInternal(t, v)]).filter(([, v]) => !!v) as SPOpAtomic
        if ((newOpV as SPOpAtomic).length === 0) {
          console.info('Omitting empty OP_ATOMIC', { head })
        } else if ((newOpV as SPOpAtomic).length === opV.length && (newOpV as SPOpAtomic).reduce((acc, cv, i) => acc && cv === opV[i], true)) {
          return opV
        } else {
          return newOpV
        }
      } else {
        // Non-key operations are resent unchanged.
        return opV
      }
    }

    const newOpV = recreateOperationInternal(opT, opV)

    if (newOpV === opV) {
      // Payload unchanged: reuse the existing signed data as-is.
      return rawOpV
    } else if (newOpV === undefined) {
      return
    }

    if (typeof rawOpV.recreate !== 'function') {
      throw new Error('Unable to recreate operation')
    }

    return rawOpV.recreate(newOpV)
  }

  const newRawOpV = recreateOperation(opT, rawOpV)

  if (!newRawOpV) return

  const newOp = [opT, newRawOpV]

  // Re-issue the message on top of the current chain head.
  entry = SPMessage.cloneWith(
    head, newOp as SPOpRaw, { previousKeyOp, previousHEAD, height: previousHeight + 1 }
  )

  return entry
}

/**
 * Resolves the contract that "owns" a signing key: for a foreign key, the
 * contract named in its `foreignKey` URL path; otherwise the contract the
 * key lives in. Returns undefined when no signing key ID is given.
 */
export const getContractIDfromKeyId = (contractID: string, signingKeyId: string | null | undefined, state: ChelContractState): string | null | undefined => {
  if (!signingKeyId) return
  return signingKeyId && state._vm?.authorizedKeys?.[signingKeyId]?.foreignKey
    ? new URL(state._vm.authorizedKeys[signingKeyId].foreignKey!).pathname
    : contractID
}

/**
 * Fetches contract events after `sinceHeight`, transparently issuing
 * follow-up requests until `limit` events are delivered or the server's
 * reported head height is reached. Returns a ReadableStream of serialized
 * messages (or, with `stream: false`, a Promise of the collected array).
 * Implemented as an incremental JSON-array parser driven by a small state
 * machine ('fetch' → 'read*' → 'events' → 'eod').
 */
export function eventsAfter (this: CheloniaContext, contractID: string, { sinceHeight, limit, sinceHash, stream = true }: { sinceHeight: number, limit?: number, sinceHash?: string, stream: boolean }): ReadableStream | Promise {
  if (!contractID) {
    // Avoid making a network roundtrip to tell us what we already know
    throw new Error('Missing contract ID')
  }

  let lastUrl: string
  // Issues one /eventsAfter request and returns a reader for its body.
  // Also records the server's current head height from the response headers.
  const fetchEventsStreamReader = async () => {
    requestLimit = Math.min(limit ?? MAX_EVENTS_AFTER, remainingEvents)
    lastUrl = `${this.config.connectionURL}/eventsAfter/${contractID}/${sinceHeight}${Number.isInteger(requestLimit) ? `/${requestLimit}` : ''}`
    const eventsResponse = await this.config.fetch(lastUrl, { signal })
    if (!eventsResponse.ok) {
      const msg = `${eventsResponse.status}: ${eventsResponse.statusText}`
      if (eventsResponse.status === 404 || eventsResponse.status === 410) throw new ChelErrorResourceGone(msg, { cause: eventsResponse.status })
      throw new ChelErrorUnexpectedHttpResponseCode(msg, { cause: eventsResponse.status })
    }
    if (!eventsResponse.body) throw new Error('Missing body')
    latestHeight = parseInt(eventsResponse.headers.get('shelter-headinfo-height')!, 10)
    if (!Number.isSafeInteger(latestHeight)) throw new Error('Invalid latest height')
    requestCount++
    return eventsResponse.body.getReader()
  }
  if (!Number.isSafeInteger(sinceHeight) || sinceHeight < 0) {
    throw new TypeError('Invalid since height value. Expected positive integer.')
  }
  const signal = this.abortController.signal
  let requestCount = 0
  let remainingEvents = limit ?? Number.POSITIVE_INFINITY
  let eventsStreamReader: ReadableStreamDefaultReader
  let latestHeight: number
  let state: 'fetch' | 'read-eos' | 'read-new-response' | 'read' | 'events' | 'eod' = 'fetch'
  let requestLimit: number
  let count: number
  let buffer: string = ''
  let currentEvent: string
  // return ReadableStream with a custom pull function to handle streamed data
  const s = new ReadableStream({
    // The pull function is called whenever the internal buffer of the stream
    // becomes empty and needs more data.
    async pull (controller) {
      try {
        for (;;) {
          // Handle different states of the stream reading process.
          switch (state) {
            // When in 'fetch' state, initiate a new fetch request to obtain a
            // stream reader for events.
            case 'fetch': {
              eventsStreamReader = await fetchEventsStreamReader()
              // Transition to reading the new response and reset the processed
              // events counter
              state = 'read-new-response'
              count = 0
              break
            }
            case 'read-eos': // End of stream case
            case 'read-new-response': // Just started reading a new response
            case 'read': { // Reading from the response stream
              const { done, value } = await eventsStreamReader.read()
              // If done, determine if the stream should close or fetch more
              // data by making a new request
              if (done) {
                // No more events to process or reached the latest event
                // Using `>=` instead of `===` to avoid an infinite loop in the
                // event of data loss on the server.
                if (remainingEvents === 0 || sinceHeight >= latestHeight) {
                  controller.close()
                  return
                } else if (state === 'read-new-response' || buffer) {
                  // If done prematurely, throw an error
                  throw new Error('Invalid response: done too early')
                } else {
                  // If there are still events to fetch, switch state to fetch
                  state = 'fetch'
                  break
                }
              }
              if (!value) {
                // If there's no value (e.g., empty response), throw an error
                throw new Error('Invalid response: missing body')
              }
              // Concatenate new data to the buffer, trimming any
              // leading/trailing whitespace (the response is a JSON array of
              // base64-encoded data, meaning that whitespace is not significant)
              buffer = buffer + Buffer.from(value).toString().trim()
              // If there was only whitespace, try reading again
              if (!buffer) break
              if (state === 'read-new-response') {
                // Response is in JSON format, so we look for the start of an
                // array (`[`)
                if (buffer[0] !== '[') {
                  throw new Error('Invalid response: no array start delimiter')
                }
                // Trim the array start delimiter from the buffer
                buffer = buffer.slice(1)
              } else if (state === 'read-eos') {
                // If in 'read-eos' state and still reading data, it's an error
                // because the response isn't valid JSON (there should be
                // nothing other than whitespace after `]`)
                throw new Error('Invalid data at the end of response')
              }
              // If not handling new response or end-of-stream, switch to
              // processing events
              state = 'events'
              break
            }
            case 'events': {
              // Process events by looking for a comma or closing bracket that
              // indicates the end of an event
              const nextIdx = buffer.search(/(?<=\s*)[,\]]/)
              // If the end of the event isn't found, go back to reading more
              // data
              if (nextIdx < 0) {
                state = 'read'
                break
              }
              let enqueued = false
              try {
                // Extract the current event's value and trim whitespace
                const eventValue = buffer.slice(0, nextIdx).trim()
                if (eventValue) {
                  // Check if the event limit is reached; if so, throw an error
                  if (count === requestLimit) {
                    throw new Error('Received too many events')
                  }
                  currentEvent = JSON.parse(b64ToStr(JSON.parse(eventValue))).message
                  if (count === 0) {
                    // The first event of each response must match the
                    // height/hash we asked for; a hash mismatch at the same
                    // height means the chain was rewritten (fork).
                    const hash = SPMessage.deserializeHEAD(currentEvent).hash
                    const height = SPMessage.deserializeHEAD(currentEvent).head.height
                    if (height !== sinceHeight || (sinceHash && sinceHash !== hash)) {
                      if (height === sinceHeight && sinceHash && sinceHash !== hash) {
                        throw new ChelErrorForkedChain(`Forked chain: hash(${hash}) !== since(${sinceHash})`)
                      } else {
                        throw new Error(`Unexpected data: hash(${hash}) !== since(${sinceHash || ''}) or height(${height}) !== since(${sinceHeight})`)
                      }
                    }
                  }
                  // If this is the first event in a second or later request,
                  // drop the event because it's already been included in
                  // a previous response
                  // NOTE(review): `requestCount` is incremented inside
                  // fetchEventsStreamReader before any event is parsed, so it
                  // is never 0 here and this condition appears to always
                  // enqueue — verify the intended increment placement/test
                  // against upstream.
                  if (count++ !== 0 || requestCount !== 0) {
                    controller.enqueue(currentEvent)
                    enqueued = true
                    remainingEvents--
                  }
                }
                // If the stream is finished (indicated by a closing bracket),
                // update `since` (to make the next request if needed) and
                // switch to 'read-eos'.
                if (buffer[nextIdx] === ']') {
                  if (currentEvent) {
                    const deserialized = SPMessage.deserializeHEAD(currentEvent)
                    sinceHeight = deserialized.head.height
                    sinceHash = deserialized.hash
                    state = 'read-eos'
                  } else {
                    // If the response came empty, assume there are no more events
                    // after. Mostly this prevents infinite loops if a server is
                    // claiming there are more events than it's willing to return
                    // data for.
                    state = 'eod'
                  }
                  // This should be an empty string now
                  buffer = buffer.slice(nextIdx + 1).trim()
                } else if (currentEvent) {
                  // Otherwise, move the buffer pointer to the next event
                  buffer = buffer.slice(nextIdx + 1).trimStart()
                } else {
                  // If the end delimiter (`]`) is missing, throw an error
                  throw new Error('Missing end delimiter')
                }
                // If an event was successfully enqueued, exit the loop to wait
                // for the next pull request
                if (enqueued) {
                  return
                }
              } catch (e) {
                console.error('[chelonia] Error during event parsing', e)
                throw e
              }
              break
            }
            case 'eod': {
              if (remainingEvents === 0 || sinceHeight >= latestHeight) {
                controller.close()
              } else {
                throw new Error('Unexpected end of data')
              }
              return
            }
          }
        }
      } catch (e) {
        console.error('[eventsAfter] Error', { lastUrl }, e)
        eventsStreamReader?.cancel('Error during pull').catch(e2 => {
          console.error('Error canceling underlying event stream reader on error', e, e2)
        })
        throw e
      }
    }
  })
  if (stream) return s
  // Workaround for consumers that need a plain array instead of a stream
  // NOTE(review): the original comment referenced an external issue link that
  // was lost in extraction.
  return collectEventStream(s)
}

/**
 * Builds a `shelter ...` Authorization header for `contractID`, signed with
 * the contract's #sak (shelter authorization key). The secret #sak must be
 * available in `transientSecretKeys`; throws otherwise.
 */
export function buildShelterAuthorizationHeader (this: CheloniaContext, contractID: string, state?: ChelContractState): string {
  if (!state) state = sbp(this.config.stateSelector)[contractID]
  const SAKid = findKeyIdByName(state!, '#sak')
  if (!SAKid) {
    throw new Error(`Missing #sak in ${contractID}`)
  }
  const SAK = this.transientSecretKeys[SAKid]
  if (!SAK) {
    throw new Error(`Missing secret #sak (${SAKid}) in ${contractID}`)
  }
  const deserializedSAK = typeof SAK === 'string' ? deserializeKey(SAK) : SAK

  const nonceBytes = new Uint8Array(15)
  globalThis.crypto.getRandomValues(nonceBytes)

  // Payload: "<contractID> <timestamp>.<base64 nonce>"
  const data = `${contractID} ${sbp('chelonia/time')}.${Buffer.from(nonceBytes).toString('base64')}`

  // Header: "shelter <payload>.<signature>"
  return `shelter ${data}.${sign(deserializedSAK, data)}`
}

/**
 * Verifies a `shelter ...` Authorization header against the contract's
 * public #sak and a ±60s timestamp window, returning the contract ID on
 * success. Throws on any parse, lookup, time-range or signature failure.
 */
export function verifyShelterAuthorizationHeader (authorization: string, rootState?: object): string {
  const regex = /^shelter (([a-zA-Z0-9]+) ([0-9]+)\.([a-zA-Z0-9+/=]{20}))\.([a-zA-Z0-9+/=]+)$/i
  // Cheap guard before running the regex on attacker-controlled input.
  if (authorization.length > 1024) {
    throw new Error('Authorization header too long')
  }
  const matches = authorization.match(regex)
  if (!matches) {
    throw new Error('Unable to parse shelter authorization header')
  }
  // TODO: Remember nonces and reject already used ones
  const [, data, contractID, timestamp, , signature] = matches
  if (Math.abs(parseInt(timestamp) - Date.now()) > 60e3) {
    throw new Error('Invalid signature time range')
  }
  if (!rootState) rootState = sbp('chelonia/rootState')
  if (!has(rootState, contractID)) {
    throw new Error(`Contract ${contractID} from shelter authorization header not found`)
  }
  const SAKid = findKeyIdByName(rootState![contractID] as ChelContractState, '#sak')
  if (!SAKid) {
    throw new Error(`Missing #sak in ${contractID}`)
  }
  const SAK = (rootState![contractID] as ChelContractState)._vm.authorizedKeys[SAKid].data
  if (!SAK) {
    throw new Error(`Missing secret #sak (${SAKid}) in ${contractID}`)
  }
  const deserializedSAK = deserializeKey(SAK)

  verifySignature(deserializedSAK, data, signature)

  return contractID
}

// Removes all own enumerable properties from `o` in place.
export const clearObject = (o: object) => {
  Object.keys(o).forEach((k) => delete o[k as keyof typeof o])
}

// Like clearObject, but delegates each removal to `fn` so reactive frameworks
// can observe the deletions.
export const reactiveClearObject = (o: T, fn: (o: T, k: keyof T) => void) => {
  Object.keys(o).forEach((k) => fn(o, k as keyof T))
}

/**
 * A contract state may be garbage-collected only when it has no persistent
 * references, no ephemeral references, and none of its keys are being
 * watched by a contract we're still subscribed to.
 */
export const checkCanBeGarbageCollected = function (this: CheloniaContext, id: string): boolean {
  const rootState = sbp(this.config.stateSelector)
  return (
    // Check persistent references
    (!has(rootState.contracts, id) || !rootState.contracts[id] || !has(rootState.contracts[id], 'references')) &&
    // Check ephemeral references
    !has(this.ephemeralReferenceCount, id)) &&
    // Check foreign keys (i.e., that no keys from this contract are being watched)
    (!has(rootState, id) || !has(rootState[id], '_volatile') || !has(rootState[id]._volatile, 'watch') || rootState[id]._volatile.watch.length === 0 || (rootState[id] as ChelContractState)._volatile!.watch!.filter(([, cID]) => this.subscriptionSet.has(cID)).length === 0)
}

// Drains a ReadableStream into an array of its chunks.
export const collectEventStream = async (s: ReadableStream): Promise => {
  const reader = s.getReader()
  const r = []
  for (;;) {
    const { done, value } = await reader.read()
    if (done) break
    r.push(value)
  }
  return r
}

// Used inside processing functions for displaying errors at the 'warn' level
// for outgoing messages to increase the signal-to-noise ratio. See issue #2773.
export const logEvtError = (msg: SPMessage, ...args: unknown[]) => {
  if (msg._direction === 'outgoing') {
    console.warn(...args)
  } else {
    console.error(...args)
  }
}

// Returns a fetch result handler that throws typed errors on non-2xx
// responses and otherwise decodes the body as `type`.
export const handleFetchResult = (type: 'text' | 'json' | 'blob'): ((r: Response) => Promise) => {
  return function (r: Response) {
    if (!r.ok) {
      const msg = `${r.status}: ${r.statusText}`
      // 410 is sometimes special (for example, it can mean that a contract or
      // a file has been deleted)
      if (r.status === 404 || r.status === 410) throw new ChelErrorResourceGone(msg, { cause: r.status })
      throw new ChelErrorUnexpectedHttpResponseCode(msg, { cause: r.status })
    }
    return r[type]()
  }
}
diff --git a/tsconfig.cjs.json b/tsconfig.cjs.json
new file mode 100644
index 0000000..eae0c87
--- /dev/null
+++ b/tsconfig.cjs.json
@@ -0,0 +1,22 @@
+{
+  "compilerOptions": {
+    "target": "ES2022",
+    "module": "nodenext",
+    "moduleResolution": "nodenext",
+    "outDir": "dist/cjs",
+    "strict": true,
+    "strictNullChecks": true,
+    "alwaysStrict": true,
+    "noUnusedLocals": true,
+    "esModuleInterop": true,
+    "skipLibCheck": false,
+    "forceConsistentCasingInFileNames": true,
+    "allowJs": true,
+    "checkJs": true
+  },
+  "ts-node": {
+    "experimentalResolver": true
+  },
"include": ["src/**/*.ts"], + "exclude": ["node_modules", "dist/**/*", "**/*.spec.ts", "**/*.test.ts"] +} \ No newline at end of file diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000..caae972 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,22 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "esnext", + "moduleResolution": "bundler", + "outDir": "dist/esm", + "strict": true, + "strictNullChecks": true, + "alwaysStrict": true, + "noUnusedLocals": true, + "esModuleInterop": true, + "skipLibCheck": false, + "forceConsistentCasingInFileNames": true, + "allowJs": true, + "checkJs": true + }, + "ts-node": { + "experimentalResolver": true + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules", "dist/**/*"] +} \ No newline at end of file