diff --git a/README.md b/README.md index 9de1fa17..cc768fb2 100644 --- a/README.md +++ b/README.md @@ -59,7 +59,7 @@ Entity keys in Camunda 8 are stored and represented as `int64` numbers. The rang Some number values - for example: "_total returned results_ " - may be specified as `int64` in the API specifications. Although these numbers will usually not contain unsafe values, they are always serialised to `string`. -For `int64` values whose type is not known ahead of time, such as job variables, you can pass an annotated data transfer object (DTO) to decode them reliably. If no DTO is specified, the default behavior of the SDK is to serialise all numbers to JavaScript `number`, and if a number value is detected at a runtime that cannot be accurately stored as `number`, to throw an exception. +For `int64` values whose type is not known ahead of time, such as job variables, you can pass an annotated data transfer object (DTO) to decode them reliably. If no DTO is specified, the default behavior of the SDK is to serialise all numbers to JavaScript `number`, and to throw an exception if a number value that cannot be accurately represented as the JavaScript `number` type (that is, a value greater than 2^53-1) is detected at runtime. ## Authorization @@ -222,9 +222,31 @@ Here is an example of turning on debugging for the OAuth and Operate components: DEBUG=camunda:oauth,camunda:operate node app.js ``` +## Process Variable Typing + +Process variables - the `variables` of Zeebe messages, jobs, and process instance creation requests and responses - are stored in the broker as key:value pairs. They are transported as a JSON string. The SDK parses the JSON string into a JavaScript object. + +Various Zeebe methods accept DTO classes for variable input and output. These DTO classes are used to provide design-time type information on the `variables` object. 
They are also used to safely decode 64-bit integer values that cannot be accurately represented by the JavaScript `number` type. + +To create a DTO to represent the expected shape and type of the `variables` object, extend the `LosslessDto` class: + +```typescript +class MyVariableDTO extends LosslessDto { + firstName!: string + lastName!: string + age!: number + optionalValue?: number + @Int64String + veryBigInteger?: string + constructor(data: Partial<MyVariableDTO>) { + super(data) + } +} +``` + ## Typing of Zeebe worker variables -The variable payload in a Zeebe worker task handler is available as an object `job.variables`. By default, this is of type `any`. +The variable payload in a Zeebe worker task handler is available as an object `job.variables`. By default, this is of type `any` for the gRPC API, and `unknown` for the REST API. The `ZBClient.createWorker()` method accepts an `inputVariableDto` to control the parsing of number values and provide design-time type information. Passing an `inputVariableDto` class to a Zeebe worker is optional. 
If a DTO class is passed to the Zeebe worker, it is used for two purposes: diff --git a/docker/.env b/docker/.env index c72a4440..f709eba4 100644 --- a/docker/.env +++ b/docker/.env @@ -2,7 +2,8 @@ # CAMUNDA_CONNECTORS_VERSION=0.23.2 CAMUNDA_CONNECTORS_VERSION=8.5.0 CAMUNDA_OPTIMIZE_VERSION=8.5.0 -CAMUNDA_PLATFORM_VERSION=8.5.0 +CAMUNDA_PLATFORM_VERSION=8.6.0 +CAMUNDA_ZEEBE_VERSION=8.6.3 CAMUNDA_WEB_MODELER_VERSION=8.5.0 ELASTIC_VERSION=8.9.0 KEYCLOAK_SERVER_VERSION=22.0.3 diff --git a/docker/docker-compose-multitenancy.yaml b/docker/docker-compose-multitenancy.yaml index 366b1f95..2a84221b 100644 --- a/docker/docker-compose-multitenancy.yaml +++ b/docker/docker-compose-multitenancy.yaml @@ -11,7 +11,7 @@ services: zeebe: # https://docs.camunda.io/docs/self-managed/platform-deployment/docker/#zeebe - image: camunda/zeebe:${CAMUNDA_PLATFORM_VERSION} + image: camunda/zeebe:${CAMUNDA_ZEEBE_VERSION} container_name: zeebe ports: - "26500:26500" diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 9f93b71f..22237010 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -11,7 +11,7 @@ services: zeebe: # https://docs.camunda.io/docs/self-managed/platform-deployment/docker/#zeebe - image: camunda/zeebe:${CAMUNDA_PLATFORM_VERSION} + image: camunda/zeebe:${CAMUNDA_ZEEBE_VERSION} container_name: zeebe ports: - "26500:26500" diff --git a/package-lock.json b/package-lock.json index ff0c6094..b1e79e7a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -7,15 +7,17 @@ "": { "name": "@camunda8/sdk", "version": "8.6.2", - "license": "Apache 2.0", + "license": "Apache-2.0", "dependencies": { "@grpc/grpc-js": "1.10.9", "@grpc/proto-loader": "0.7.13", + "@types/form-data": "^2.2.1", "chalk": "^2.4.2", "console-stamp": "^3.0.2", "dayjs": "^1.8.15", "debug": "^4.3.4", "fast-xml-parser": "^4.1.3", + "form-data": "^4.0.1", "got": "^11.8.6", "jwt-decode": "^4.0.0", "lodash.mergewith": "^4.6.2", @@ -26,7 +28,8 @@ "reflect-metadata": "^0.2.1", 
"stack-trace": "0.0.10", "typed-duration": "^1.0.12", - "uuid": "^7.0.3" + "uuid": "^7.0.3", + "winston": "^3.14.2" }, "devDependencies": { "@commitlint/cli": "^18.4.3", @@ -55,6 +58,7 @@ "eslint-plugin-import": "^2.29.1", "eslint-plugin-prettier": "^5.0.1", "express": "^4.19.2", + "get-port-please": "^3.1.2", "grpc-tools": "^1.12.4", "husky": "^8.0.3", "jest": "^29.7.0", @@ -67,6 +71,7 @@ "ts-protoc-gen": "^0.15.0", "tsconfig-paths": "^4.2.0", "tsd": "^0.31.0", + "typed-emitter": "^2.1.0", "typedoc": "^0.25.9", "typedoc-plugin-include-example": "^1.2.0", "typedoc-plugin-missing-exports": "^2.2.0", @@ -1300,6 +1305,16 @@ "@jridgewell/sourcemap-codec": "^1.4.10" } }, + "node_modules/@dabh/diagnostics": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.3.tgz", + "integrity": "sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA==", + "dependencies": { + "colorspace": "1.1.x", + "enabled": "2.0.x", + "kuler": "^2.0.0" + } + }, "node_modules/@eslint-community/eslint-utils": { "version": "4.4.0", "dev": true, @@ -3503,6 +3518,16 @@ "tar-fs": "^2.0.0" } }, + "node_modules/@sitapati/testcontainers/node_modules/get-port": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/get-port/-/get-port-4.2.0.tgz", + "integrity": "sha512-/b3jarXkH8KJoOMQc3uVGHASwGLPq3gSFJ7tgJm2diza+bydJPTGOibin2steecKeOylE8oY2JERlVWkAJO6yw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/@szmarczak/http-timer": { "version": "4.0.6", "license": "MIT", @@ -3679,6 +3704,15 @@ "@types/send": "*" } }, + "node_modules/@types/form-data": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/@types/form-data/-/form-data-2.2.1.tgz", + "integrity": "sha512-JAMFhOaHIciYVh8fb5/83nmuO/AHwmto+Hq7a9y8FzLDcC1KCU344XDOMEmahnrTFlHjgh4L0WJFczNIX2GxnQ==", + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/graceful-fs": { 
"version": "4.1.9", "dev": true, @@ -3872,6 +3906,11 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/triple-beam": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/triple-beam/-/triple-beam-1.3.5.tgz", + "integrity": "sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw==" + }, "node_modules/@types/uuid": { "version": "9.0.8", "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.8.tgz", @@ -4499,9 +4538,13 @@ "node": ">=0.10.0" } }, + "node_modules/async": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", + "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==" + }, "node_modules/asynckit": { "version": "0.4.0", - "dev": true, "license": "MIT" }, "node_modules/at-least-node": { @@ -4761,9 +4804,9 @@ } }, "node_modules/body-parser": { - "version": "1.20.2", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz", - "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==", + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", "dev": true, "dependencies": { "bytes": "3.1.2", @@ -4774,7 +4817,7 @@ "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", - "qs": "6.11.0", + "qs": "6.13.0", "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" @@ -5006,13 +5049,19 @@ } }, "node_modules/call-bind": { - "version": "1.0.5", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", "dev": true, - "license": "MIT", "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": 
"^1.3.0", "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.1", - "set-function-length": "^1.1.1" + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -5188,8 +5237,9 @@ }, "node_modules/cli-truncate": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-4.0.0.tgz", + "integrity": "sha512-nPdaFdQ0h/GEigbPClz11D0v/ZJEwxmeVZGeMo3Z5StPtUTkA9o1lD6QwoirYiSDzbcwn2XcjwmCp68W1IS4TA==", "dev": true, - "license": "MIT", "dependencies": { "slice-ansi": "^5.0.0", "string-width": "^7.0.0" @@ -5202,9 +5252,10 @@ } }, "node_modules/cli-truncate/node_modules/ansi-regex": { - "version": "6.0.1", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", "dev": true, - "license": "MIT", "engines": { "node": ">=12" }, @@ -5213,14 +5264,16 @@ } }, "node_modules/cli-truncate/node_modules/emoji-regex": { - "version": "10.3.0", - "dev": true, - "license": "MIT" + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.4.0.tgz", + "integrity": "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==", + "dev": true }, "node_modules/cli-truncate/node_modules/string-width": { - "version": "7.0.0", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", + "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", "dev": true, - "license": "MIT", "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", @@ -5235,8 +5288,9 @@ }, "node_modules/cli-truncate/node_modules/strip-ansi": { "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": 
"sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", "dev": true, - "license": "MIT", "dependencies": { "ansi-regex": "^6.0.1" }, @@ -5299,6 +5353,15 @@ "dev": true, "license": "MIT" }, + "node_modules/color": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/color/-/color-3.2.1.tgz", + "integrity": "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA==", + "dependencies": { + "color-convert": "^1.9.3", + "color-string": "^1.6.0" + } + }, "node_modules/color-convert": { "version": "1.9.3", "license": "MIT", @@ -5310,6 +5373,15 @@ "version": "1.1.3", "license": "MIT" }, + "node_modules/color-string": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz", + "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==", + "dependencies": { + "color-name": "^1.0.0", + "simple-swizzle": "^0.2.2" + } + }, "node_modules/color-support": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", @@ -5321,12 +5393,21 @@ }, "node_modules/colorette": { "version": "2.0.20", - "dev": true, - "license": "MIT" + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", + "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", + "dev": true + }, + "node_modules/colorspace": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/colorspace/-/colorspace-1.1.4.tgz", + "integrity": "sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w==", + "dependencies": { + "color": "^3.1.3", + "text-hex": "1.0.x" + } }, "node_modules/combined-stream": { "version": "1.0.8", - "dev": true, "license": "MIT", "dependencies": { "delayed-stream": "~1.0.0" @@ -5336,11 +5417,12 @@ } }, "node_modules/commander": { - "version": "11.1.0", + 
"version": "12.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz", + "integrity": "sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==", "dev": true, - "license": "MIT", "engines": { - "node": ">=16" + "node": ">=18" } }, "node_modules/commitizen": { @@ -6224,16 +6306,20 @@ } }, "node_modules/define-data-property": { - "version": "1.1.1", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", "dev": true, - "license": "MIT", "dependencies": { - "get-intrinsic": "^1.2.1", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.0" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/define-properties": { @@ -6254,7 +6340,6 @@ }, "node_modules/delayed-stream": { "version": "1.0.0", - "dev": true, "license": "MIT", "engines": { "node": ">=0.4.0" @@ -6545,10 +6630,15 @@ "dev": true, "license": "MIT" }, + "node_modules/enabled": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/enabled/-/enabled-2.0.0.tgz", + "integrity": "sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ==" + }, "node_modules/encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", "dev": true, "engines": { "node": ">= 0.8" @@ -6697,6 +6787,18 @@ "url": 
"https://github.com/sponsors/sindresorhus" } }, + "node_modules/environment": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/environment/-/environment-1.1.0.tgz", + "integrity": "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==", + "dev": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/err-code": { "version": "1.1.2", "license": "MIT" @@ -6761,6 +6863,27 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/es-set-tostringtag": { "version": "2.0.2", "dev": true, @@ -7431,6 +7554,12 @@ "node": ">= 0.6" } }, + "node_modules/eventemitter3": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", + "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", + "dev": true + }, "node_modules/execa": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/execa/-/execa-3.4.0.tgz", @@ -7486,37 +7615,37 @@ } }, "node_modules/express": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz", - "integrity": 
"sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==", + "version": "4.21.0", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.0.tgz", + "integrity": "sha512-VqcNGcj/Id5ZT1LZ/cfihi3ttTn+NJmkli2eZADigjq29qTlWi/hAQ43t/VLPq8+UX06FCEx3ByOYet6ZFblng==", "dev": true, "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.2", + "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", "cookie": "0.6.0", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", - "finalhandler": "1.2.0", + "finalhandler": "1.3.1", "fresh": "0.5.2", "http-errors": "2.0.0", - "merge-descriptors": "1.0.1", + "merge-descriptors": "1.0.3", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", - "path-to-regexp": "0.1.7", + "path-to-regexp": "0.1.10", "proxy-addr": "~2.0.7", - "qs": "6.11.0", + "qs": "6.13.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", - "send": "0.18.0", - "serve-static": "1.15.0", + "send": "0.19.0", + "serve-static": "1.16.2", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", @@ -7622,7 +7751,9 @@ "license": "MIT" }, "node_modules/fast-xml-parser": { - "version": "4.3.2", + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.0.tgz", + "integrity": "sha512-/PlTQCI96+fZMAOLMZK4CWG1ItCbfZ/0jx7UIJFChPNrx7tcEgerUgWbeieCM9MfHInUDyK8DWYZ+YrywDJuTg==", "funding": [ { "type": "github", @@ -7633,7 +7764,6 @@ "url": "https://paypal.me/naturalintelligence" } ], - "license": "MIT", "dependencies": { "strnum": "^1.0.5" }, @@ -7657,6 +7787,11 @@ "bser": "2.1.1" } }, + "node_modules/fecha": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/fecha/-/fecha-4.2.3.tgz", + "integrity": "sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw==" 
+ }, "node_modules/figures": { "version": "3.2.0", "dev": true, @@ -7695,13 +7830,13 @@ } }, "node_modules/finalhandler": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", - "integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==", + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", "dev": true, "dependencies": { "debug": "2.6.9", - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "on-finished": "2.4.1", "parseurl": "~1.3.3", @@ -7810,6 +7945,11 @@ "dev": true, "license": "ISC" }, + "node_modules/fn.name": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fn.name/-/fn.name-1.1.0.tgz", + "integrity": "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==" + }, "node_modules/for-each": { "version": "0.3.3", "dev": true, @@ -7819,10 +7959,10 @@ } }, "node_modules/form-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", - "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", - "dev": true, + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz", + "integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==", + "license": "MIT", "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", @@ -7994,8 +8134,9 @@ }, "node_modules/get-east-asian-width": { "version": "1.2.0", + "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.2.0.tgz", + "integrity": "sha512-2nk+7SIVb14QrgXFHcm84tD4bKQz0RxPuMT8Ag5KPOq7J5fEmAg0UbXdTOSHqNuHSU28k55qnceesxXRZGzKWA==", "dev": true, - "license": "MIT", 
"engines": { "node": ">=18" }, @@ -8004,15 +8145,20 @@ } }, "node_modules/get-intrinsic": { - "version": "1.2.2", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", "dev": true, - "license": "MIT", "dependencies": { + "es-errors": "^1.3.0", "function-bind": "^1.1.2", "has-proto": "^1.0.1", "has-symbols": "^1.0.3", "hasown": "^2.0.0" }, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -8025,14 +8171,12 @@ "node": ">=8.0.0" } }, - "node_modules/get-port": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/get-port/-/get-port-4.2.0.tgz", - "integrity": "sha512-/b3jarXkH8KJoOMQc3uVGHASwGLPq3gSFJ7tgJm2diza+bydJPTGOibin2steecKeOylE8oY2JERlVWkAJO6yw==", + "node_modules/get-port-please": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/get-port-please/-/get-port-please-3.1.2.tgz", + "integrity": "sha512-Gxc29eLs1fbn6LQ4jSU4vXjlwyZhF5HsGuMAa7gqBP4Rw4yxxltyDUuF5MBclFzDTXO+ACchGQoeela4DSfzdQ==", "dev": true, - "engines": { - "node": ">=6" - } + "license": "MIT" }, "node_modules/get-stream": { "version": "5.2.0", @@ -8312,11 +8456,12 @@ } }, "node_modules/has-property-descriptors": { - "version": "1.0.1", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", "dev": true, - "license": "MIT", "dependencies": { - "get-intrinsic": "^1.2.2" + "es-define-property": "^1.0.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -8634,7 +8779,6 @@ }, "node_modules/inherits": { "version": "2.0.4", - "dev": true, "license": "ISC" }, "node_modules/ini": { @@ -8956,7 +9100,6 @@ }, "node_modules/is-stream": { "version": "2.0.1", - "dev": true, 
"license": "MIT", "engines": { "node": ">=8" @@ -10901,6 +11044,11 @@ "node": ">=6" } }, + "node_modules/kuler": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/kuler/-/kuler-2.0.0.tgz", + "integrity": "sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==" + }, "node_modules/leven": { "version": "3.1.0", "dev": true, @@ -10922,29 +11070,33 @@ } }, "node_modules/lilconfig": { - "version": "3.0.0", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.2.tgz", + "integrity": "sha512-eop+wDAvpItUys0FWkHIKeC9ybYrTGbU41U5K7+bttZZeohvnY7M9dZ5kB21GNWiFT2q1OoPTvncPCgSOVO5ow==", "dev": true, - "license": "MIT", "engines": { "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" } }, "node_modules/lint-staged": { - "version": "15.2.2", - "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-15.2.2.tgz", - "integrity": "sha512-TiTt93OPh1OZOsb5B7k96A/ATl2AjIZo+vnzFZ6oHK5FuTk63ByDtxGQpHm+kFETjEWqgkF95M8FRXKR/LEBcw==", - "dev": true, - "dependencies": { - "chalk": "5.3.0", - "commander": "11.1.0", - "debug": "4.3.4", - "execa": "8.0.1", - "lilconfig": "3.0.0", - "listr2": "8.0.1", - "micromatch": "4.0.5", - "pidtree": "0.6.0", - "string-argv": "0.3.2", - "yaml": "2.3.4" + "version": "15.2.10", + "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-15.2.10.tgz", + "integrity": "sha512-5dY5t743e1byO19P9I4b3x8HJwalIznL5E1FWYnU6OWw33KxNBSLAc6Cy7F2PsFEO8FKnLwjwm5hx7aMF0jzZg==", + "dev": true, + "dependencies": { + "chalk": "~5.3.0", + "commander": "~12.1.0", + "debug": "~4.3.6", + "execa": "~8.0.1", + "lilconfig": "~3.1.2", + "listr2": "~8.2.4", + "micromatch": "~4.0.8", + "pidtree": "~0.6.0", + "string-argv": "~0.3.2", + "yaml": "~2.5.0" }, "bin": { "lint-staged": "bin/lint-staged.js" @@ -10968,6 +11120,23 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, + "node_modules/lint-staged/node_modules/debug": { + "version": "4.3.7", + 
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", + "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, "node_modules/lint-staged/node_modules/execa": { "version": "8.0.1", "resolved": "https://registry.npmjs.org/execa/-/execa-8.0.1.tgz", @@ -11036,6 +11205,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/lint-staged/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, "node_modules/lint-staged/node_modules/npm-run-path": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", @@ -11102,26 +11277,17 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/lint-staged/node_modules/yaml": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.4.tgz", - "integrity": "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==", - "dev": true, - "engines": { - "node": ">= 14" - } - }, "node_modules/listr2": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/listr2/-/listr2-8.0.1.tgz", - "integrity": "sha512-ovJXBXkKGfq+CwmKTjluEqFi3p4h8xvkxGQQAQan22YCgef4KZ1mKGjzfGh6PL6AW5Csw0QiQPNuQyH+6Xk3hA==", + "version": "8.2.4", + "resolved": "https://registry.npmjs.org/listr2/-/listr2-8.2.4.tgz", + "integrity": "sha512-opevsywziHd3zHCVQGAj8zu+Z3yHNkkoYhWIGnq54RrCVwLz0MozotJEDnKsIBLvkfLGN6BLOyAeRrYI0pKA4g==", "dev": true, "dependencies": { "cli-truncate": "^4.0.0", "colorette": "^2.0.20", "eventemitter3": "^5.0.1", - "log-update": "^6.0.0", - "rfdc": "^1.3.0", + "log-update": "^6.1.0", 
+ "rfdc": "^1.4.1", "wrap-ansi": "^9.0.0" }, "engines": { @@ -11129,9 +11295,10 @@ } }, "node_modules/listr2/node_modules/ansi-regex": { - "version": "6.0.1", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", "dev": true, - "license": "MIT", "engines": { "node": ">=12" }, @@ -11141,8 +11308,9 @@ }, "node_modules/listr2/node_modules/ansi-styles": { "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", "dev": true, - "license": "MIT", "engines": { "node": ">=12" }, @@ -11151,19 +11319,16 @@ } }, "node_modules/listr2/node_modules/emoji-regex": { - "version": "10.3.0", - "dev": true, - "license": "MIT" - }, - "node_modules/listr2/node_modules/eventemitter3": { - "version": "5.0.1", - "dev": true, - "license": "MIT" + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.4.0.tgz", + "integrity": "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==", + "dev": true }, "node_modules/listr2/node_modules/string-width": { - "version": "7.0.0", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", + "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", "dev": true, - "license": "MIT", "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", @@ -11178,8 +11343,9 @@ }, "node_modules/listr2/node_modules/strip-ansi": { "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", "dev": true, - "license": 
"MIT", "dependencies": { "ansi-regex": "^6.0.1" }, @@ -11192,8 +11358,9 @@ }, "node_modules/listr2/node_modules/wrap-ansi": { "version": "9.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.0.tgz", + "integrity": "sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^6.2.1", "string-width": "^7.0.0", @@ -11420,13 +11587,14 @@ } }, "node_modules/log-update": { - "version": "6.0.0", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/log-update/-/log-update-6.1.0.tgz", + "integrity": "sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==", "dev": true, - "license": "MIT", "dependencies": { - "ansi-escapes": "^6.2.0", - "cli-cursor": "^4.0.0", - "slice-ansi": "^7.0.0", + "ansi-escapes": "^7.0.0", + "cli-cursor": "^5.0.0", + "slice-ansi": "^7.1.0", "strip-ansi": "^7.1.0", "wrap-ansi": "^9.0.0" }, @@ -11438,23 +11606,25 @@ } }, "node_modules/log-update/node_modules/ansi-escapes": { - "version": "6.2.0", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.0.0.tgz", + "integrity": "sha512-GdYO7a61mR0fOlAsvC9/rIHf7L96sBc6dEWzeOu+KAea5bZyQRPIpojrVoI4AXGJS/ycu/fBTdLrUkA4ODrvjw==", "dev": true, - "license": "MIT", "dependencies": { - "type-fest": "^3.0.0" + "environment": "^1.0.0" }, "engines": { - "node": ">=14.16" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/log-update/node_modules/ansi-regex": { - "version": "6.0.1", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", "dev": true, - "license": "MIT", "engines": { "node": ">=12" }, @@ -11464,8 +11634,9 @@ }, "node_modules/log-update/node_modules/ansi-styles": { "version": "6.2.1", 
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", "dev": true, - "license": "MIT", "engines": { "node": ">=12" }, @@ -11474,28 +11645,31 @@ } }, "node_modules/log-update/node_modules/cli-cursor": { - "version": "4.0.0", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-5.0.0.tgz", + "integrity": "sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==", "dev": true, - "license": "MIT", "dependencies": { - "restore-cursor": "^4.0.0" + "restore-cursor": "^5.0.0" }, "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/log-update/node_modules/emoji-regex": { - "version": "10.3.0", - "dev": true, - "license": "MIT" + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.4.0.tgz", + "integrity": "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==", + "dev": true }, "node_modules/log-update/node_modules/is-fullwidth-code-point": { "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-5.0.0.tgz", + "integrity": "sha512-OVa3u9kkBbw7b8Xw5F9P+D/T9X+Z4+JruYVNapTjPYZYUznQ5YfWeFkOj606XYYW8yugTfC8Pj0hYqvi4ryAhA==", "dev": true, - "license": "MIT", "dependencies": { "get-east-asian-width": "^1.0.0" }, @@ -11506,25 +11680,54 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/log-update/node_modules/onetime": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-7.0.0.tgz", + "integrity": "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==", + "dev": true, + "dependencies": { + "mimic-function": "^5.0.0" + }, + "engines": { + 
"node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/log-update/node_modules/restore-cursor": { - "version": "4.0.0", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-5.1.0.tgz", + "integrity": "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==", "dev": true, - "license": "MIT", "dependencies": { - "onetime": "^5.1.0", - "signal-exit": "^3.0.2" + "onetime": "^7.0.0", + "signal-exit": "^4.1.0" }, "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/log-update/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/log-update/node_modules/slice-ansi": { "version": "7.1.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-7.1.0.tgz", + "integrity": "sha512-bSiSngZ/jWeX93BqeIAbImyTbEihizcwNjFoRUIY/T1wWQsfsm2Vw1agPKylXvQTU7iASGdHhyqRlqQzfz+Htg==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^6.2.1", "is-fullwidth-code-point": "^5.0.0" @@ -11537,9 +11740,10 @@ } }, "node_modules/log-update/node_modules/string-width": { - "version": "7.0.0", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", + "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", "dev": true, - "license": "MIT", "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", @@ -11554,8 +11758,9 @@ }, "node_modules/log-update/node_modules/strip-ansi": { "version": 
"7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", "dev": true, - "license": "MIT", "dependencies": { "ansi-regex": "^6.0.1" }, @@ -11566,21 +11771,11 @@ "url": "https://github.com/chalk/strip-ansi?sponsor=1" } }, - "node_modules/log-update/node_modules/type-fest": { - "version": "3.13.1", - "dev": true, - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/log-update/node_modules/wrap-ansi": { "version": "9.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.0.tgz", + "integrity": "sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^6.2.1", "string-width": "^7.0.0", @@ -11593,6 +11788,30 @@ "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, + "node_modules/logform": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/logform/-/logform-2.6.1.tgz", + "integrity": "sha512-CdaO738xRapbKIMVn2m4F6KTj4j7ooJ8POVnebSgKo3KBz5axNXRAL7ZdRjIV6NOr2Uf4vjtRkxrFETOioCqSA==", + "dependencies": { + "@colors/colors": "1.6.0", + "@types/triple-beam": "^1.3.2", + "fecha": "^4.2.0", + "ms": "^2.1.1", + "safe-stable-stringify": "^2.3.1", + "triple-beam": "^1.3.0" + }, + "engines": { + "node": ">= 12.0.0" + } + }, + "node_modules/logform/node_modules/@colors/colors": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.6.0.tgz", + "integrity": "sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==", + "engines": { + "node": ">=0.1.90" + } + }, "node_modules/long": { "version": "4.0.0", "license": "Apache-2.0" @@ -11846,10 +12065,13 @@ "license": "MIT" }, "node_modules/merge-descriptors": { - 
"version": "1.0.1", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==", - "dev": true + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } }, "node_modules/merge-stream": { "version": "2.0.0", @@ -11874,11 +12096,12 @@ } }, "node_modules/micromatch": { - "version": "4.0.5", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dev": true, - "license": "MIT", "dependencies": { - "braces": "^3.0.2", + "braces": "^3.0.3", "picomatch": "^2.3.1" }, "engines": { @@ -11901,7 +12124,6 @@ }, "node_modules/mime-db": { "version": "1.52.0", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.6" @@ -11909,7 +12131,6 @@ }, "node_modules/mime-types": { "version": "2.1.35", - "dev": true, "license": "MIT", "dependencies": { "mime-db": "1.52.0" @@ -11926,6 +12147,18 @@ "node": ">=6" } }, + "node_modules/mimic-function": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz", + "integrity": "sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==", + "dev": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/mimic-response": { "version": "1.0.1", "license": "MIT", @@ -14935,6 +15168,14 @@ "wrappy": "1" } }, + "node_modules/one-time": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/one-time/-/one-time-1.0.0.tgz", + 
"integrity": "sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g==", + "dependencies": { + "fn.name": "1.x.x" + } + }, "node_modules/onetime": { "version": "5.1.2", "dev": true, @@ -15308,9 +15549,9 @@ "license": "MIT" }, "node_modules/path-to-regexp": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", - "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==", + "version": "0.1.10", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.10.tgz", + "integrity": "sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w==", "dev": true }, "node_modules/path-type": { @@ -15659,12 +15900,12 @@ "license": "MIT" }, "node_modules/qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", "dev": true, "dependencies": { - "side-channel": "^1.0.4" + "side-channel": "^1.0.6" }, "engines": { "node": ">=0.6" @@ -16033,9 +16274,10 @@ } }, "node_modules/rfdc": { - "version": "1.3.0", - "dev": true, - "license": "MIT" + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz", + "integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==", + "dev": true }, "node_modules/rimraf": { "version": "3.0.2", @@ -16169,7 +16411,6 @@ }, "node_modules/safe-buffer": { "version": "5.1.2", - "dev": true, "license": "MIT" }, "node_modules/safe-regex-test": { @@ -16185,6 +16426,14 @@ "url": "https://github.com/sponsors/ljharb" } }, + 
"node_modules/safe-stable-stringify": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.5.0.tgz", + "integrity": "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==", + "engines": { + "node": ">=10" + } + }, "node_modules/safer-buffer": { "version": "2.1.2", "dev": true, @@ -16653,9 +16902,9 @@ } }, "node_modules/send": { - "version": "0.18.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", - "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", "dev": true, "dependencies": { "debug": "2.6.9", @@ -16691,6 +16940,15 @@ "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", "dev": true }, + "node_modules/send/node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, "node_modules/send/node_modules/mime": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", @@ -16710,15 +16968,15 @@ "dev": true }, "node_modules/serve-static": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz", - "integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==", + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", 
"dev": true, "dependencies": { - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "parseurl": "~1.3.3", - "send": "0.18.0" + "send": "0.19.0" }, "engines": { "node": ">= 0.8.0" @@ -16731,14 +16989,17 @@ "dev": true }, "node_modules/set-function-length": { - "version": "1.1.1", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", "dev": true, - "license": "MIT", "dependencies": { - "define-data-property": "^1.1.1", - "get-intrinsic": "^1.2.1", + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.0" + "has-property-descriptors": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -16828,13 +17089,18 @@ } }, "node_modules/side-channel": { - "version": "1.0.4", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", "dev": true, - "license": "MIT", "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -16869,6 +17135,19 @@ "node": ">=4" } }, + "node_modules/simple-swizzle": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", + "integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==", + "dependencies": { + "is-arrayish": "^0.3.1" + } + }, + "node_modules/simple-swizzle/node_modules/is-arrayish": { + "version": "0.3.2", + "resolved": 
"https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", + "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" + }, "node_modules/sisteransi": { "version": "1.0.5", "dev": true, @@ -16895,8 +17174,9 @@ }, "node_modules/slice-ansi": { "version": "5.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-5.0.0.tgz", + "integrity": "sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ==", "dev": true, - "license": "MIT", "dependencies": { "ansi-styles": "^6.0.0", "is-fullwidth-code-point": "^4.0.0" @@ -16910,8 +17190,9 @@ }, "node_modules/slice-ansi/node_modules/ansi-styles": { "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", "dev": true, - "license": "MIT", "engines": { "node": ">=12" }, @@ -16921,8 +17202,9 @@ }, "node_modules/slice-ansi/node_modules/is-fullwidth-code-point": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-4.0.0.tgz", + "integrity": "sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=12" }, @@ -17079,7 +17361,6 @@ }, "node_modules/string_decoder": { "version": "1.1.1", - "dev": true, "license": "MIT", "dependencies": { "safe-buffer": "~5.1.0" @@ -17410,6 +17691,11 @@ "node": ">=8" } }, + "node_modules/text-hex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz", + "integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==" + }, "node_modules/text-table": { "version": "0.2.0", "dev": true, @@ -17502,6 +17788,14 @@ "node": ">=8" } }, + "node_modules/triple-beam": { + "version": "1.4.1", + "resolved": 
"https://registry.npmjs.org/triple-beam/-/triple-beam-1.4.1.tgz", + "integrity": "sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg==", + "engines": { + "node": ">= 14.0.0" + } + }, "node_modules/ts-api-utils": { "version": "1.0.3", "dev": true, @@ -17850,6 +18144,16 @@ "version": "1.0.13", "license": "ISC" }, + "node_modules/typed-emitter": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/typed-emitter/-/typed-emitter-2.1.0.tgz", + "integrity": "sha512-g/KzbYKbH5C2vPkaXGu8DJlHrGKHLsM25Zg9WuC9pMGfuvT+X25tZQWo5fK1BjBm8+UrVE9LDCvaY0CQk+fXDA==", + "dev": true, + "license": "MIT", + "optionalDependencies": { + "rxjs": "*" + } + }, "node_modules/typedarray": { "version": "0.0.6", "dev": true, @@ -18201,7 +18505,6 @@ }, "node_modules/util-deprecate": { "version": "1.0.2", - "dev": true, "license": "MIT" }, "node_modules/utils-merge": { @@ -18384,6 +18687,74 @@ "node": ">=4" } }, + "node_modules/winston": { + "version": "3.14.2", + "resolved": "https://registry.npmjs.org/winston/-/winston-3.14.2.tgz", + "integrity": "sha512-CO8cdpBB2yqzEf8v895L+GNKYJiEq8eKlHU38af3snQBQ+sdAIUepjMSguOIJC7ICbzm0ZI+Af2If4vIJrtmOg==", + "dependencies": { + "@colors/colors": "^1.6.0", + "@dabh/diagnostics": "^2.0.2", + "async": "^3.2.3", + "is-stream": "^2.0.0", + "logform": "^2.6.0", + "one-time": "^1.0.0", + "readable-stream": "^3.4.0", + "safe-stable-stringify": "^2.3.1", + "stack-trace": "0.0.x", + "triple-beam": "^1.3.0", + "winston-transport": "^4.7.0" + }, + "engines": { + "node": ">= 12.0.0" + } + }, + "node_modules/winston-transport": { + "version": "4.7.1", + "resolved": "https://registry.npmjs.org/winston-transport/-/winston-transport-4.7.1.tgz", + "integrity": "sha512-wQCXXVgfv/wUPOfb2x0ruxzwkcZfxcktz6JIMUaPLmcNhO4bZTwA/WtDWK74xV3F2dKu8YadrFv0qhwYjVEwhA==", + "dependencies": { + "logform": "^2.6.1", + "readable-stream": "^3.6.2", + "triple-beam": "^1.3.0" + }, + "engines": { + "node": ">= 12.0.0" + } + }, + 
"node_modules/winston-transport/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/winston/node_modules/@colors/colors": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.6.0.tgz", + "integrity": "sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==", + "engines": { + "node": ">=0.1.90" + } + }, + "node_modules/winston/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/word-wrap": { "version": "1.2.5", "dev": true, @@ -18479,6 +18850,18 @@ "dev": true, "license": "ISC" }, + "node_modules/yaml": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.5.1.tgz", + "integrity": "sha512-bLQOjaX/ADgQ20isPJRvF0iRUHIxVhYvr53Of7wGcWlO2jvtUlH5m87DsmulFVxRpNLOnI4tB6p/oh8D7kpn9Q==", + "dev": true, + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14" + } + }, "node_modules/yargs": { "version": "17.7.2", "license": "MIT", diff --git a/package.json b/package.json index df204ecf..eea7ee97 100644 --- a/package.json +++ b/package.json @@ -119,6 +119,7 @@ "eslint-plugin-import": "^2.29.1", "eslint-plugin-prettier": "^5.0.1", "express": "^4.19.2", + "get-port-please": "^3.1.2", "grpc-tools": "^1.12.4", "husky": "^8.0.3", "jest": 
"^29.7.0", @@ -131,6 +132,7 @@ "ts-protoc-gen": "^0.15.0", "tsconfig-paths": "^4.2.0", "tsd": "^0.31.0", + "typed-emitter": "^2.1.0", "typedoc": "^0.25.9", "typedoc-plugin-include-example": "^1.2.0", "typedoc-plugin-missing-exports": "^2.2.0", @@ -142,11 +144,13 @@ "dependencies": { "@grpc/grpc-js": "1.10.9", "@grpc/proto-loader": "0.7.13", + "@types/form-data": "^2.2.1", "chalk": "^2.4.2", "console-stamp": "^3.0.2", "dayjs": "^1.8.15", "debug": "^4.3.4", "fast-xml-parser": "^4.1.3", + "form-data": "^4.0.1", "got": "^11.8.6", "jwt-decode": "^4.0.0", "lodash.mergewith": "^4.6.2", @@ -157,6 +161,7 @@ "reflect-metadata": "^0.2.1", "stack-trace": "0.0.10", "typed-duration": "^1.0.12", - "uuid": "^7.0.3" + "uuid": "^7.0.3", + "winston": "^3.14.2" } } \ No newline at end of file diff --git a/src/__tests__/c8/rest/activateJobs.rest.spec.ts b/src/__tests__/c8/rest/activateJobs.rest.spec.ts new file mode 100644 index 00000000..f9072b96 --- /dev/null +++ b/src/__tests__/c8/rest/activateJobs.rest.spec.ts @@ -0,0 +1,43 @@ +import path from 'node:path' + +import { CamundaRestClient } from '../../../c8/lib/CamundaRestClient' + +let processDefinitionId: string +const restClient = new CamundaRestClient() + +beforeAll(async () => { + const res = await restClient.deployResourcesFromFiles([ + path.join( + '.', + 'src', + '__tests__', + 'testdata', + 'hello-world-complete-rest.bpmn' + ), + ]) + processDefinitionId = res.processes[0].processDefinitionId +}) + +test('Can service a task', (done) => { + restClient + .createProcessInstance({ + processDefinitionId, + variables: { + someNumberField: 8, + }, + }) + .then(() => { + restClient + .activateJobs({ + maxJobsToActivate: 2, + requestTimeout: 5000, + timeout: 5000, + type: 'console-log-complete-rest', + worker: 'test', + }) + .then((jobs) => { + expect(jobs.length).toBe(1) + jobs[0].complete().then(() => done()) + }) + }) +}) diff --git a/src/__tests__/c8/rest/broadcastSigna.rest.spec.ts 
b/src/__tests__/c8/rest/broadcastSigna.rest.spec.ts new file mode 100644 index 00000000..e9b6a2ac --- /dev/null +++ b/src/__tests__/c8/rest/broadcastSigna.rest.spec.ts @@ -0,0 +1,52 @@ +import { CamundaRestClient } from '../../../c8/lib/CamundaRestClient' +import { LosslessDto } from '../../../lib' +import { cancelProcesses } from '../../../zeebe/lib/cancelProcesses' + +jest.setTimeout(60000) + +const c8 = new CamundaRestClient() +let pid: string + +beforeAll(async () => { + const res = await c8.deployResourcesFromFiles([ + './src/__tests__/testdata/Signal.bpmn', + ]) + pid = res.processes[0].processDefinitionKey + await cancelProcesses(pid) +}) + +afterAll(async () => { + await cancelProcesses(pid) +}) + +test('Can start a process with a signal', async () => { + await c8.deployResourcesFromFiles(['./src/__tests__/testdata/Signal.bpmn']) + + const res = await c8.broadcastSignal({ + signalName: 'test-signal', + variables: { + success: true, + }, + }) + + expect(res.signalKey).toBeTruthy() + + await new Promise((resolve) => { + const w = c8.createJobWorker({ + type: 'signal-service-task', + worker: 'signal-worker', + timeout: 10000, + pollIntervalMs: 1000, + maxJobsToActivate: 10, + inputVariableDto: class extends LosslessDto { + success!: boolean + }, + jobHandler: (job) => { + const ack = job.complete() + expect(job.variables.success).toBe(true) + w.stop().then(() => resolve(null)) + return ack + }, + }) + }) +}) diff --git a/src/__tests__/c8/rest/createProcess.rest.spec.ts b/src/__tests__/c8/rest/createProcess.rest.spec.ts new file mode 100644 index 00000000..7422dd1e --- /dev/null +++ b/src/__tests__/c8/rest/createProcess.rest.spec.ts @@ -0,0 +1,105 @@ +import path from 'node:path' + +import { CamundaRestClient } from '../../../c8/lib/CamundaRestClient' +import { createDtoInstance, LosslessDto } from '../../../lib' + +jest.setTimeout(17000) + +let processDefinitionId: string +let processDefinitionKey: string +const restClient = new CamundaRestClient() + 
+beforeAll(async () => { + const res = await restClient.deployResourcesFromFiles([ + path.join('.', 'src', '__tests__', 'testdata', 'create-process-rest.bpmn'), + ]) + ;({ processDefinitionId, processDefinitionKey } = res.processes[0]) +}) + +class myVariableDto extends LosslessDto { + someNumberField?: number +} + +test('Can create a process from bpmn id', (done) => { + restClient + .createProcessInstance({ + processDefinitionId, + variables: { + someNumberField: 8, + }, + }) + .then((res) => { + expect(res.processDefinitionKey).toEqual(processDefinitionKey) + done() + }) +}) + +test('Can create a process from process definition key', (done) => { + restClient + .createProcessInstance({ + processDefinitionKey, + variables: { + someNumberField: 8, + }, + }) + .then((res) => { + expect(res.processDefinitionKey).toEqual(processDefinitionKey) + done() + }) +}) + +test('Can create a process with a lossless Dto', (done) => { + restClient + .createProcessInstance({ + processDefinitionKey, + variables: createDtoInstance(myVariableDto, { someNumberField: 8 }), + }) + .then((res) => { + expect(res.processDefinitionKey).toEqual(processDefinitionKey) + done() + }) +}) + +test('Can create a process and get the result', (done) => { + const variables = createDtoInstance(myVariableDto, { someNumberField: 8 }) + restClient + .createProcessInstanceWithResult({ + processDefinitionKey, + variables, + outputVariablesDto: myVariableDto, + }) + .then((res) => { + expect(res.processDefinitionKey).toEqual(processDefinitionKey) + expect(res.variables.someNumberField).toBe(8) + done() + }) +}) + +test('Can create a process and get the result', (done) => { + restClient + .createProcessInstanceWithResult({ + processDefinitionKey, + variables: createDtoInstance(myVariableDto, { someNumberField: 9 }), + }) + .then((res) => { + expect(res.processDefinitionKey).toEqual(processDefinitionKey) + // Without an outputVariablesDto, the response variables will be of type unknown + // 
eslint-disable-next-line @typescript-eslint/no-explicit-any + expect((res.variables as any).someNumberField).toBe(9) + done() + }) +}) + +test('What happens if we time out?', async () => { + const res = await restClient.deployResourcesFromFiles([ + path.join('.', 'src', '__tests__', 'testdata', 'hello-world-complete.bpmn'), + ]) + const processDefinitionId = res.processes[0].processDefinitionId + await expect( + restClient.createProcessInstanceWithResult({ + processDefinitionId, + variables: createDtoInstance(myVariableDto, { someNumberField: 9 }), + requestTimeout: 20000, + }) + ).rejects.toThrow('504') +}) diff --git a/src/__tests__/c8/rest/deleteResource.rest.spec.ts b/src/__tests__/c8/rest/deleteResource.rest.spec.ts new file mode 100644 index 00000000..a00b2d4f --- /dev/null +++ b/src/__tests__/c8/rest/deleteResource.rest.spec.ts @@ -0,0 +1,21 @@ +import { CamundaRestClient } from '../../../c8/lib/CamundaRestClient' + +const c8 = new CamundaRestClient() + +test('It can delete a resource', async () => { + const res = await c8.deployResourcesFromFiles([ + './src/__tests__/testdata/Delete-Resource-Rest.bpmn', + ]) + const key = res.processes[0].processDefinitionKey + const id = res.processes[0].processDefinitionId + const wfi = await c8.createProcessInstance({ + processDefinitionId: id, + variables: {}, + }) + expect(wfi.processDefinitionKey).toBe(key) + await c8.deleteResource({ resourceKey: key }) + // After deleting the process definition, we should not be able to start a new process instance. 
+ await expect( + c8.createProcessInstance({ processDefinitionId: id, variables: {} }) + ).rejects.toThrow('404') +}) diff --git a/src/__tests__/c8/rest/migrateProcess.rest.spec.ts b/src/__tests__/c8/rest/migrateProcess.rest.spec.ts new file mode 100644 index 00000000..7acac7cd --- /dev/null +++ b/src/__tests__/c8/rest/migrateProcess.rest.spec.ts @@ -0,0 +1,113 @@ +import path from 'path' + +import { CamundaJobWorker } from '../../../c8/lib/CamundaJobWorker' +import { CamundaRestClient } from '../../../c8/lib/CamundaRestClient' +import { LosslessDto } from '../../../lib' + +const c8 = new CamundaRestClient() + +class CustomHeaders extends LosslessDto { + ProcessVersion!: number +} + +test('RestClient can migrate a process instance', async () => { + // Deploy a process model + await c8.deployResourcesFromFiles([ + path.join( + '.', + 'src', + '__tests__', + 'testdata', + 'MigrateProcess-Rest-Version-1.bpmn' + ), + ]) + + // Create an instance of the process model + const processInstance = await c8.createProcessInstance({ + processDefinitionId: 'migrant-work-rest', + variables: {}, + }) + + let instanceKey = '' + let processVersion = 0 + + await new Promise>((res) => { + const w = c8.createJobWorker({ + type: 'migrant-rest-worker-task-1', + maxJobsToActivate: 10, + timeout: 30000, + pollIntervalMs: 1000, + worker: 'Migrant Worker 1', + customHeadersDto: CustomHeaders, + jobHandler: async (job) => { + instanceKey = job.processInstanceKey + processVersion = job.customHeaders.ProcessVersion as number + return job.complete().then(async (outcome) => { + res(w) + return outcome + }) + }, + }) + }).then((w) => w.stop()) + + expect(instanceKey).toBe(processInstance.processInstanceKey) + expect(processVersion).toBe('1') + + // Deploy the updated process model + const res1 = await c8.deployResourcesFromFiles([ + './src/__tests__/testdata/MigrateProcess-Rest-Version-2.bpmn', + ]) + + // Migrate the process instance to the updated process model + + await 
c8.migrateProcessInstance({ + processInstanceKey: processInstance.processInstanceKey, + mappingInstructions: [ + { + sourceElementId: 'Activity_050vmrm', + targetElementId: 'Activity_050vmrm', + }, + ], + targetProcessDefinitionKey: res1.processes[0].processDefinitionKey, + }) + + // Complete the job in the process instance + + await new Promise>((res) => { + const w = c8.createJobWorker({ + type: 'migration-rest-checkpoint', + worker: 'Migrant Checkpoint worker', + maxJobsToActivate: 10, + timeout: 10000, + pollIntervalMs: 1000, + jobHandler: async (job) => { + return job.complete().then(async (outcome) => { + res(w) + return outcome + }) + }, + }) + }).then((w) => w.stop()) + + await new Promise>((res) => { + const w = c8.createJobWorker({ + type: 'migrant-rest-worker-task-2', + worker: 'Migrant Worker 2', + maxJobsToActivate: 10, + timeout: 30000, + pollIntervalMs: 1000, + customHeadersDto: CustomHeaders, + jobHandler: async (job) => { + instanceKey = job.processInstanceKey + processVersion = job.customHeaders.ProcessVersion as number + return job.complete().then(async (outcome) => { + res(w) + return outcome + }) + }, + }) + }).then((w) => w.stop()) + + expect(instanceKey).toBe(processInstance.processInstanceKey) + expect(processVersion).toBe('2') +}) diff --git a/src/__tests__/c8/rest/parseJobs.unit.spec.ts b/src/__tests__/c8/rest/parseJobs.unit.spec.ts new file mode 100644 index 00000000..07c35a6f --- /dev/null +++ b/src/__tests__/c8/rest/parseJobs.unit.spec.ts @@ -0,0 +1,23 @@ +import { createSpecializedRestApiJobClass } from '../../../c8/lib/RestApiJobClassFactory' +import { Int64String, LosslessDto, losslessParse } from '../../../lib' + +class Variables extends LosslessDto { + @Int64String + bigValue!: string +} + +class CustomHeaders extends LosslessDto { + @Int64String + bigHeader!: string + smallHeader!: number +} + +const myJob = createSpecializedRestApiJobClass(Variables, CustomHeaders) + +test('It correctly parses variables and custom headers', () => 
{ + const jsonString = `{"jobs":[{"key":2251799813737371,"type":"console-log-complete","processInstanceKey":2251799813737366,"processDefinitionId":"hello-world-complete","processDefinitionVersion":1,"processDefinitionKey":2251799813736299,"elementId":"ServiceTask_0g6tf5f","elementInstanceKey":2251799813737370,"customHeaders":{"message":"Hello World","bigHeader":1,"smallHeader":2},"worker":"test","retries":100,"deadline":1725501895792,"variables":{"bigValue":3},"tenantId":""}]}` + const res = losslessParse(jsonString, myJob, 'jobs') + expect(res[0].variables.bigValue).toBe('3') + expect(res[0].customHeaders.smallHeader).toBe(2) + expect(res[0].customHeaders.bigHeader).toBe('1') +}) diff --git a/src/__tests__/c8/rest/pinClock.rest.spec.ts b/src/__tests__/c8/rest/pinClock.rest.spec.ts new file mode 100644 index 00000000..64535ab3 --- /dev/null +++ b/src/__tests__/c8/rest/pinClock.rest.spec.ts @@ -0,0 +1,10 @@ +import { CamundaRestClient } from '../../../c8/lib/CamundaRestClient' + +test('We can pin the clock, and reset it', async () => { + const now = Date.now() + const c8 = new CamundaRestClient() + await c8.pinInternalClock(now) // Pin the clock to the present time + await c8.pinInternalClock(now + 1000) // Move the clock forward 1 second + await c8.resetClock() // Reset the clock + expect(now).toEqual(now) +}) diff --git a/src/__tests__/c8/rest/publishMessage.rest.spec.ts b/src/__tests__/c8/rest/publishMessage.rest.spec.ts new file mode 100644 index 00000000..c6594a00 --- /dev/null +++ b/src/__tests__/c8/rest/publishMessage.rest.spec.ts @@ -0,0 +1,95 @@ +import { v4 } from 'uuid' + +import { CamundaRestClient } from '../../../c8/lib/CamundaRestClient' +import { LosslessDto } from '../../../lib' + +const c8 = new CamundaRestClient() + +beforeAll(async () => { + await c8.deployResourcesFromFiles([ + './src/__tests__/testdata/rest-message-test.bpmn', + ]) +}) + +test('Can publish a message', (done) => { + const uuid = v4() + const outputVariablesDto = class extends 
LosslessDto { + messageReceived!: boolean + } + c8.createProcessInstanceWithResult({ + processDefinitionId: 'rest-message-test', + variables: { + correlationId: uuid, + }, + outputVariablesDto, + }).then((result) => { + expect(result.variables.messageReceived).toBe(true) + done() + }) + c8.publishMessage({ + correlationKey: uuid, + messageId: uuid, + name: 'rest-message', + variables: { + messageReceived: true, + }, + timeToLive: 10000, + }) +}) + +test('Can correlate a message', (done) => { + const uuid = v4() + const outputVariablesDto = class extends LosslessDto { + messageReceived!: boolean + } + c8.createProcessInstanceWithResult({ + processDefinitionId: 'rest-message-test', + variables: { + correlationId: uuid, + }, + outputVariablesDto, + }).then((result) => { + expect(result.variables.messageReceived).toBe(true) + done() + }) + setTimeout( + () => + c8.correlateMessage({ + correlationKey: uuid, + name: 'rest-message', + variables: { + messageReceived: true, + }, + }), + 1000 + ) +}) + +test('Correlate message returns expected data', (done) => { + const uuid = v4() + let processInstanceKey: string + c8.createProcessInstance({ + processDefinitionId: 'rest-message-test', + variables: { + correlationId: uuid, + }, + }).then((result) => { + processInstanceKey = result.processInstanceKey + setTimeout( + () => + c8 + .correlateMessage({ + correlationKey: uuid, + name: 'rest-message', + variables: { + messageReceived: true, + }, + }) + .then((res) => { + expect(res.processInstanceKey).toBe(processInstanceKey) + done() + }), + 1000 + ) + }) +}) diff --git a/src/__tests__/config/jest.cleanup.ts b/src/__tests__/config/jest.cleanup.ts index 80bdccf1..2bdfb119 100644 --- a/src/__tests__/config/jest.cleanup.ts +++ b/src/__tests__/config/jest.cleanup.ts @@ -67,7 +67,6 @@ export const cleanUp = async () => { await zeebe.cancelProcessInstance(key) console.log(`Cancelled process instance ${key}`) } catch (e) { - console.log(e) if (!(e as Error).message.startsWith('5 
NOT_FOUND')) { console.log('Failed to cancel process instance', key) console.log((e as Error).message) diff --git a/src/__tests__/lib/GetCustomCertificateBuffer.unit.spec.ts b/src/__tests__/lib/GetCustomCertificateBuffer.unit.spec.ts index 481d5b67..eba2b756 100644 --- a/src/__tests__/lib/GetCustomCertificateBuffer.unit.spec.ts +++ b/src/__tests__/lib/GetCustomCertificateBuffer.unit.spec.ts @@ -5,6 +5,7 @@ import path from 'path' import { loadPackageDefinition, Server, ServerCredentials } from '@grpc/grpc-js' import { loadSync } from '@grpc/proto-loader' import express from 'express' +import { getPort } from 'get-port-please' import { BrokerInfo, @@ -82,86 +83,95 @@ test('Can use a custom root certificate to connect to a REST API', async () => { server.close() }) -test('gRPC server with self-signed certificate', (done) => { - // Load the protobuf definition - const packageDefinition = loadSync( - path.join(__dirname, '..', '..', 'proto', 'zeebe.proto'), - { - keepCase: true, - longs: String, - enums: String, - defaults: true, - oneofs: true, - } - ) +test('gRPC server with self-signed certificate', async () => { + // eslint-disable-next-line no-async-promise-executor + return new Promise(async (resolve) => { + // Load the protobuf definition + const packageDefinition = loadSync( + path.join(__dirname, '..', '..', 'proto', 'zeebe.proto'), + { + keepCase: true, + longs: String, + enums: String, + defaults: true, + oneofs: true, + } + ) - const zeebeProto = loadPackageDefinition( - packageDefinition - // eslint-disable-next-line @typescript-eslint/no-explicit-any - ) as unknown as { gateway_protocol: { Gateway: any } } + const zeebeProto = loadPackageDefinition( + packageDefinition + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ) as unknown as { gateway_protocol: { Gateway: any } } - // Create the server - server = new Server() + // Create the server + server = new Server() - // Add a service to the server - 
server.addService(zeebeProto.gateway_protocol.Gateway.service, { - Topology: (_, callback) => { - const t = new TopologyResponse() - const b = new BrokerInfo() - b.setHost('localhost') - const partition = new Partition() - partition.setHealth(0) - partition.setPartitionid(0) - partition.setRole(0) - b.setPartitionsList([partition]) - t.setBrokersList([b]) - callback(null, t) - }, - // Implement your service methods here - }) + // Add a service to the server + server.addService(zeebeProto.gateway_protocol.Gateway.service, { + Topology: (_, callback) => { + const t = new TopologyResponse() + const b = new BrokerInfo() + b.setHost('localhost') + const partition = new Partition() + partition.setHealth(0) + partition.setPartitionid(0) + partition.setRole(0) + b.setPartitionsList([partition]) + t.setBrokersList([b]) + callback(null, t) + }, + // Implement your service methods here + }) - // Read the key and certificate - const key = fs.readFileSync(path.join(__dirname, 'localhost.key')) - const cert = fs.readFileSync(path.join(__dirname, 'localhost.crt')) + // Read the key and certificate + const key = fs.readFileSync(path.join(__dirname, 'localhost.key')) + const cert = fs.readFileSync(path.join(__dirname, 'localhost.crt')) - // Start the server - server.bindAsync( - 'localhost:50051', - ServerCredentials.createSsl(null, [ - { - private_key: key, - cert_chain: cert, - }, - ]), - (err) => { - if (err) { - console.error(err) - done() - return - } + const port = await getPort() - const zbc = new ZeebeGrpcClient({ - config: { - CAMUNDA_OAUTH_DISABLED: true, - ZEEBE_ADDRESS: 'localhost:50051', - CAMUNDA_CUSTOM_ROOT_CERT_PATH: path.join(__dirname, 'localhost.crt'), - CAMUNDA_SECURE_CONNECTION: true, - zeebeGrpcSettings: { - ZEEBE_CLIENT_LOG_LEVEL: 'NONE', - }, + // Start the server + server.bindAsync( + `localhost:${port}`, + ServerCredentials.createSsl(null, [ + { + private_key: key, + cert_chain: cert, }, - }) - zbc.topology().then(() => { - expect(true).toBe(true) - 
zbc.close() - // Stop the server after the test - server.tryShutdown((err) => { - if (err) console.error(err) - done() + ]), + (err) => { + if (err) { + console.error(err) + resolve() + return + } + + const zbc = new ZeebeGrpcClient({ + config: { + CAMUNDA_OAUTH_DISABLED: true, + ZEEBE_ADDRESS: `localhost:${port}`, + CAMUNDA_CUSTOM_ROOT_CERT_PATH: path.join( + __dirname, + 'localhost.crt' + ), + CAMUNDA_SECURE_CONNECTION: true, + zeebeGrpcSettings: { + ZEEBE_CLIENT_LOG_LEVEL: 'NONE', + }, + }, }) - }) - } - ) + zbc.topology().then(() => { + expect(true).toBe(true) + zbc.close() + // Stop the server after the test + server.tryShutdown((err) => { + if (err) console.error(err) + resolve() + return + }) + }) + } + ) + }) }) test('gRPC server with self-signed certificate provided via string', (done) => { diff --git a/src/__tests__/lib/LosslessJsonParser.unit.spec.ts b/src/__tests__/lib/LosslessJsonParser.unit.spec.ts index 26d22244..7906413d 100644 --- a/src/__tests__/lib/LosslessJsonParser.unit.spec.ts +++ b/src/__tests__/lib/LosslessJsonParser.unit.spec.ts @@ -1,7 +1,10 @@ import { BigIntValue, + BigIntValueArray, ChildDto, + createDtoInstance, Int64String, + Int64StringArray, LosslessDto, losslessParse, losslessStringify, @@ -344,3 +347,114 @@ test('LosslessStringify correctly handles null objects', () => { const stringifiedDto = losslessStringify(json) expect(stringifiedDto).toBe(`{"abc":[null,null,null]}`) // 3 (string) }) + +test('LosslessJsonParser handles subkeys', () => { + const jsonString = `{"jobs":[{"key":2251799813737371,"type":"console-log-complete","processInstanceKey":2251799813737366,"processDefinitionId":"hello-world-complete","processDefinitionVersion":1,"processDefinitionKey":2251799813736299,"elementId":"ServiceTask_0g6tf5f","elementInstanceKey":2251799813737370,"customHeaders":{"message":"Hello World"},"worker":"test","retries":100,"deadline":1725501895792,"variables":{},"tenantId":""}]}` + + const parsed = losslessParse(jsonString, undefined, 
'jobs') + expect(parsed[0].key).toBe(2251799813737371) +}) + +test('LosslessJsonParser will throw if given stringified JSON with an unsafe integer number', () => { + let threw = false + const json = `{"unsafeNumber": 9223372036854775808}` // Unsafe integer (greater than Int64 max) + + try { + losslessParse(json) // Attempt to parse un-mapped JSON directly + } catch (e) { + threw = true + expect((e as Error).message.includes('unsafe number value')).toBe(true) + } + + expect(threw).toBe(true) +}) + +test('LosslessJsonParser will throw if given stringified JSON with an unsafe integer number, even with a Dto', () => { + let threw = false + const json = `{"unsafeNumber": 9223372036854775808}` // Unsafe integer (greater than Int64 max) + + class Dto extends LosslessDto { + unsafeNumber!: number + } + + try { + losslessParse(json, Dto) // Attempt to parse mapped JSON without a mapping + } catch (e) { + threw = true + expect((e as Error).message.includes('unsafe number value')).toBe(true) + } + + expect(threw).toBe(true) +}) + +test('It rejects Date, Map, and Set types', () => { + class Dto extends LosslessDto { + date?: Date + name?: string + map?: Map + set?: Set + } + const date = new Date() + const dto = createDtoInstance(Dto, { date, name: 'me' }) + expect(() => losslessStringify(dto)).toThrow('Date') + const mapDto = createDtoInstance(Dto, { map: new Map() }) + expect(() => losslessStringify(mapDto)).toThrow('Map') + const setDto = createDtoInstance(Dto, { set: new Set() }) + expect(() => losslessStringify(setDto)).toThrow('Set') +}) + +test('It correctly handles a number array in a subkey', () => { + const json = `{"message":"Hello from automation","userId":null,"sendTo":[12022907,12022896,12022831]}` + const res = losslessParse(json) + expect(res.sendTo[0]).toBe(12022907) +}) + +test('It correctly handles a number array in a subkey with a DTO (Int64StringArray)', () => { + class Dto extends LosslessDto { + message!: string + userId!: number + @Int64StringArray + 
sendTo!: string[] + } + + const json = `{"message":"Hello from automation","userId":null,"sendTo":[12022907,12022896,12022831]}` + const res = losslessParse(json, Dto) + expect(res.sendTo[0]).toBe('12022907') +}) + +test('It correctly handles a number array in a subkey with a DTO (BigIntValueArray)', () => { + class Dto extends LosslessDto { + message!: string + userId!: number + @BigIntValueArray + sendTo!: string[] + } + + const json = `{"message":"Hello from automation","userId":null,"sendTo":[12022907,12022896,12022831]}` + const res = losslessParse(json, Dto) + expect(res.sendTo[0]).toBe(BigInt('12022907')) +}) + +test('It correctly throws when encountering a number rather than an array in a subkey with a DTO (Int64StringArray)', () => { + class Dto extends LosslessDto { + message!: string + userId!: number + @Int64StringArray + sendTo!: string[] + } + + const json = `{"message":"Hello from automation","userId":null,"sendTo":12022907}` + expect(() => losslessParse(json, Dto)).toThrow('expected Array') +}) + +test('It correctly throws when encountering a number rather than an array in a subkey with a DTO (BigIntValueArray)', () => { + class Dto extends LosslessDto { + message!: string + userId!: number + @BigIntValueArray + sendTo!: string[] + } + + const json = `{"message":"Hello from automation","userId":null,"sendTo":12022907}` + expect(() => losslessParse(json, Dto)).toThrow('expected Array') +}) diff --git a/src/__tests__/oauth/OAuthProvider.unit.spec.ts b/src/__tests__/oauth/OAuthProvider.unit.spec.ts index b2ee1f04..66331b94 100644 --- a/src/__tests__/oauth/OAuthProvider.unit.spec.ts +++ b/src/__tests__/oauth/OAuthProvider.unit.spec.ts @@ -249,13 +249,13 @@ describe('OAuthProvider', () => { }) it('Uses form encoding for request', (done) => { - const serverPort3001 = 3001 + const serverPort3010 = 3010 const o = new OAuthProvider({ config: { CAMUNDA_ZEEBE_OAUTH_AUDIENCE: 'token', ZEEBE_CLIENT_ID: 'clientId8', ZEEBE_CLIENT_SECRET: 'clientSecret', - 
CAMUNDA_OAUTH_URL: `http://127.0.0.1:${serverPort3001}`, + CAMUNDA_OAUTH_URL: `http://127.0.0.1:${serverPort3010}`, }, }) const secret = 'YOUR_SECRET' @@ -281,7 +281,7 @@ describe('OAuthProvider', () => { }) } }) - .listen(serverPort3001) + .listen(serverPort3010) o.getToken('OPERATE') }) diff --git a/src/__tests__/operate/operate-integration.spec.ts b/src/__tests__/operate/operate-integration.spec.ts index 35487422..dc1b8772 100644 --- a/src/__tests__/operate/operate-integration.spec.ts +++ b/src/__tests__/operate/operate-integration.spec.ts @@ -2,7 +2,7 @@ import { LosslessNumber } from 'lossless-json' import { HTTPError, - RESTError, + RestError, restoreZeebeLogging, suppressZeebeLogging, } from '../../lib' @@ -95,7 +95,7 @@ test('test error type', async () => { */ const res = await c .getProcessInstance(`${p.processInstanceKey}1`) - .catch((e: RESTError) => { + .catch((e: RestError) => { // console.log(e.code) // `ERR_NON_2XX_3XX_RESPONSE` diff --git a/src/__tests__/optimize/optimize.integration.spec.ts b/src/__tests__/optimize/optimize.integration.spec.ts index fceea2ce..fda62f2a 100644 --- a/src/__tests__/optimize/optimize.integration.spec.ts +++ b/src/__tests__/optimize/optimize.integration.spec.ts @@ -1,16 +1,15 @@ -import { promises as fsPromises } from 'fs' - import { OptimizeApiClient } from '../../optimize/lib/OptimizeApiClient' +/** + * Automatically spun up environments for testing do not have data in them for read operations from Optimize. + * So this test 404s as expected. + * It is testing that we can auth correctly, and access the endpoint. + */ +// Test disabled. 
See: https://github.com/camunda/camunda-8-js-sdk/issues/253 xtest('Can get Dashboards', async () => { const id = '8a7103a7-c086-48f8-b5b7-a7f83e864688' const client = new OptimizeApiClient() - const res = await client.exportDashboardDefinitions([id]) - await fsPromises.writeFile( - 'exported-dashboard.json', - JSON.stringify(res, null, 2) - ) - expect(res).toBeTruthy() + await expect(client.exportDashboardDefinitions([id])).rejects.toThrow('404') }) test('Can get readiness', async () => { diff --git a/src/__tests__/testdata/Delete-Resource-Rest.bpmn b/src/__tests__/testdata/Delete-Resource-Rest.bpmn new file mode 100644 index 00000000..00b396ae --- /dev/null +++ b/src/__tests__/testdata/Delete-Resource-Rest.bpmn @@ -0,0 +1,32 @@ + + + + + Flow_0z9jd9c + + + Flow_0z9jd9c + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/__tests__/testdata/MigrateProcess-Rest-Version-1.bpmn b/src/__tests__/testdata/MigrateProcess-Rest-Version-1.bpmn new file mode 100644 index 00000000..8ff09430 --- /dev/null +++ b/src/__tests__/testdata/MigrateProcess-Rest-Version-1.bpmn @@ -0,0 +1,91 @@ + + + + + Flow_167nn02 + + + + Flow_1r250pk + + + + + + + + + Flow_167nn02 + Flow_04fsyv6 + + + + + + + + + + + Flow_1igeic8 + Flow_1r250pk + + + + + + + Flow_04fsyv6 + Flow_1igeic8 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/__tests__/testdata/MigrateProcess-Rest-Version-2.bpmn b/src/__tests__/testdata/MigrateProcess-Rest-Version-2.bpmn new file mode 100644 index 00000000..c26943fb --- /dev/null +++ b/src/__tests__/testdata/MigrateProcess-Rest-Version-2.bpmn @@ -0,0 +1,91 @@ + + + + + Flow_167nn02 + + + + Flow_1r250pk + + + + + + + + + Flow_167nn02 + Flow_04fsyv6 + + + + + + + + + + + Flow_1igeic8 + Flow_1r250pk + + + + + + + Flow_04fsyv6 + Flow_1igeic8 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git 
a/src/__tests__/testdata/create-process-rest.bpmn b/src/__tests__/testdata/create-process-rest.bpmn new file mode 100644 index 00000000..4f6d520e --- /dev/null +++ b/src/__tests__/testdata/create-process-rest.bpmn @@ -0,0 +1,32 @@ + + + + + Flow_15yxzfg + + + Flow_15yxzfg + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/__tests__/testdata/hello-world-complete-rest.bpmn b/src/__tests__/testdata/hello-world-complete-rest.bpmn new file mode 100644 index 00000000..263f15cc --- /dev/null +++ b/src/__tests__/testdata/hello-world-complete-rest.bpmn @@ -0,0 +1,55 @@ + + + + + SequenceFlow_0fp53hs + + + + + + + + + SequenceFlow_0fp53hs + SequenceFlow_112zghv + + + + SequenceFlow_112zghv + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/__tests__/testdata/rest-message-test.bpmn b/src/__tests__/testdata/rest-message-test.bpmn new file mode 100644 index 00000000..fca878af --- /dev/null +++ b/src/__tests__/testdata/rest-message-test.bpmn @@ -0,0 +1,53 @@ + + + + + Flow_083hybf + + + + Flow_083hybf + Flow_0c1qlsc + + + + Flow_0c1qlsc + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/__tests__/zeebe/integration/Client-DeployResource.spec.ts b/src/__tests__/zeebe/integration/Client-DeployResource.spec.ts index 3f6419a9..a46a74c5 100644 --- a/src/__tests__/zeebe/integration/Client-DeployResource.spec.ts +++ b/src/__tests__/zeebe/integration/Client-DeployResource.spec.ts @@ -61,7 +61,7 @@ test('deploys a Form', async () => { }) expect(result.deployments[0].form).not.toBeNull() }) -test.only('deploys multiple resources', async () => { +test('deploys multiple resources', async () => { const result = await zbc.deployResources([ { processFilename: './src/__tests__/testdata/Client-DeployWorkflow.bpmn', diff --git a/src/__tests__/zeebe/integration/Client-Update-Job-Timeout.spec.ts b/src/__tests__/zeebe/integration/Client-Update-Job-Timeout.spec.ts index 2584354e..12b53ddd 100644 --- 
a/src/__tests__/zeebe/integration/Client-Update-Job-Timeout.spec.ts +++ b/src/__tests__/zeebe/integration/Client-Update-Job-Timeout.spec.ts @@ -22,7 +22,6 @@ afterEach(async () => { await zbc.cancelProcessInstance(wf.processInstanceKey) } } catch (e: unknown) { - // console.log('Caught NOT FOUND') // @DEBUG } finally { await zbc.close() // Makes sure we don't forget to close connection } diff --git a/src/__tests__/zeebe/integration/Client-integration.spec.ts b/src/__tests__/zeebe/integration/Client-integration.spec.ts index 7770b52c..2b6b5963 100644 --- a/src/__tests__/zeebe/integration/Client-integration.spec.ts +++ b/src/__tests__/zeebe/integration/Client-integration.spec.ts @@ -70,7 +70,7 @@ test("does not retry to cancel a process instance that doesn't exist", async () // See: https://github.com/zeebe-io/zeebe/issues/2680 // await zbc.cancelProcessInstance('123LoL') try { - await zbc.cancelProcessInstance(2251799813686202) + await zbc.cancelProcessInstance('2251799813686202') } catch (e: unknown) { expect((e as Error).message.indexOf('5 NOT_FOUND:')).toBe(0) } diff --git a/src/__tests__/zeebe/integration/Worker-Failure-Retries.spec.ts b/src/__tests__/zeebe/integration/Worker-Failure-Retries.spec.ts index f47b3ae2..27ec4f58 100644 --- a/src/__tests__/zeebe/integration/Worker-Failure-Retries.spec.ts +++ b/src/__tests__/zeebe/integration/Worker-Failure-Retries.spec.ts @@ -22,7 +22,6 @@ afterEach(async () => { await zbc.cancelProcessInstance(wf.processInstanceKey) } } catch (e: unknown) { - // console.log('Caught NOT FOUND') // @DEBUG } finally { await zbc.close() // Makes sure we don't forget to close connection } diff --git a/src/__tests__/zeebe/integration/Worker-Failure.spec.ts b/src/__tests__/zeebe/integration/Worker-Failure.spec.ts index e018eea2..041c7463 100644 --- a/src/__tests__/zeebe/integration/Worker-Failure.spec.ts +++ b/src/__tests__/zeebe/integration/Worker-Failure.spec.ts @@ -58,7 +58,6 @@ afterEach(async () => { await 
zbc.cancelProcessInstance(wf.processInstanceKey) } } catch (e: unknown) { - // console.log('Caught NOT FOUND') // @DEBUG } }) diff --git a/src/__tests__/zeebe/local-integration/OnConnectionError.spec.ts b/src/__tests__/zeebe/local-integration/OnConnectionError.spec.ts index 0f50094c..87a74505 100644 --- a/src/__tests__/zeebe/local-integration/OnConnectionError.spec.ts +++ b/src/__tests__/zeebe/local-integration/OnConnectionError.spec.ts @@ -132,11 +132,6 @@ xtest('Does not call the onConnectionError handler if there is a business error' let wf = 'arstsrasrateiuhrastulyharsntharsie' const zbc2 = new ZeebeGrpcClient() zbc2.on('connectionError', () => { - // tslint:disable-next-line: no-console - // console.log('OnConnectionError!!!! Incrementing calledF') // @DEBUG - // const e = new Error() - // tslint:disable-next-line: no-console - // console.log(e.stack) // @DEBUG calledF++ }) diff --git a/src/__tests__/zeebe/stringifyVariables.unit.spec.ts b/src/__tests__/zeebe/stringifyVariables.unit.spec.ts index 4b493e0f..5b658d78 100644 --- a/src/__tests__/zeebe/stringifyVariables.unit.spec.ts +++ b/src/__tests__/zeebe/stringifyVariables.unit.spec.ts @@ -50,6 +50,12 @@ test('stringifyVariables stringifies the variables key of a job object', () => { expect(stringified.variables).toBe(expectedStringifiedVariables) }) +test('stringifyVariables throws an error when passed an array', () => { + const arrayInput = { variables: ['something'] } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + expect(() => stringifyVariables(arrayInput as any)).toThrow(Error) +}) + test('parseVariables returns a new object', () => { expect(parseVariables(jobDictionary)).not.toEqual(jobDictionary) }) diff --git a/src/admin/lib/AdminApiClient.ts b/src/admin/lib/AdminApiClient.ts index 0dd70fba..8a75f860 100644 --- a/src/admin/lib/AdminApiClient.ts +++ b/src/admin/lib/AdminApiClient.ts @@ -61,6 +61,7 @@ export class AdminApiClient { ), ], beforeError: [gotBeforeErrorHook], + 
beforeRequest: config.middleware ?? [], }, }) ) diff --git a/src/c8/index.ts b/src/c8/index.ts index 2c6b55a7..64ac0050 100644 --- a/src/c8/index.ts +++ b/src/c8/index.ts @@ -1,9 +1,9 @@ import { AdminApiClient } from '../admin' import { + Camunda8ClientConfiguration, CamundaEnvironmentConfigurator, CamundaPlatform8Configuration, constructOAuthProvider, - DeepPartial, } from '../lib' import { ModelerApiClient } from '../modeler' import { IOAuthProvider } from '../oauth' @@ -12,6 +12,9 @@ import { OptimizeApiClient } from '../optimize' import { TasklistApiClient } from '../tasklist' import { ZeebeGrpcClient, ZeebeRestClient } from '../zeebe' +import { getLogger, Logger } from './lib/C8Logger' +import { CamundaRestClient } from './lib/CamundaRestClient' + /** * A single point of configuration for all Camunda Platform 8 clients. * @@ -23,12 +26,12 @@ import { ZeebeGrpcClient, ZeebeRestClient } from '../zeebe' * * const c8 = new Camunda8() * const zeebe = c8.getZeebeGrpcApiClient() - * const zeebeRest = c8.getZeebeRestClient() * const operate = c8.getOperateApiClient() * const optimize = c8.getOptimizeApiClient() * const tasklist = c8.getTasklistApiClient() * const modeler = c8.getModelerApiClient() * const admin = c8.getAdminApiClient() + * const c8Rest = c8.getCamundaRestClient() * ``` */ export class Camunda8 { @@ -41,80 +44,115 @@ export class Camunda8 { private zeebeRestClient?: ZeebeRestClient private configuration: CamundaPlatform8Configuration private oAuthProvider: IOAuthProvider + private camundaRestClient?: CamundaRestClient + public log: Logger - constructor(config: DeepPartial = {}) { + /** + * All constructor parameters for configuration are optional. If no configuration is provided, the SDK will use environment variables to configure itself. 
+ */ + constructor(config: Camunda8ClientConfiguration = {}) { this.configuration = CamundaEnvironmentConfigurator.mergeConfigWithEnvironment(config) this.oAuthProvider = constructOAuthProvider(this.configuration) + this.log = getLogger(config) } - public getOperateApiClient(): OperateApiClient { + public getOperateApiClient( + config: Camunda8ClientConfiguration = {} + ): OperateApiClient { if (!this.operateApiClient) { this.operateApiClient = new OperateApiClient({ - config: this.configuration, + config: { ...this.configuration, ...config }, oAuthProvider: this.oAuthProvider, }) } return this.operateApiClient } - public getAdminApiClient(): AdminApiClient { + public getAdminApiClient( + config: Camunda8ClientConfiguration = {} + ): AdminApiClient { if (!this.adminApiClient) { this.adminApiClient = new AdminApiClient({ - config: this.configuration, + config: { ...this.configuration, ...config }, oAuthProvider: this.oAuthProvider, }) } return this.adminApiClient } - public getModelerApiClient(): ModelerApiClient { + public getModelerApiClient( + config: Camunda8ClientConfiguration = {} + ): ModelerApiClient { if (!this.modelerApiClient) { this.modelerApiClient = new ModelerApiClient({ - config: this.configuration, + config: { ...this.configuration, ...config }, oAuthProvider: this.oAuthProvider, }) } return this.modelerApiClient } - public getOptimizeApiClient(): OptimizeApiClient { + public getOptimizeApiClient( + config: Camunda8ClientConfiguration = {} + ): OptimizeApiClient { if (!this.optimizeApiClient) { this.optimizeApiClient = new OptimizeApiClient({ - config: this.configuration, + config: { ...this.configuration, ...config }, oAuthProvider: this.oAuthProvider, }) } return this.optimizeApiClient } - public getTasklistApiClient(): TasklistApiClient { + public getTasklistApiClient( + config: Camunda8ClientConfiguration = {} + ): TasklistApiClient { if (!this.tasklistApiClient) { this.tasklistApiClient = new TasklistApiClient({ - config: this.configuration, + 
config: { ...this.configuration, ...config }, oAuthProvider: this.oAuthProvider, }) } return this.tasklistApiClient } - public getZeebeGrpcApiClient(): ZeebeGrpcClient { + public getZeebeGrpcApiClient( + config: Camunda8ClientConfiguration = {} + ): ZeebeGrpcClient { if (!this.zeebeGrpcApiClient) { this.zeebeGrpcApiClient = new ZeebeGrpcClient({ - config: this.configuration, + config: { ...this.configuration, ...config }, oAuthProvider: this.oAuthProvider, }) } return this.zeebeGrpcApiClient } - public getZeebeRestClient(): ZeebeRestClient { + /** + * @deprecated from 8.6.0. Please use getCamundaRestClient() instead. + */ + public getZeebeRestClient( + config: Camunda8ClientConfiguration = {} + ): ZeebeRestClient { if (!this.zeebeRestClient) { this.zeebeRestClient = new ZeebeRestClient({ - config: this.configuration, + config: { ...this.configuration, ...config }, oAuthProvider: this.oAuthProvider, }) } return this.zeebeRestClient } + + public getCamundaRestClient( + config: Camunda8ClientConfiguration = {} + ): CamundaRestClient { + if (!this.camundaRestClient) { + this.camundaRestClient = new CamundaRestClient({ + config: { ...this.configuration, ...config }, + oAuthProvider: this.oAuthProvider, + }) + } + return this.camundaRestClient + } } diff --git a/src/c8/lib/C8Dto.ts b/src/c8/lib/C8Dto.ts new file mode 100644 index 00000000..b09278c1 --- /dev/null +++ b/src/c8/lib/C8Dto.ts @@ -0,0 +1,355 @@ +import { LosslessNumber } from 'lossless-json' + +import { Int64String, LosslessDto } from '../../lib' +import { ICustomHeaders, IInputVariables, JSONDoc } from '../../zeebe/types' + +export class RestApiJob< + Variables = LosslessDto, + CustomHeaders = LosslessDto, +> extends LosslessDto { + @Int64String + jobKey!: string + type!: string + @Int64String + processInstanceKey!: string + processDefinitionId!: string + processDefinitionVersion!: number + @Int64String + processDefinitionKey!: string + elementId!: string + @Int64String + elementInstanceKey!: string + 
customHeaders!: CustomHeaders + worker!: string + retries!: number + @Int64String + deadline!: string + variables!: Variables + tenantId!: string +} + +/** + * JSON object with changed task attribute values. + */ +export interface TaskChangeSet { + /* The due date of the task. Reset by providing an empty String. */ + dueDate?: Date | string + /* The follow-up date of the task. Reset by providing an empty String. */ + followUpDate?: Date | string + /* The list of candidate users of the task. Reset by providing an empty list. */ + candidateUsers?: string[] + /* The list of candidate groups of the task. Reset by providing an empty list. */ + candidateGroups?: string[] +} + +/** JSON object with changed job attribute values. */ +export interface JobUpdateChangeset { + /* The new amount of retries for the job; must be a positive number. */ + retries?: number + /** The duration of the new timeout in ms, starting from the current moment. */ + timeout?: number +} + +export interface NewUserInfo { + password: string + id: number + username: string + name: string + email: string + enabled: boolean +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export type Ctor = new (obj: any) => T + +export class ProcessDeployment extends LosslessDto { + processDefinitionId!: string + processDefinitionVersion!: number + @Int64String + processDefinitionKey!: string + resourceName!: string + tenantId!: string +} + +export class DecisionDeployment extends LosslessDto { + dmnDecisionId!: string + version!: number + @Int64String + decisionKey!: string + dmnDecisionName!: string + tenantId!: string + dmnDecisionRequirementsId!: string + @Int64String + dmnDecisionRequirementsKey!: string +} + +export class DecisionRequirementsDeployment extends LosslessDto { + dmnDecisionRequirementsId!: string + version!: number + dmnDecisionRequirementsName!: string + tenantId!: string + @Int64String + dmnDecisionRequirementsKey!: string + resourceName!: string +} +export class 
FormDeployment { + formId!: string + version!: number + @Int64String + formKey!: string + resourceName!: string + tenantId!: string +} + +export class DeployResourceResponseDto extends LosslessDto { + @Int64String + deploymentKey!: string + deployments!: ( + | { processDefinition: ProcessDeployment } + | { decisionDefinition: DecisionDeployment } + | { decisionRequirements: DecisionRequirementsDeployment } + | { form: FormDeployment } + )[] + tenantId!: string +} + +export class DeployResourceResponse extends DeployResourceResponseDto { + processes!: ProcessDeployment[] + decisions!: DecisionDeployment[] + decisionRequirements!: DecisionRequirementsDeployment[] + forms!: FormDeployment[] +} + +export class CreateProcessInstanceResponse> { + /** + * The unique key identifying the process definition (e.g. returned from a process + * in the DeployResourceResponse message) + */ + @Int64String + readonly processDefinitionKey!: string + /** + * The BPMN process ID of the process definition + */ + readonly processDefinitionId!: string + /** + * The version of the process; set to -1 to use the latest version + */ + readonly version!: number + @Int64String + readonly processInstanceKey!: string + /** + * the tenant identifier of the created process instance + */ + readonly tenantId!: string + /** + * If `awaitCompletion` is true, this will be populated with the output variables. Otherwise, it will be an empty object. + */ + readonly variables!: T +} + +export interface MigrationMappingInstruction { + /** The element ID to migrate from. */ + sourceElementId: string + /** The element ID to migrate into. */ + targetElementId: string +} + +/** Migrates a process instance to a new process definition. + * This request can contain multiple mapping instructions to define mapping between the active process instance's elements and target process definition elements. 
+ */ +export interface MigrationRequest { + processInstanceKey: string + /** The key of process definition to migrate the process instance to. */ + targetProcessDefinitionKey: string + mappingInstructions: MigrationMappingInstruction[] + /** A reference key chosen by the user that will be part of all records resulting from this operation. Must be > 0 if provided. */ + operationReference?: number | LosslessNumber +} + +/** The signal was broadcast. */ +export class BroadcastSignalResponse extends LosslessDto { + @Int64String + /** The unique ID of the signal that was broadcast. */ + signalKey!: string + /** The tenant ID of the signal that was broadcast. */ + tenantId!: string +} + +export interface UpdateElementVariableRequest { + /** + * The key of the element instance to update the variables for. + * This can be the process instance key (as obtained during instance creation), or a given element, + * such as a service task (see the elementInstanceKey on the job message). */ + elementInstanceKey: string + variables: JSONDoc | LosslessDto + /** + * Defaults to false. + * If set to true, the variables are merged strictly into the local scope (as specified by the elementInstanceKey). Otherwise, the variables are propagated to upper scopes and set at the outermost one. + * Let’s consider the following example: + * There are two scopes '1' and '2'. Scope '1' is the parent scope of '2'. The effective variables of the scopes are: 1 => { "foo" : 2 } 2 => { "bar" : 1 } + * An update request with elementInstanceKey as '2', variables { "foo" : 5 }, and local set to true leaves scope '1' unchanged and adjusts scope '2' to { "bar" : 1, "foo" 5 }. + * By default, with local set to false, scope '1' will be { "foo": 5 } and scope '2' will be { "bar" : 1 }. + */ + local?: boolean + /** + * A reference key chosen by the user that will be part of all records resulting from this operation. + * Must be > 0 if provided. 
+ */ + operationReference?: number +} + +export class CorrelateMessageResponse extends LosslessDto { + /** the unique ID of the message that was published */ + @Int64String + key!: string + /** the tenantId of the message */ + tenantId!: string + /** The key of the first process instance the message correlated with */ + @Int64String + processInstanceKey!: string +} + +export class PublishMessageResponse extends LosslessDto { + /** the unique ID of the message that was published */ + @Int64String + key!: string + /** the tenantId of the message */ + tenantId!: string +} + +export interface CreateProcessBaseRequest { + /** + * the version of the process; if not specified it will use the latest version + */ + version?: number + /** + * JSON document that will instantiate the variables for the root variable scope of the + * process instance. + */ + variables: V + /** The tenantId for a multi-tenant enabled cluster. */ + tenantId?: string + /** a reference key chosen by the user and will be part of all records resulted from this operation */ + operationReference?: number | LosslessNumber + /** + * List of start instructions. If empty (default) the process instance + * will start at the start event. If non-empty the process instance will apply start + * instructions after it has been created + */ + startInstructions?: ProcessInstanceCreationStartInstruction[] + /** + * Wait for the process instance to complete. If the process instance completion does not occur within the requestTimeout, the request will be closed. Defaults to false. + */ + // This is commented out, because we used specialised methods for the two cases. + // awaitCompletion?: boolean + /** + * Timeout (in ms) the request waits for the process to complete. By default or when set to 0, the generic request timeout configured in the cluster is applied. 
+ */
+	requestTimeout?: number
+}
+
+export interface ProcessInstanceCreationStartInstruction {
+	/**
+	 * future extensions might include
+	 * - different types of start instructions
+	 * - ability to set local variables for different flow scopes
+	 * for now, however, the start instruction is implicitly a
+	 * "startBeforeElement" instruction
+	 */
+	elementId: string
+}
+
+export interface CreateProcessInstanceFromProcessDefinitionId<
+	V extends JSONDoc | LosslessDto,
+> extends CreateProcessBaseRequest {
+	/**
+	 * the BPMN process ID of the process definition
+	 */
+	processDefinitionId: string
+}
+
+export interface CreateProcessInstanceFromProcessDefinition<
+	V extends JSONDoc | LosslessDto,
+> extends CreateProcessBaseRequest {
+	/**
+	 * the key of the process definition
+	 */
+	processDefinitionKey: string
+}
+
+export type CreateProcessInstanceReq =
+	| CreateProcessInstanceFromProcessDefinitionId
+	| CreateProcessInstanceFromProcessDefinition
+
+export interface PatchAuthorizationRequest {
+	/** The key of the owner of the authorization. */
+	ownerKey: string
+	/** Indicates if permissions should be added or removed. */
+	action: 'ADD' | 'REMOVE'
+	/** The type of resource to add/remove permissions to/from. */
+	resourceType:
+		| 'AUTHORIZATION'
+		| 'MESSAGE'
+		| 'JOB'
+		| 'APPLICATION'
+		| 'TENANT'
+		| 'DEPLOYMENT'
+		| 'PROCESS_DEFINITION'
+		| 'USER_TASK'
+		| 'DECISION_REQUIREMENTS_DEFINITION'
+		| 'DECISION_DEFINITION'
+		| 'USER_GROUP'
+		| 'USER'
+		| 'ROLE'
+	/** The permissions to add/remove. */
+	permissions: {
+		/** Specifies the type of permissions. */
+		permissionType: 'CREATE' | 'READ' | 'UPDATE' | 'DELETE'
+		/** A list of resource IDs the permission relates to. */
+		resourceIds: []
+	}[]
+}
+
+export interface RestJob<
+	Variables = IInputVariables,
+	CustomHeaderShape = ICustomHeaders,
+> {
+	/** The key, a unique identifier for the job */
+	readonly jobKey: string
+	/**
+	 * The job type, as defined in the BPMN process (e.g.
) + */ + readonly type: string + /** The job's process instance key */ + readonly processInstanceKey: string + /** The bpmn process ID of the job process definition */ + readonly bpmnProcessId: string + /** The version of the job process definition */ + readonly processDefinitionVersion: number + /** The associated task element ID */ + readonly elementId: string + /** + * The unique key identifying the associated task, unique within the scope of the + * process instance + */ + readonly elementInstanceKey: string + /** + * A set of custom headers defined during modelling + */ + readonly customHeaders: Readonly + /** The name of the worker that activated this job */ + readonly worker: string + /* The amount of retries left to this job (should always be positive) */ + readonly retries: number + /** Epoch milliseconds */ + readonly deadline: string + /** + * All visible variables in the task scope, computed at activation time. + */ + readonly variables: Readonly + /** + * The `tenantId` of the job in a multi-tenant cluster + */ + readonly tenantId: string +} diff --git a/src/c8/lib/C8Logger.ts b/src/c8/lib/C8Logger.ts new file mode 100644 index 00000000..03b022d2 --- /dev/null +++ b/src/c8/lib/C8Logger.ts @@ -0,0 +1,52 @@ +import winston from 'winston' // Import Winston + +import { + Camunda8ClientConfiguration, + CamundaEnvironmentConfigurator, +} from '../../lib' + +export type Logger = { + /* eslint-disable @typescript-eslint/no-explicit-any */ + info: (message: string | undefined, ...meta: any[]) => void + warn: (message: string | undefined, ...meta: any[]) => void + error: (message: string | undefined, ...meta: any[]) => void + debug: (message: string | undefined, ...meta: any[]) => void + trace: (message: string | undefined, ...meta: any[]) => void + /* eslint-enable @typescript-eslint/no-explicit-any */ +} + +let defaultLogger: Logger +let cachedLogger: Logger | undefined + +export function getLogger(config?: Camunda8ClientConfiguration) { + const configuration 
= + CamundaEnvironmentConfigurator.mergeConfigWithEnvironment(config ?? {}) + // We assume that the SDK user uses a single winston instance for 100% of logging, or no logger at all (in which case we create our own) + if (config?.logger && cachedLogger !== config.logger) { + cachedLogger = config.logger + config.logger.debug(`Using supplied logger`) + } + if (!defaultLogger) { + // Define the default logger + const logger: winston.Logger & { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + trace: (message: string | undefined, ...meta: any[]) => void + } = winston.createLogger({ + level: configuration.CAMUNDA_LOG_LEVEL, + format: winston.format.combine( + winston.format.timestamp(), + winston.format.colorize(), + winston.format.simple() + ), + transports: [new winston.transports.Console()], + }) as any // eslint-disable-line @typescript-eslint/no-explicit-any + + logger.trace = logger.silly + defaultLogger = logger + } + if (!cachedLogger) { + defaultLogger.debug(`Using default winston logger`) + cachedLogger = defaultLogger + } + return config?.logger ?? 
defaultLogger +} diff --git a/src/c8/lib/CamundaJobWorker.ts b/src/c8/lib/CamundaJobWorker.ts new file mode 100644 index 00000000..c41254f7 --- /dev/null +++ b/src/c8/lib/CamundaJobWorker.ts @@ -0,0 +1,210 @@ +import { EventEmitter } from 'events' + +import TypedEmitter from 'typed-emitter' + +import { LosslessDto } from '../../lib' +import { + ActivateJobsRequest, + IProcessVariables, + JobCompletionInterfaceRest, + MustReturnJobActionAcknowledgement, +} from '../../zeebe/types' + +import { Ctor, RestJob } from './C8Dto' +import { getLogger, Logger } from './C8Logger' +import { CamundaRestClient } from './CamundaRestClient' + +type CamundaJobWorkerEvents = { + pollError: (error: Error) => void + start: () => void + stop: () => void + poll: ({ + currentlyActiveJobCount, + maxJobsToActivate, + worker, + }: { + currentlyActiveJobCount: number + maxJobsToActivate: number + worker: string + }) => void +} + +export interface CamundaJobWorkerConfig< + VariablesDto extends LosslessDto, + CustomHeadersDto extends LosslessDto, +> extends ActivateJobsRequest { + inputVariableDto?: Ctor + customHeadersDto?: Ctor + /** How often the worker will poll for new jobs. Defaults to 30s */ + pollIntervalMs?: number + jobHandler: ( + job: RestJob & + JobCompletionInterfaceRest, + log: Logger + ) => MustReturnJobActionAcknowledgement + logger?: Logger + /** Default: true. Start the worker polling immediately. If set to `false`, call the worker's `start()` method to start polling for work. 
*/
+	autoStart?: boolean
+}
+// Make this class extend event emitter and have a typed event 'pollError'
+export class CamundaJobWorker<
+	VariablesDto extends LosslessDto,
+	CustomHeadersDto extends LosslessDto,
+> extends (EventEmitter as new () => TypedEmitter) {
+	public currentlyActiveJobCount = 0
+	public capacity: number
+	private loopHandle?: NodeJS.Timeout
+	private pollInterval: number
+	public log: Logger
+	logMeta: () => {
+		worker: string
+		type: string
+		pollIntervalMs: number
+		capacity: number
+		currentload: number
+	}
+
+	constructor(
+		private readonly config: CamundaJobWorkerConfig<
+			VariablesDto,
+			CustomHeadersDto
+		>,
+		private readonly restClient: CamundaRestClient
+	) {
+		super()
+		this.pollInterval = config.pollIntervalMs ?? 30000
+		this.capacity = this.config.maxJobsToActivate
+		this.log = getLogger({ logger: config.logger })
+		this.logMeta = () => ({
+			worker: this.config.worker,
+			type: this.config.type,
+			pollIntervalMs: this.pollInterval,
+			capacity: this.config.maxJobsToActivate,
+			currentload: this.currentlyActiveJobCount,
+		})
+		this.log.debug(`Created REST Job Worker`, this.logMeta())
+		if (config.autoStart ?? true) {
+			this.start()
+		}
+	}
+
+	start() {
+		this.log.debug(`Starting poll loop`, this.logMeta())
+		this.emit('start')
+		this.poll()
+		this.loopHandle = setInterval(() => this.poll(), this.pollInterval)
+	}
+
+	/** Stops the Job Worker polling for more jobs. If you await this call, it will return as soon as all currently active jobs are completed.
+	 * The deadline for all currently active jobs to complete is 30s by default. If the active jobs do not complete by the deadline, this method will throw.
+	 */
+	async stop(deadlineMs = 30000) {
+		this.log.debug(`Stop requested`, this.logMeta())
+		/** Stopping polling for new jobs */
+		clearInterval(this.loopHandle)
+		return new Promise((resolve, reject) => {
+			if (this.currentlyActiveJobCount === 0) {
+				this.log.debug(`All jobs drained.
Worker stopped.`, this.logMeta()) + this.emit('stop') + return resolve(null) + } + /** This is an error timeout - if we don't complete all active jobs before the specified deadline, we reject the Promise */ + const timeout = setTimeout(() => { + clearInterval(wait) + this.log.debug( + `Failed to drain all jobs in ${deadlineMs}ms`, + this.logMeta() + ) + return reject(`Failed to drain all jobs in ${deadlineMs}ms`) + }, deadlineMs) + /** Check every 500ms to see if our active job count has hit zero, i.e: all active work is stopped */ + const wait = setInterval(() => { + if (this.currentlyActiveJobCount === 0) { + this.log.debug(`All jobs drained. Worker stopped.`, this.logMeta()) + clearInterval(wait) + clearTimeout(timeout) + this.emit('stop') + return resolve(null) + } + this.log.debug( + `Stopping - waiting for active jobs to complete.`, + this.logMeta() + ) + }, 500) + }) + } + + private poll() { + this.emit('poll', { + currentlyActiveJobCount: this.currentlyActiveJobCount, + maxJobsToActivate: this.config.maxJobsToActivate, + worker: this.config.worker, + }) + if (this.currentlyActiveJobCount >= this.config.maxJobsToActivate) { + this.log.debug(`At capacity - not requesting more jobs`, this.logMeta()) + return + } + + this.log.trace(`Polling for jobs`, this.logMeta()) + + const remainingJobCapacity = + this.config.maxJobsToActivate - this.currentlyActiveJobCount + this.restClient + .activateJobs({ + ...this.config, + maxJobsToActivate: remainingJobCapacity, + }) + .then((jobs) => { + const count = jobs.length + this.currentlyActiveJobCount += count + this.log.debug(`Activated ${count} jobs`, this.logMeta()) + // The job handlers for the activated jobs will run in parallel + jobs.forEach((job) => this.handleJob(job)) + }) + .catch((e) => this.emit('pollError', e)) + } + + private async handleJob( + job: RestJob & + JobCompletionInterfaceRest + ) { + try { + this.log.debug( + `Invoking job handler for job ${job.jobKey}`, + this.logMeta() + ) + await 
this.config.jobHandler(job, this.log) + this.log.debug( + `Completed job handler for job ${job.jobKey}.`, + this.logMeta() + ) + } catch (e) { + /** Unhandled exception in the job handler */ + if (e instanceof Error) { + // If err is an instance of Error, we can safely access its properties + this.log.error( + `Unhandled exception in job handler for job ${job.jobKey}`, + this.logMeta() + ) + this.log.error(`Error: ${e.message}`, { + stack: e.stack, + ...this.logMeta(), + }) + } else { + // If err is not an Error, log it as is + this.log.error( + 'An unknown error occurred while executing a job handler', + { error: e, ...this.logMeta() } + ) + } + this.log.error(`Failing the job`, this.logMeta()) + await job.fail({ + errorMessage: (e as Error).toString(), + retries: job.retries - 1, + }) + } finally { + /** Decrement the active job count in all cases */ + this.currentlyActiveJobCount-- + } + } +} diff --git a/src/c8/lib/CamundaRestClient.ts b/src/c8/lib/CamundaRestClient.ts new file mode 100644 index 00000000..56d6f4d8 --- /dev/null +++ b/src/c8/lib/CamundaRestClient.ts @@ -0,0 +1,968 @@ +import fs from 'node:fs' + +import { debug } from 'debug' +import FormData from 'form-data' +import got from 'got' +import { parse, stringify } from 'lossless-json' + +import { + Camunda8ClientConfiguration, + CamundaEnvironmentConfigurator, + constructOAuthProvider, + createUserAgentString, + GetCustomCertificateBuffer, + gotBeforeErrorHook, + gotErrorHandler, + GotRetryConfig, + LosslessDto, + losslessParse, + losslessStringify, + makeBeforeRetryHandlerFor401TokenRetry, + RequireConfiguration, +} from '../../lib' +import { IOAuthProvider } from '../../oauth' +import { + ActivateJobsRequest, + BroadcastSignalReq, + CompleteJobRequest, + ErrorJobWithVariables, + FailJobRequest, + IProcessVariables, + JOB_ACTION_ACKNOWLEDGEMENT, + JobCompletionInterfaceRest, + JSONDoc, + PublishMessageRequest, + TopologyResponse, +} from '../../zeebe/types' + +import { + BroadcastSignalResponse, + 
CorrelateMessageResponse, + CreateProcessInstanceReq, + CreateProcessInstanceResponse, + Ctor, + DecisionDeployment, + DecisionRequirementsDeployment, + DeployResourceResponse, + DeployResourceResponseDto, + FormDeployment, + JobUpdateChangeset, + MigrationRequest, + NewUserInfo, + PatchAuthorizationRequest, + ProcessDeployment, + PublishMessageResponse, + RestJob, + TaskChangeSet, + UpdateElementVariableRequest, +} from './C8Dto' +import { getLogger, Logger } from './C8Logger' +import { CamundaJobWorker, CamundaJobWorkerConfig } from './CamundaJobWorker' +import { createSpecializedRestApiJobClass } from './RestApiJobClassFactory' +import { createSpecializedCreateProcessInstanceResponseClass } from './RestApiProcessInstanceClassFactory' + +const trace = debug('camunda:zeebe-rest') + +const CAMUNDA_REST_API_VERSION = 'v2' + +class DefaultLosslessDto extends LosslessDto {} +/** + * The client for the unified Camunda 8 REST API. + * + * Logging: to enable debug tracing during development, you can set `DEBUG=camunda:zeebe-rest`. + * + * For production, you can pass in an instance of [winston.Logger](https://github.com/winstonjs/winston) to the constructor as `logger`. + * + * `CAMUNDA_LOG_LEVEL` in the environment or the constructor options can be used to set the log level to one of 'error', 'warn', 'info', 'http', 'verbose', 'debug', or 'silly'. + * + * @since 8.6.0 + * @experimental this API may be moved to an ESM package in a future release. Can you use ESM in your project? Comment [on this issue](https://github.com/camunda/camunda-8-js-sdk/issues/267). + */ +export class CamundaRestClient { + private userAgentString: string + private oAuthProvider: IOAuthProvider + private rest: Promise + private tenantId?: string + public log: Logger + + /** + * All constructor parameters for configuration are optional. If no configuration is provided, the SDK will use environment variables to configure itself. 
+ */ + constructor(options?: { + config?: Camunda8ClientConfiguration + oAuthProvider?: IOAuthProvider + }) { + const config = CamundaEnvironmentConfigurator.mergeConfigWithEnvironment( + options?.config ?? {} + ) + this.log = getLogger(config) + this.log.debug(`Using REST API version ${CAMUNDA_REST_API_VERSION}`) + trace('options.config', options?.config) + trace('config', config) + this.oAuthProvider = + options?.oAuthProvider ?? constructOAuthProvider(config) + this.userAgentString = createUserAgentString(config) + this.tenantId = config.CAMUNDA_TENANT_ID + + const baseUrl = RequireConfiguration( + config.ZEEBE_REST_ADDRESS, + 'ZEEBE_REST_ADDRESS' + ) + + const prefixUrl = `${baseUrl}/${CAMUNDA_REST_API_VERSION}` + + this.rest = GetCustomCertificateBuffer(config).then( + (certificateAuthority) => + got.extend({ + prefixUrl, + retry: GotRetryConfig, + https: { + certificateAuthority, + }, + handlers: [gotErrorHandler], + hooks: { + beforeRetry: [ + makeBeforeRetryHandlerFor401TokenRetry( + this.getHeaders.bind(this) + ), + ], + beforeError: [gotBeforeErrorHook], + beforeRequest: [ + (options) => { + const body = options.body + const path = options.url.href + const method = options.method + trace(`${method} ${path}`) + trace(body) + this.log.debug(`${method} ${path}`) + this.log.trace(body?.toString()) + }, + ...(config.middleware ?? []), + ], + }, + }) + ) + } + + private async getHeaders() { + const token = await this.oAuthProvider.getToken('ZEEBE') + + const headers = { + 'content-type': 'application/json', + authorization: `Bearer ${token}`, + 'user-agent': this.userAgentString, + accept: '*/*', + } + const safeHeaders = { + ...headers, + authorization: + headers.authorization.substring(0, 15) + + (headers.authorization.length > 8) + ? '...' + : '', + } + trace('headers', safeHeaders) + return headers + } + + /** + * Manage the permissions assigned to authorization. 
+ * + * Documentation: https://docs.camunda.io/docs/apis-tools/camunda-api-rest/specifications/patch-authorization/ + * + * @since 8.6.0 + */ + public async modifyAuthorization(req: PatchAuthorizationRequest) { + const headers = await this.getHeaders() + const { ownerKey, ...request } = req + return this.rest.then((rest) => + rest + .patch(`authorizations/${ownerKey}`, { + headers, + body: stringify(request), + }) + .json() + ) + } + + /** + * Broadcast a signal. + * + * Documentation: https://docs.camunda.io/docs/apis-tools/camunda-api-rest/specifications/broadcast-signal/ + * + * @since 8.6.0 + */ + public async broadcastSignal(req: BroadcastSignalReq) { + const headers = await this.getHeaders() + const request = this.addDefaultTenantId(req) + return this.rest.then((rest) => + rest + .post(`signals/broadcast`, { + headers, + body: stringify(request), + parseJson: (text) => losslessParse(text, BroadcastSignalResponse), + }) + .json() + ) + } + + /* Get the topology of the Zeebe cluster. */ + public async getTopology() { + const headers = await this.getHeaders() + return this.rest.then((rest) => + rest.get('topology', { headers }).json() + ) + } + + /** + * Complete a user task with the given key. The method either completes the task or throws 400, 404, or 409. + * + * Documentation: https://docs.camunda.io/docs/apis-tools/zeebe-api-rest/specifications/complete-a-user-task/ + * + * @since 8.6.0 + */ + public async completeUserTask({ + userTaskKey, + variables = {}, + action = 'complete', + }: { + userTaskKey: string + variables?: Record + action?: string + }) { + const headers = await this.getHeaders() + return this.rest.then((rest) => + rest + .post(`user-tasks/${userTaskKey}/completion`, { + body: losslessStringify({ + variables, + action, + }), + headers, + }) + .json() + ) + } + + /** + * Assign a user task with the given key to the given assignee. 
+ * + * Documentation: https://docs.camunda.io/docs/apis-tools/camunda-api-rest/specifications/assign-user-task/ + * + * @since 8.6.0 + */ + public async assignTask({ + userTaskKey, + assignee, + allowOverride = true, + action = 'assign', + }: { + /** The key of the user task to assign. */ + userTaskKey: string + /** The assignee for the user task. The assignee must not be empty or null. */ + assignee: string + /** By default, the task is reassigned if it was already assigned. Set this to false to return an error in such cases. The task must then first be unassigned to be assigned again. Use this when you have users picking from group task queues to prevent race conditions. */ + allowOverride?: boolean + /** A custom action value that will be accessible from user task events resulting from this endpoint invocation. If not provided, it will default to "assign". */ + action: string + }) { + const headers = await this.getHeaders() + const req = { + allowOverride, + action, + assignee, + } + return this.rest.then((rest) => + rest + .post(`user-tasks/${userTaskKey}/assignment`, { + body: losslessStringify(req), + headers, + }) + .json() + ) + } + + /** + * Update a user task with the given key. + * + * Documentation: https://docs.camunda.io/docs/apis-tools/camunda-api-rest/specifications/update-user-task/ + * + * @since 8.6.0 + */ + public async updateTask({ + userTaskKey, + changeset, + }: { + userTaskKey: string + changeset: TaskChangeSet + }) { + const headers = await this.getHeaders() + return this.rest.then((rest) => + rest + .patch(`user-tasks/${userTaskKey}/update`, { + body: losslessStringify(changeset), + headers, + }) + .json() + ) + } + /** + * Remove the assignee of a task with the given key. 
+ * + * Documentation: https://docs.camunda.io/docs/apis-tools/camunda-api-rest/specifications/unassign-user-task/ + * + * @since 8.6.0 + */ + public async unassignTask({ userTaskKey }: { userTaskKey: string }) { + const headers = await this.getHeaders() + return this.rest.then((rest) => + rest.delete(`user-tasks/${userTaskKey}/assignee`, { headers }).json() + ) + } + + /** + * Create a user. + * + * Documentation: https://docs.camunda.io/docs/apis-tools/camunda-api-rest/specifications/create-user/ + * + * @since 8.6.0 + */ + public async createUser(newUserInfo: NewUserInfo) { + const headers = await this.getHeaders() + return this.rest.then((rest) => + rest + .post(`users`, { + body: JSON.stringify(newUserInfo), + headers, + }) + .json() + ) + } + + /** + * Search for user tasks based on given criteria. + * + * Documentation: https://docs.camunda.io/docs/apis-tools/camunda-api-rest/specifications/query-user-tasks-alpha/ + * @experimental + */ + // public async queryTasks() {} + + /** + * Publish a Message and correlates it to a subscription. If correlation is successful it will return the first process instance key the message correlated with. + * + * Documentation: https://docs.camunda.io/docs/apis-tools/camunda-api-rest/specifications/correlate-a-message/ + * + * @since 8.6.0 + */ + public async correlateMessage( + message: Pick< + PublishMessageRequest, + 'name' | 'correlationKey' | 'variables' | 'tenantId' + > + ) { + const headers = await this.getHeaders() + const req = this.addDefaultTenantId(message) + const body = losslessStringify(req) + return this.rest.then((rest) => + rest + .post(`messages/correlation`, { + body, + headers, + parseJson: (text) => losslessParse(text, CorrelateMessageResponse), + }) + .json() + ) + } + + /** + * Publish a single message. Messages are published to specific partitions computed from their correlation keys. This method does not wait for a correlation result. Use `correlateMessage` for such use cases. 
+ * + * Documentation: https://docs.camunda.io/docs/apis-tools/camunda-api-rest/specifications/publish-a-message/ + * + * @since 8.6.0 + */ + public async publishMessage(publishMessageRequest: PublishMessageRequest) { + const headers = await this.getHeaders() + const req = this.addDefaultTenantId(publishMessageRequest) + const body = losslessStringify(req) + return this.rest.then((rest) => + rest + .post(`messages/publication`, { + headers, + body, + parseJson: (text) => losslessParse(text, PublishMessageResponse), + }) + .json() + ) + } + + /** + * Obtains the status of the current Camunda license. + * + * Documentation: https://docs.camunda.io/docs/apis-tools/camunda-api-rest/specifications/get-status-of-camunda-license/ + * + * @since 8.6.0 + */ + public async getLicenseStatus(): Promise<{ + vaildLicense: boolean + licenseType: string + }> { + return this.rest.then((rest) => rest.get(`license`).json()) + } + + /** + * Create a new polling Job Worker. + * You can pass in an optional winston.Logger instance as `logger`. This enables you to have distinct logging levels for different workers. + * + * @since 8.6.0 + */ + public createJobWorker< + Variables extends LosslessDto, + CustomHeaders extends LosslessDto, + >(config: CamundaJobWorkerConfig) { + const worker = new CamundaJobWorker(config, this) + // worker.start() + return worker + } + /** + * Iterate through all known partitions and activate jobs up to the requested maximum. + * + * The parameter `inputVariablesDto` is a Dto to decode the job payload. The `customHeadersDto` parameter is a Dto to decode the custom headers. + * Pass in a Dto class that extends LosslessDto to provide both type information in your code, + * and safe interoperability with applications that use the `int64` type in variables. 
+ * + * @since 8.6.0 + */ + public async activateJobs< + VariablesDto extends LosslessDto, + CustomHeadersDto extends LosslessDto, + >( + request: ActivateJobsRequest & { + inputVariableDto?: Ctor + customHeadersDto?: Ctor + } + ): Promise< + (RestJob & + JobCompletionInterfaceRest)[] + > { + const headers = await this.getHeaders() + + const { + inputVariableDto = LosslessDto, + customHeadersDto = LosslessDto, + tenantIds = this.tenantId ? [this.tenantId] : undefined, + ...req + } = request + + /** + * The ActivateJobs endpoint can take multiple tenantIds, and activate jobs for multiple tenants at once. + */ + const body = losslessStringify({ + ...req, + tenantIds, + }) + + const jobDto = createSpecializedRestApiJobClass( + inputVariableDto, + customHeadersDto + ) + + return this.rest.then((rest) => + rest + .post(`jobs/activation`, { + body, + headers, + parseJson: (text) => losslessParse(text, jobDto, 'jobs'), + }) + .json[]>() + .then((activatedJobs) => activatedJobs.map(this.addJobMethods)) + ) + } + + /** + * Fails a job using the provided job key. This method sends a POST request to the endpoint '/jobs/{jobKey}/fail' with the failure reason and other details specified in the failJobRequest object. + * + * Documentation: https://docs.camunda.io/docs/next/apis-tools/camunda-api-rest/specifications/fail-job/ + * + * @since 8.6.0 + */ + public async failJob(failJobRequest: FailJobRequest) { + const { jobKey } = failJobRequest + const headers = await this.getHeaders() + return this.rest.then((rest) => + rest + .post(`jobs/${jobKey}/failure`, { + body: losslessStringify(failJobRequest), + headers, + }) + .then(() => JOB_ACTION_ACKNOWLEDGEMENT) + ) + } + + /** + * Report a business error (i.e. non-technical) that occurs while processing a job. 
+ * + * Documentation: https://docs.camunda.io/docs/next/apis-tools/camunda-api-rest/specifications/report-error-for-job/ + * + * @since 8.6.0 + */ + public async errorJob( + errorJobRequest: ErrorJobWithVariables & { jobKey: string } + ) { + const { jobKey, ...request } = errorJobRequest + const headers = await this.getHeaders() + return this.rest.then((rest) => + rest + .post(`jobs/${jobKey}/error`, { + body: losslessStringify(request), + headers, + parseJson: (text) => losslessParse(text), + }) + .then(() => JOB_ACTION_ACKNOWLEDGEMENT) + ) + } + + /** + * Complete a job with the given payload, which allows completing the associated service task. + * + * Documentation: https://docs.camunda.io/docs/next/apis-tools/camunda-api-rest/specifications/complete-job/ + * + * @since 8.6.0 + */ + public async completeJob(completeJobRequest: CompleteJobRequest) { + const { jobKey } = completeJobRequest + const headers = await this.getHeaders() + const req = { variables: completeJobRequest.variables } + return this.rest.then((rest) => + rest + .post(`jobs/${jobKey}/completion`, { + body: losslessStringify(req), + headers, + }) + .then(() => JOB_ACTION_ACKNOWLEDGEMENT) + ) + } + + /** + * Update a job with the given key. + * + * Documentation: https://docs.camunda.io/docs/apis-tools/camunda-api-rest/specifications/update-a-job/ + * + * @since 8.6.0 + */ + public async updateJob( + jobChangeset: JobUpdateChangeset & { jobKey: string } + ) { + const { jobKey, ...changeset } = jobChangeset + const headers = await this.getHeaders() + return this.rest.then((rest) => + rest.patch(`jobs/${jobKey}`, { + body: JSON.stringify(changeset), + headers, + }) + ) + } + + /** + * Marks the incident as resolved; most likely a call to Update job will be necessary to reset the job's retries, followed by this call. 
+ * + * Documentation: https://docs.camunda.io/docs/apis-tools/camunda-api-rest/specifications/resolve-incident/ + * + * @since 8.6.0 + */ + public async resolveIncident(incidentKey: string) { + const headers = await this.getHeaders() + return this.rest.then((rest) => + rest.post(`incidents/${incidentKey}/resolution`, { + headers, + }) + ) + } + + /** + * Create and start a process instance. This method does not await the outcome of the process. For that, use `createProcessInstanceWithResult`. + * + * Documentation: https://docs.camunda.io/docs/apis-tools/camunda-api-rest/specifications/create-process-instance/ + * + * @since 8.6.0 + */ + public async createProcessInstance( + request: CreateProcessInstanceReq + ): Promise> + + async createProcessInstance< + T extends JSONDoc | LosslessDto, + V extends LosslessDto, + >( + request: CreateProcessInstanceReq & { + outputVariablesDto?: Ctor + } + ) { + const headers = await this.getHeaders() + + const outputVariablesDto: Ctor | Ctor = + (request.outputVariablesDto as Ctor) ?? DefaultLosslessDto + + const CreateProcessInstanceResponseWithVariablesDto = + createSpecializedCreateProcessInstanceResponseClass(outputVariablesDto) + + return this.rest.then((rest) => + rest + .post(`process-instances`, { + body: losslessStringify(this.addDefaultTenantId(request)), + headers, + parseJson: (text) => + losslessParse(text, CreateProcessInstanceResponseWithVariablesDto), + }) + .json< + InstanceType + >() + ) + } + + /** + * Create and start a process instance. This method awaits the outcome of the process. + * + * Documentation: https://docs.camunda.io/docs/apis-tools/camunda-api-rest/specifications/create-process-instance/ + * + * @since 8.6.0 + */ + public async createProcessInstanceWithResult( + request: CreateProcessInstanceReq & { + /** An array of variable names to fetch. 
If not supplied, all visible variables in the root scope will be returned */ + fetchVariables?: string[] + } + ): Promise> + + public async createProcessInstanceWithResult< + T extends JSONDoc | LosslessDto, + V extends LosslessDto, + >( + request: CreateProcessInstanceReq & { + /** An array of variable names to fetch. If not supplied, all visible variables in the root scope will be returned */ + fetchVariables?: string[] + /** A Dto specifying the shape of the output variables. If not supplied, the output variables will be returned as a `LosslessDto` of type `unknown`. */ + outputVariablesDto: Ctor + } + ): Promise> + public async createProcessInstanceWithResult< + T extends JSONDoc | LosslessDto, + V, + >( + request: CreateProcessInstanceReq & { + outputVariablesDto?: Ctor + } + ) { + /** + * We override the type system to make `awaitCompletion` hidden from end-users. This has been done because supporting the permutations of + * creating a process with/without awaiting the result and with/without an outputVariableDto in a single method is complex. I could not get all + * the cases to work with intellisense for the end-user using either generics or with signature overloads. + * + * To address this, createProcessInstance has all the functionality, but hides the `awaitCompletion` attribute from the signature. This method + * is a wrapper around createProcessInstance that sets `awaitCompletion` to true, and explicitly informs the type system via signature overloads. + * + * This is not ideal, but it is the best solution I could come up with. 
+ */ + return this.createProcessInstance({ + ...request, + awaitCompletion: true, + outputVariablesDto: request.outputVariablesDto, + } as unknown as CreateProcessInstanceReq) + } + + /** + * Cancel an active process instance + */ + public async cancelProcessInstance({ + processInstanceKey, + operationReference, + }: { + processInstanceKey: string + operationReference?: number + }) { + const headers = await this.getHeaders() + return this.rest.then((rest) => + rest.post(`process-instances/${processInstanceKey}/cancellation`, { + body: JSON.stringify({ operationReference }), + headers, + }) + ) + } + + /** + * Migrates a process instance to a new process definition. + * This request can contain multiple mapping instructions to define mapping between the active process instance's elements and target process definition elements. + * Use this to upgrade a process instance to a new version of a process or to a different process definition, e.g. to keep your running instances up-to-date with the latest process improvements. + * + * Documentation: https://docs.camunda.io/docs/next/apis-tools/camunda-api-rest/specifications/migrate-process-instance/ + * + * @since 8.6.0 + */ + public async migrateProcessInstance(req: MigrationRequest) { + const headers = await this.getHeaders() + const { processInstanceKey, ...request } = req + this.log.debug(`Migrating process instance ${processInstanceKey}`, { + component: 'C8RestClient', + }) + return this.rest.then((rest) => + rest.post(`process-instances/${processInstanceKey}/migration`, { + headers, + body: losslessStringify(request), + }) + ) + } + + /** + * Deploy resources to the broker. + * @param resources - An array of binary data strings representing the resources to deploy. + * @param tenantId - Optional tenant ID to deploy the resources to. If not provided, the default tenant ID is used. 
+ * + * Documentation: https://docs.camunda.io/docs/apis-tools/camunda-api-rest/specifications/deploy-resources/ + * + * @since 8.6.0 + */ + public async deployResources( + resources: { content: string; name: string }[], + tenantId?: string + ) { + const headers = await this.getHeaders() + const formData = new FormData() + + resources.forEach((resource) => { + formData.append(`resources`, resource.content, { + filename: resource.name, + }) + }) + + if (tenantId || this.tenantId) { + formData.append('tenantId', tenantId ?? this.tenantId) + } + + this.log.debug(`Deploying ${resources.length} resources`) + const res = await this.rest.then((rest) => + rest + .post('deployments', { + body: formData, + headers: { + ...headers, + ...formData.getHeaders(), + Accept: 'application/json', + }, + parseJson: (text) => parse(text), // we parse the response with LosslessNumbers, with no Dto + }) + .json() + ) + + /** + * Now we need to examine the response and parse the deployments to lossless Dtos + * We dynamically construct the response object for the caller, by examining the lossless response + * and re-parsing each of the deployments with the correct Dto. + */ + const deploymentResponse = new DeployResourceResponse() + deploymentResponse.deploymentKey = res.deploymentKey.toString() + deploymentResponse.tenantId = res.tenantId + deploymentResponse.deployments = [] + deploymentResponse.processes = [] + deploymentResponse.decisions = [] + deploymentResponse.decisionRequirements = [] + deploymentResponse.forms = [] + + /** + * Type-guard assertions to correctly type the deployments. The API returns an array with mixed types. 
+ */ + const isProcessDeployment = ( + deployment + ): deployment is { processDefinition: ProcessDeployment } => + !!deployment.processDefinition + const isDecisionDeployment = ( + deployment + ): deployment is { decisionDefinition: DecisionDeployment } => + !!deployment.decisionDefinition + const isDecisionRequirementsDeployment = ( + deployment + ): deployment is { decisionRequirements: DecisionRequirementsDeployment } => + !!deployment.decisionRequirements + const isFormDeployment = ( + deployment + ): deployment is { form: FormDeployment } => !!deployment.form + + /** + * Here we examine each of the deployments returned from the API, and create a correctly typed + * object for each one. We also populate subkeys per type. This allows SDK users to work with + * types known ahead of time. + */ + res.deployments.forEach((deployment) => { + if (isProcessDeployment(deployment)) { + const processDeployment = losslessParse( + stringify(deployment.processDefinition)!, + ProcessDeployment + ) + deploymentResponse.deployments.push({ + processDefinition: processDeployment, + }) + deploymentResponse.processes.push(processDeployment) + } + if (isDecisionDeployment(deployment)) { + const decisionDeployment = losslessParse( + stringify(deployment)!, + DecisionDeployment + ) + deploymentResponse.deployments.push({ + decisionDefinition: decisionDeployment, + }) + deploymentResponse.decisions.push(decisionDeployment) + } + if (isDecisionRequirementsDeployment(deployment)) { + const decisionRequirementsDeployment = losslessParse( + stringify(deployment)!, + DecisionRequirementsDeployment + ) + deploymentResponse.deployments.push({ + decisionRequirements: decisionRequirementsDeployment, + }) + deploymentResponse.decisionRequirements.push( + decisionRequirementsDeployment + ) + } + if (isFormDeployment(deployment)) { + const formDeployment = losslessParse( + stringify(deployment)!, + FormDeployment + ) + deploymentResponse.deployments.push({ form: formDeployment }) + 
deploymentResponse.forms.push(formDeployment) + } + }) + + return deploymentResponse + } + + /** + * Deploy resources to Camunda 8 from files + * @param files an array of file paths + * + * @since 8.6.0 + */ + public async deployResourcesFromFiles(files: string[]) { + const resources: { content: string; name: string }[] = [] + + for (const file of files) { + resources.push({ + content: fs.readFileSync(file, { encoding: 'binary' }), + name: file, + }) + } + + return this.deployResources(resources) + } + + /** + * Deletes a deployed resource. This can be a process definition, decision requirements definition, or form definition deployed using the deploy resources endpoint. Specify the resource you want to delete in the resourceKey parameter. + * + * Documentation: https://docs.camunda.io/docs/apis-tools/camunda-api-rest/specifications/delete-resource/ + * + * @since 8.6.0 + */ + public async deleteResource(req: { + resourceKey: string + operationReference?: number + }) { + const headers = await this.getHeaders() + const { resourceKey, operationReference } = req + return this.rest.then((rest) => + rest.post(`resources/${resourceKey}/deletion`, { + headers, + body: stringify({ operationReference }), + }) + ) + } + + /** + * Set a precise, static time for the Zeebe engine's internal clock. + * When the clock is pinned, it remains at the specified time and does not advance. + * To change the time, the clock must be pinned again with a new timestamp, or reset. + * + * Documentation: https://docs.camunda.io/docs/apis-tools/camunda-api-rest/specifications/pin-internal-clock/ + * + * @since 8.6.0 + */ + public async pinInternalClock(epochMs: number) { + const headers = await this.getHeaders() + + return this.rest.then((rest) => + rest.put(`clock`, { + headers, + body: JSON.stringify({ timestamp: epochMs }), + }) + ) + } + + /** + * Resets the Zeebe engine's internal clock to the current system time, enabling it to tick in real-time. 
+ * This operation is useful for returning the clock to normal behavior after it has been pinned to a specific time. + * + * Documentation: https://docs.camunda.io/docs/apis-tools/camunda-api-rest/specifications/reset-internal-clock/ + * + * @since 8.6.0 + */ + public async resetClock() { + const headers = await this.getHeaders() + return this.rest.then((rest) => rest.post(`clock/reset`, { headers })) + } + + /** + * Updates all the variables of a particular scope (for example, process instance, flow element instance) with the given variable data. + * Specify the element instance in the elementInstanceKey parameter. + * + * Documentation: https://docs.camunda.io/docs/apis-tools/camunda-api-rest/specifications/update-element-instance-variables/ + * + * @since 8.6.0 + */ + public async updateElementInstanceVariables( + req: UpdateElementVariableRequest + ) { + const headers = await this.getHeaders() + const { elementInstanceKey, ...request } = req + return this.rest.then((rest) => + rest.post(`element-instances/${elementInstanceKey}/variables`, { + headers, + body: stringify(request), + }) + ) + } + + private addJobMethods = ( + job: RestJob + ): RestJob & + JobCompletionInterfaceRest => { + return { + ...job, + cancelWorkflow: () => { + throw new Error('Not Implemented') + }, + complete: (variables: IProcessVariables = {}) => + this.completeJob({ + jobKey: job.jobKey, + variables, + }), + error: (error) => + this.errorJob({ + ...error, + jobKey: job.jobKey, + }), + fail: (failJobRequest) => this.failJob(failJobRequest), + /* This has an effect in a Job Worker, decrementing the currently active job count */ + forward: () => JOB_ACTION_ACKNOWLEDGEMENT, + modifyJobTimeout: ({ newTimeoutMs }: { newTimeoutMs: number }) => + this.updateJob({ jobKey: job.jobKey, timeout: newTimeoutMs }), + } + } + + /** + * Helper method to add the default tenantIds if we are not passed explicit tenantIds + */ + private addDefaultTenantId(request: T) { + const tenantId = request.tenantId 
?? this.tenantId + return { ...request, tenantId } + } +} diff --git a/src/c8/lib/RestApiJobClassFactory.ts b/src/c8/lib/RestApiJobClassFactory.ts new file mode 100644 index 00000000..b1306c54 --- /dev/null +++ b/src/c8/lib/RestApiJobClassFactory.ts @@ -0,0 +1,67 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +import { LosslessDto } from '../../lib' + +import { RestApiJob } from './C8Dto' + +const factory = createMemoizedSpecializedRestApiJobClassFactory() + +// Creates a specialized RestApiJob class that is cached based on input variables and custom headers. +export const createSpecializedRestApiJobClass = < + Variables extends LosslessDto, + CustomHeaders extends LosslessDto, +>( + inputVariableDto: new (obj: any) => Variables, + customHeaders: new (obj: any) => CustomHeaders +) => { + // Assuming `createMemoizedSpecializedRestApiJobClassFactory` is available + return factory(inputVariableDto, customHeaders) +} + +function createMemoizedSpecializedRestApiJobClassFactory() { + const cache = new Map() + + return function < + Variables extends LosslessDto, + CustomHeaders extends LosslessDto, + >( + inputVariableDto: new (obj: any) => Variables, + customHeadersDto: new (obj: any) => CustomHeaders + ): new (obj: any) => RestApiJob { + // Create a unique cache key based on the class and inputs + const cacheKey = JSON.stringify({ + inputVariableDto, + customHeadersDto, + }) + + // Check for cached result + if (cache.has(cacheKey)) { + return cache.get(cacheKey) + } + + // Create a new class that extends the original class + class NewRestApiJobClass< + Variables extends LosslessDto, + CustomHeaders extends LosslessDto, + > extends RestApiJob {} + + // Use Reflect to define the metadata on the new class's prototype + Reflect.defineMetadata( + 'child:class', + inputVariableDto, + NewRestApiJobClass.prototype, + 'variables' + ) + Reflect.defineMetadata( + 'child:class', + customHeadersDto, + NewRestApiJobClass.prototype, + 'customHeaders' + ) + + // Store the 
new class in cache + cache.set(cacheKey, NewRestApiJobClass) + + // Return the new class + return NewRestApiJobClass + } +} diff --git a/src/c8/lib/RestApiProcessInstanceClassFactory.ts b/src/c8/lib/RestApiProcessInstanceClassFactory.ts new file mode 100644 index 00000000..d5f1654d --- /dev/null +++ b/src/c8/lib/RestApiProcessInstanceClassFactory.ts @@ -0,0 +1,55 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +import { LosslessDto } from '../../lib' + +import { CreateProcessInstanceResponse } from './C8Dto' + +const factory = + createMemoizedSpecializedCreateProcessInstanceResponseClassFactory() + +// Creates a specialized RestApiJob class that is cached based on output variables +export const createSpecializedCreateProcessInstanceResponseClass = < + Variables extends LosslessDto, +>( + outputVariableDto: new (obj: any) => Variables +) => { + return factory(outputVariableDto) +} + +function createMemoizedSpecializedCreateProcessInstanceResponseClassFactory() { + const cache = new Map() + + return function ( + outputVariableDto: new (obj: any) => Variables + ): new (obj: any) => CreateProcessInstanceResponse { + // Create a unique cache key based on the class and inputs + const cacheKey = JSON.stringify({ + outputVariableDto, + }) + + // Check for cached result + if (cache.has(cacheKey)) { + return cache.get(cacheKey) + } + + // Create a new class that extends the original class + class CustomCreateProcessInstanceResponseClass< + Variables extends LosslessDto, + > extends CreateProcessInstanceResponse { + variables!: Variables + } + + // Use Reflect to define the metadata on the new class's prototype + Reflect.defineMetadata( + 'child:class', + outputVariableDto, + CustomCreateProcessInstanceResponseClass.prototype, + 'variables' + ) + + // Store the new class in cache + cache.set(cacheKey, CustomCreateProcessInstanceResponseClass) + + // Return the new class + return CustomCreateProcessInstanceResponseClass + } +} diff --git a/src/index.ts 
b/src/index.ts index d9ee8014..6255b6de 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,6 +1,13 @@ import * as Admin from './admin' import { Camunda8 } from './c8' -import { BigIntValue, ChildDto, Int64String, LosslessDto } from './lib' +import { CamundaRestClient } from './c8/lib/CamundaRestClient' +import { + BigIntValue, + ChildDto, + Int64String, + LosslessDto, + createDtoInstance, +} from './lib' import * as Modeler from './modeler' import * as Auth from './oauth' import * as Operate from './operate' @@ -8,8 +15,24 @@ import * as Optimize from './optimize' import * as Tasklist from './tasklist' import * as Zeebe from './zeebe' -export { /*HTTPError,*/ RESTError } from './lib' +export { HTTPError } from './lib' -export const Dto = { ChildDto, BigIntValue, Int64String, LosslessDto } +export const Dto = { + ChildDto, + BigIntValue, + Int64String, + LosslessDto, + createDtoInstance, +} -export { Admin, Auth, Camunda8, Modeler, Operate, Optimize, Tasklist, Zeebe } +export { + Admin, + Auth, + Camunda8, + CamundaRestClient, + Modeler, + Operate, + Optimize, + Tasklist, + Zeebe, +} diff --git a/src/lib/Configuration.ts b/src/lib/Configuration.ts index 6ec78da1..60f6893d 100644 --- a/src/lib/Configuration.ts +++ b/src/lib/Configuration.ts @@ -1,6 +1,9 @@ +import { BeforeRequestHook } from 'got' import mergeWith from 'lodash.mergewith' import { createEnv } from 'neon-env' +import { Logger } from '../c8/lib/C8Logger' + const getMainEnv = () => createEnv({ /** Custom user agent */ @@ -20,10 +23,18 @@ const getMainEnv = () => optional: true, default: 1000, }, + /** The log level for logging. Defaults to 'info'. Values (in order of priority): 'error', 'warn', 'info', 'http', 'verbose', 'debug', 'silly' */ + CAMUNDA_LOG_LEVEL: { + type: 'string', + optional: true, + choices: ['error', 'warn', 'info', 'http', 'verbose', 'debug', 'silly'], + default: 'info', + }, /** The address for the Zeebe GRPC. 
*/ ZEEBE_GRPC_ADDRESS: { type: 'string', optional: true, + default: 'localhost:26500', }, /** The address for the Zeebe REST API. Defaults to localhost:8080 */ ZEEBE_REST_ADDRESS: { @@ -35,7 +46,6 @@ const getMainEnv = () => ZEEBE_ADDRESS: { type: 'string', optional: true, - default: 'localhost:26500', }, /** This is the client ID for the client credentials */ ZEEBE_CLIENT_ID: { @@ -357,9 +367,8 @@ const getEnv = () => ({ // Helper type for enforcing array contents to match an object's keys // eslint-disable-next-line @typescript-eslint/no-explicit-any -type EnforceArrayContent = T extends Array - ? T - : never +type EnforceArrayContent = + T extends Array ? T : never // Function to create a complete keys array, enforcing completeness at compile time // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -378,6 +387,7 @@ export const CamundaEnvironmentVariableDictionary = 'CAMUNDA_CONSOLE_CLIENT_ID', 'CAMUNDA_CONSOLE_CLIENT_SECRET', 'CAMUNDA_CONSOLE_OAUTH_AUDIENCE', + 'CAMUNDA_LOG_LEVEL', 'CAMUNDA_MODELER_BASE_URL', 'CAMUNDA_MODELER_OAUTH_AUDIENCE', 'CAMUNDA_OPERATE_BASE_URL', @@ -424,8 +434,15 @@ export class CamundaEnvironmentConfigurator { export type CamundaPlatform8Configuration = ReturnType< typeof CamundaEnvironmentConfigurator.ENV -> +> & { + middleware?: BeforeRequestHook[] +} export type DeepPartial = { [K in keyof T]?: T[K] extends object ? DeepPartial : T[K] } + +export type Camunda8ClientConfiguration = + DeepPartial & { + logger?: Logger + } diff --git a/src/lib/CreateDtoInstance.ts b/src/lib/CreateDtoInstance.ts new file mode 100644 index 00000000..9350959f --- /dev/null +++ b/src/lib/CreateDtoInstance.ts @@ -0,0 +1,20 @@ +/** + * Create an instance of a DTO class with the provided data. + * + * This provides a type-safe method to create a DTO instance from a plain object. + * + * Node 22's experimental strip types does not play well with the previous "via the constructor" method. 
+ * + * See: https://gist.github.com/jwulf/6e7b093b5b7b3e12c7b76f55b9e4be84 + * + * @param dtoClass + * @param dtoData + * @returns + */ +export function createDtoInstance(dtoClass: { new (): T }, dtoData: T) { + const newDto = new dtoClass() + for (const key in dtoData) { + newDto[key] = dtoData[key] + } + return newDto +} diff --git a/src/lib/GotErrors.ts b/src/lib/GotErrors.ts index 94d7dd47..bae73137 100644 --- a/src/lib/GotErrors.ts +++ b/src/lib/GotErrors.ts @@ -20,7 +20,7 @@ export class HTTPError extends Got.HTTPError { } } -export type RESTError = +export type RestError = | HTTPError | Got.RequestError | Got.ReadError diff --git a/src/lib/GotHooks.ts b/src/lib/GotHooks.ts index 2f3ca71d..b9159199 100644 --- a/src/lib/GotHooks.ts +++ b/src/lib/GotHooks.ts @@ -22,18 +22,22 @@ export const gotErrorHandler = (options, next) => { */ export const gotBeforeErrorHook = (error) => { const { request } = error + let detail = '' if (error instanceof GotHTTPError) { error = new HTTPError(error.response) try { - const details = JSON.parse((error.response?.body as string) || '{}') + const details = JSON.parse( + (error.response?.body as string) || '{detail:""}' + ) error.statusCode = details.status + detail = details.detail ?? '' } catch (e) { error.statusCode = 0 } } // eslint-disable-next-line @typescript-eslint/no-explicit-any ;(error as any).source = (error as any).options.context.stack.split('\n') - error.message += ` (request to ${request?.options.url.href})` + error.message += ` (request to ${request?.options.url.href}). ${detail}` return error } diff --git a/src/lib/LosslessJsonParser.ts b/src/lib/LosslessJsonParser.ts index 8c62a54d..336a6a69 100644 --- a/src/lib/LosslessJsonParser.ts +++ b/src/lib/LosslessJsonParser.ts @@ -1,13 +1,23 @@ /** * This is a custom JSON Parser that handles lossless parsing of int64 numbers by using the lossless-json library. 
* + * This is motivated by the use of int64 for Camunda 8 Entity keys, which are not supported by JavaScript's Number type. + * Variables could also contain unsafe large integers if an external system sends them to the broker. + * * It converts all JSON numbers to lossless numbers, then converts them back to the correct type based on the metadata * of a Dto class - fields decorated with `@Int64` are converted to a `string`, fields decorated with `@BigIntValue` are * converted to `bigint`. All other numbers are converted to `number`. Throws if a number cannot be safely converted. * * It also handles nested Dtos by using the `@ChildDto` decorator. * + * Update: added an optional `key` parameter to support the Camunda 8 REST API's use of an array under a key, e.g. { jobs : Job[] } + * + * Note: the parser uses DTO classes that extend the LosslessDto class to perform mappings of numeric types. However, only the type of + * the annotated numerics is type-checked at runtime. Fields of other types are not checked. + * * More details on the design here: https://github.com/camunda/camunda-8-js-sdk/issues/81#issuecomment-2022213859 + * + * See this article to understand why this is necessary: https://jsoneditoronline.org/indepth/parse/why-does-json-parse-corrupt-large-numbers/ */ /* eslint-disable @typescript-eslint/no-explicit-any */ @@ -23,6 +33,13 @@ import 'reflect-metadata' const debug = d('lossless-json-parser') +const MetadataKey = { + INT64_STRING: 'type:int64', + INT64_STRING_ARRAY: 'type:int64[]', + INT64_BIGINT: 'type:bigint', + INT64_BIGINT_ARRAY: 'type:bigint[]', + CHILD_DTO: 'child:class', +} /** * Decorate Dto string fields as `@Int64String` to specify that the JSON number property should be parsed as a string. 
* @example @@ -41,11 +58,34 @@ const debug = d('lossless-json-parser') * ``` */ export function Int64String(target: any, propertyKey: string | symbol): void { - Reflect.defineMetadata('type:int64', true, target, propertyKey) + Reflect.defineMetadata(MetadataKey.INT64_STRING, true, target, propertyKey) } /** - * Decorate Dto bigint fields as `@BigInt` to specify that the JSON number property should be parsed as a bigint. + * Decorate Dto string fields as `@Int64StringArray` to specify that the array of JSON numbers should be parsed as an array of strings. + * @example + * ```typescript + * class Dto extends LosslessDto { + * message!: string + * userId!: number + * @Int64StringArray + * sendTo!: string[] + * } + */ +export function Int64StringArray( + target: any, + propertyKey: string | symbol +): void { + Reflect.defineMetadata( + MetadataKey.INT64_STRING_ARRAY, + true, + target, + propertyKey + ) +} + +/** + * Decorate Dto bigint fields as `@BigIntValue` to specify that the JSON number property should be parsed as a bigint. * @example * ```typescript * class MyDto extends LosslessDto { @@ -62,9 +102,37 @@ export function Int64String(target: any, propertyKey: string | symbol): void { * ``` */ export function BigIntValue(target: any, propertKey: string | symbol): void { - Reflect.defineMetadata('type:bigint', true, target, propertKey) + Reflect.defineMetadata(MetadataKey.INT64_BIGINT, true, target, propertKey) } +/** + * Decorate Dto bigint fields as `@BigIntValueArray` to specify that the JSON number property should be parsed as a bigint. 
+ * @example + * ```typescript + * class MyDto extends LosslessDto { + * @Int64String + * int64NumberField!: string + * @BigIntValueArray + * bigintField!: bigint[] + * @ChildDto(MyChildDto) + * childDtoField!: MyChildDto + * normalField!: string + * normalNumberField!: number + * maybePresentField?: string + * } + * ``` + */ +export function BigIntValueArray( + target: any, + propertKey: string | symbol +): void { + Reflect.defineMetadata( + MetadataKey.INT64_BIGINT_ARRAY, + true, + target, + propertKey + ) +} /** * Decorate a Dto object field as `@ChildDto` to specify that the JSON object property should be parsed as a child Dto. * @example @@ -88,7 +156,12 @@ export function BigIntValue(target: any, propertKey: string | symbol): void { */ export function ChildDto(childClass: any) { return function (target: any, propertyKey: string | symbol) { - Reflect.defineMetadata('child:class', childClass, target, propertyKey) + Reflect.defineMetadata( + MetadataKey.CHILD_DTO, + childClass, + target, + propertyKey + ) } } @@ -109,24 +182,10 @@ export function ChildDto(childClass: any) { * } * ``` */ -export class LosslessDto { - constructor(obj: any) { - if (obj) { - for (const [key, value] of Object.entries(obj)) { - this[key] = value - } - } - } -} - -export function losslessParseArray( - json: string, - dto?: { new (...args: any[]): T } -): T[] { - return losslessParse(json, dto) as T[] -} +export class LosslessDto {} /** + * losslessParse uses lossless-json parse to deserialize JSON. * With no Dto, the parser will throw if it encounters an int64 number that cannot be safely represented as a JS number. * * @param json the JSON string to parse @@ -134,10 +193,43 @@ export function losslessParseArray( */ export function losslessParse( json: string, - dto?: { new (...args: any[]): T } + dto?: { new (...args: any[]): T }, + keyToParse?: string ): T { + /** + * lossless-json parse converts all numerics to LosslessNumber type instead of number type. 
+ * Here we safely parse the string into an JSON object with all numerics as type LosslessNumber. + * This way we lose no fidelity at this stage, and can then use a supplied DTO to map large numbers + * or throw if we find an unsafe number. + */ + const parsedLossless = parse(json) as any + /** + * Specifying a keyToParse value applies all the mapping functionality to a key of the object in the JSON. + * gRPC API responses were naked objects or arrays of objects. REST response shapes typically have + * an array under an object key - eg: { jobs: [ ... ] } + * + * Since we now have a safely parsed object, we can recursively call losslessParse with the key, if it exists. + */ + if (keyToParse) { + if (parsedLossless[keyToParse]) { + return losslessParse(stringify(parsedLossless[keyToParse]) as string, dto) + } + /** + * A key was specified, but it was not found on the parsed object. + * At this point we should throw, because we cannot perform the operation requested. Something has gone wrong with + * the expected shape of the response. + * + * We throw an error with the actual shape of the object to help with debugging. + */ + throw new Error( + `Attempted to parse key ${keyToParse} on an object that does not have this key: ${stringify( + parsedLossless + )}` + ) + } + if (Array.isArray(parsedLossless)) { debug(`Array input detected. Parsing array.`) return parseArrayWithAnnotations( @@ -152,6 +244,10 @@ export function losslessParse( debug(`Got a Dto ${dto.name}. Parsing with annotations.`) const parsed = parseWithAnnotations(parsedLossless, dto) debug(`Converting remaining lossless numbers to numbers for ${dto.name}`) + /** All numbers are parsed to LosslessNumber by lossless-json. For any fields that should be numbers, we convert them + * now to number. Because we expose large values as string or BigInt, the only Lossless numbers left on the object + * are unmapped. So at this point we convert all remaining LosslessNumbers to number type if safe, and throw if not. 
+ */ return convertLosslessNumbersToNumberOrThrow(parsed) } @@ -162,35 +258,98 @@ function parseWithAnnotations( const instance = new dto() for (const [key, value] of Object.entries(obj)) { - const childClass = Reflect.getMetadata('child:class', dto.prototype, key) + const childClass = Reflect.getMetadata( + MetadataKey.CHILD_DTO, + dto.prototype, + key + ) if (childClass) { if (Array.isArray(value)) { // If the value is an array, parse each element with the specified child class instance[key] = value.map((item) => - losslessParse(stringify(item) as string, childClass) + losslessParse(stringify(item)!, childClass) ) } else { // If the value is an object, parse it with the specified child class - instance[key] = losslessParse(stringify(value) as string, childClass) + instance[key] = losslessParse(stringify(value)!, childClass) } } else { - if (Reflect.hasMetadata('type:int64', dto.prototype, key)) { + if ( + Reflect.hasMetadata(MetadataKey.INT64_STRING_ARRAY, dto.prototype, key) + ) { + debug(`Parsing int64 array field "${key}" to string`) + if (Array.isArray(value)) { + instance[key] = value.map((item) => { + if (isLosslessNumber(item)) { + return item.toString() + } else { + debug('Unexpected type for value', value) + throw new Error( + `Unexpected type: Received JSON ${typeof item} value for Int64String Dto field "${key}", expected number` + ) + } + }) + } else { + const type = value instanceof LosslessNumber ? 
'number' : typeof value + throw new Error( + `Unexpected type: Received JSON ${type} value for Int64StringArray Dto field "${key}", expected Array` + ) + } + } else if ( + Reflect.hasMetadata(MetadataKey.INT64_STRING, dto.prototype, key) + ) { debug(`Parsing int64 field "${key}" to string`) if (value) { if (isLosslessNumber(value)) { - instance[key] = (value as LosslessNumber).toString() + instance[key] = value.toString() } else { + if (Array.isArray(value)) { + throw new Error( + `Unexpected type: Received JSON array value for Int64String Dto field "${key}", expected number. If you are expecting an array, use the @Int64StringArray decorator.` + ) + } + const type = + value instanceof LosslessNumber ? 'number' : typeof value + throw new Error( - `Unexpected type: Received JSON ${typeof value} value for Int64String Dto field "${key}", expected number` + `Unexpected type: Received JSON ${type} value for Int64String Dto field "${key}", expected number` ) } } - } else if (Reflect.hasMetadata('type:bigint', dto.prototype, key)) { + } else if ( + Reflect.hasMetadata(MetadataKey.INT64_BIGINT_ARRAY, dto.prototype, key) + ) { + debug(`Parsing int64 array field "${key}" to BigInt`) + if (Array.isArray(value)) { + instance[key] = value.map((item) => { + if (isLosslessNumber(item)) { + return BigInt(item.toString()) + } else { + debug('Unexpected type for value', value) + throw new Error( + `Unexpected type: Received JSON ${typeof item} value for BigIntValue in Dto field "${key}[]", expected number` + ) + } + }) + } else { + const type = value instanceof LosslessNumber ? 
'number' : typeof value + throw new Error( + `Unexpected type: Received JSON ${type} value for BigIntValueArray Dto field "${key}", expected Array` + ) + } + } else if ( + Reflect.hasMetadata(MetadataKey.INT64_BIGINT, dto.prototype, key) + ) { debug(`Parsing bigint field ${key}`) if (value) { if (isLosslessNumber(value)) { - instance[key] = BigInt((value as LosslessNumber).toString()) + instance[key] = BigInt(value.toString()) } else { + if (Array.isArray(value)) { + throw new Error( + `Unexpected type: Received JSON array value for BigIntValue Dto field "${key}", expected number. If you are expecting an array, use the @BigIntValueArray decorator.` + ) + } throw new Error( `Unexpected type: Received JSON ${typeof value} value for BigIntValue Dto field "${key}", expected number` ) @@ -217,13 +376,22 @@ function parseArrayWithAnnotations( } /** - * Convert all `LosslessNumber` instances to a number or throw if any are unsafe + * Convert all `LosslessNumber` instances to a number or throw if any are unsafe. + * + * All numerics are converted to LosslessNumbers by lossless-json parse. Then, if a DTO was provided, + * all mappings have been done to either BigInt or string type. So all remaining LosslessNumbers in the object + * are either unmapped or mapped to number. + * + * Here we convert all remaining LosslessNumbers to a safe number value, or throw if an unsafe value is detected. */ function convertLosslessNumbersToNumberOrThrow(obj: any): T { debug(`Parsing LosslessNumbers to numbers for ${obj?.constructor?.name}`) if (!obj) { return obj } + if (obj instanceof LosslessNumber) { + return toSafeNumberOrThrow(obj.toString()) as T + } let currentKey = '' try { Object.keys(obj).forEach((key) => { @@ -262,6 +430,22 @@ export function losslessStringify( debug(`Object is not a LosslessDto. Stringifying as normal JSON.`) } + if (obj instanceof Date) { + throw new Error( + `Date type not supported in variables. 
Please serialize with .toISOString() before passing to Camunda` + ) + } + if (obj instanceof Map) { + throw new Error( + `Map type not supported in variables. Please serialize with Object.fromEntries() before passing to Camunda` + ) + } + if (obj instanceof Set) { + throw new Error( + `Set type not supported in variables. Please serialize with Array.from() before passing to Camunda` + ) + } + const newObj: any = Array.isArray(obj) ? [] : {} Object.keys(obj).forEach((key) => { @@ -270,11 +454,11 @@ export function losslessStringify( if (typeof value === 'object' && value !== null) { // If the value is an object or array, recurse into it newObj[key] = losslessStringify(value, false) - } else if (Reflect.getMetadata('type:int64', obj, key)) { + } else if (Reflect.getMetadata(MetadataKey.INT64_STRING, obj, key)) { // If the property is decorated with @Int64String, convert the string to a LosslessNumber debug(`Stringifying int64 string field ${key}`) newObj[key] = new LosslessNumber(value) - } else if (Reflect.getMetadata('type:bigint', obj, key)) { + } else if (Reflect.getMetadata(MetadataKey.INT64_BIGINT, obj, key)) { // If the property is decorated with @BigIntValue, convert the bigint to a LosslessNumber debug(`Stringifying bigint field ${key}`) newObj[key] = new LosslessNumber(value.toString()) diff --git a/src/lib/index.ts b/src/lib/index.ts index af6d9a5b..58adb557 100644 --- a/src/lib/index.ts +++ b/src/lib/index.ts @@ -1,6 +1,7 @@ export * from './ClientConstructor' export * from './Configuration' export * from './ConstructOAuthProvider' +export * from './CreateDtoInstance' export * from './CreateUserAgentString' export * from './Delay' export * from './EnvironmentSetup' diff --git a/src/modeler/lib/ModelerAPIClient.ts b/src/modeler/lib/ModelerAPIClient.ts index d052a22f..a5c0d103 100644 --- a/src/modeler/lib/ModelerAPIClient.ts +++ b/src/modeler/lib/ModelerAPIClient.ts @@ -56,6 +56,7 @@ export class ModelerApiClient { ), ], beforeError: [gotBeforeErrorHook], + 
beforeRequest: config.middleware ?? [], }, }) ) diff --git a/src/operate/lib/OperateApiClient.ts b/src/operate/lib/OperateApiClient.ts index 4833636c..f8da941f 100644 --- a/src/operate/lib/OperateApiClient.ts +++ b/src/operate/lib/OperateApiClient.ts @@ -111,6 +111,7 @@ export class OperateApiClient { ), ], beforeError: [gotBeforeErrorHook], + beforeRequest: config.middleware ?? [], }, }) ) diff --git a/src/optimize/lib/OptimizeApiClient.ts b/src/optimize/lib/OptimizeApiClient.ts index 29eb5d00..defea64a 100644 --- a/src/optimize/lib/OptimizeApiClient.ts +++ b/src/optimize/lib/OptimizeApiClient.ts @@ -89,6 +89,7 @@ export class OptimizeApiClient { ), ], beforeError: [gotBeforeErrorHook], + beforeRequest: config.middleware ?? [], }, }) ) diff --git a/src/proto/zeebe.proto b/src/proto/zeebe.proto index 0333ff8e..fd9bf17e 100644 --- a/src/proto/zeebe.proto +++ b/src/proto/zeebe.proto @@ -89,6 +89,8 @@ message CancelProcessInstanceRequest { // the process instance key (as, for example, obtained from // CreateProcessInstanceResponse) int64 processInstanceKey = 1; + // a reference key chosen by the user and will be part of all records resulted from this operation + optional uint64 operationReference = 2; } message CancelProcessInstanceResponse { @@ -124,6 +126,9 @@ message CreateProcessInstanceRequest { repeated ProcessInstanceCreationStartInstruction startInstructions = 5; // the tenant id of the process definition string tenantId = 6; + + // a reference key chosen by the user and will be part of all records resulted from this operation + optional uint64 operationReference = 7; } message ProcessInstanceCreationStartInstruction { @@ -481,6 +486,8 @@ message PublishMessageResponse { message ResolveIncidentRequest { // the unique ID of the incident to resolve int64 incidentKey = 1; + // a reference key chosen by the user and will be part of all records resulted from this operation + optional uint64 operationReference = 2; } message ResolveIncidentResponse { @@ -543,6 
+550,8 @@ message UpdateJobRetriesRequest { int64 jobKey = 1; // the new amount of retries for the job; must be positive int32 retries = 2; + // a reference key chosen by the user and will be part of all records resulted from this operation + optional uint64 operationReference = 3; } message UpdateJobRetriesResponse { @@ -553,6 +562,8 @@ message UpdateJobTimeoutRequest { int64 jobKey = 1; // the duration of the new timeout in ms, starting from the current moment int64 timeout = 2; + // a reference key chosen by the user and will be part of all records resulted from this operation + optional uint64 operationReference = 3; } message UpdateJobTimeoutResponse { @@ -574,6 +585,8 @@ message SetVariablesRequest { // be unchanged, and scope 2 will now be `{ "bar" : 1, "foo" 5 }`. if local was false, however, // then scope 1 would be `{ "foo": 5 }`, and scope 2 would be `{ "bar" : 1 }`. bool local = 3; + // a reference key chosen by the user and will be part of all records resulted from this operation + optional uint64 operationReference = 4; } message SetVariablesResponse { @@ -589,6 +602,8 @@ message ModifyProcessInstanceRequest { repeated ActivateInstruction activateInstructions = 2; // instructions describing which elements should be terminated repeated TerminateInstruction terminateInstructions = 3; + // a reference key chosen by the user and will be part of all records resulted from this operation + optional uint64 operationReference = 4; message ActivateInstruction { // the id of the element that should be activated @@ -628,6 +643,8 @@ message MigrateProcessInstanceRequest { int64 processInstanceKey = 1; // the migration plan that defines target process and element mappings MigrationPlan migrationPlan = 2; + // a reference key chosen by the user and will be part of all records resulted from this operation + optional uint64 operationReference = 3; message MigrationPlan { // the key of process definition to migrate the process instance to @@ -652,6 +669,8 @@ message 
DeleteResourceRequest { // The key of the resource that should be deleted. This can either be the key // of a process definition, the key of a decision requirements definition or the key of a form. int64 resourceKey = 1; + // a reference key chosen by the user and will be part of all records resulted from this operation + optional uint64 operationReference = 2; } message DeleteResourceResponse { @@ -934,9 +953,12 @@ service Gateway { FAILED_PRECONDITION: - not all active elements in the given process instance are mapped to the elements in the target process definition - a mapping instruction changes the type of an element or event + - a mapping instruction changes the implementation of a task + - a mapping instruction detaches a boundary event from an active element - a mapping instruction refers to an unsupported element (i.e. some elements will be supported later on) - a mapping instruction refers to element in unsupported scenarios. (i.e. migration is not supported when process instance or target process elements contains event subscriptions) + - multiple mapping instructions target the same boundary event INVALID_ARGUMENT: - A `sourceElementId` does not refer to an element in the process instance's process definition diff --git a/src/tasklist/lib/TasklistApiClient.ts b/src/tasklist/lib/TasklistApiClient.ts index b4223545..cfdeaf0c 100644 --- a/src/tasklist/lib/TasklistApiClient.ts +++ b/src/tasklist/lib/TasklistApiClient.ts @@ -90,6 +90,7 @@ export class TasklistApiClient { ), ], beforeError: [gotBeforeErrorHook], + beforeRequest: config.middleware ?? 
[], }, }) ) diff --git a/src/zeebe/lib/GrpcClient.ts b/src/zeebe/lib/GrpcClient.ts index 069a16de..57a92df3 100644 --- a/src/zeebe/lib/GrpcClient.ts +++ b/src/zeebe/lib/GrpcClient.ts @@ -312,8 +312,6 @@ export class GrpcClient extends EventEmitter { this[`${methodName}Stream`] = async (data) => { debug(`Calling ${methodName}Stream...`, host) if (this.closing) { - // tslint:disable-next-line: no-console - console.log('Short-circuited on channel closed') // @DEBUG return } let stream: ClientReadableStream @@ -506,9 +504,6 @@ export class GrpcClient extends EventEmitter { }) return setTimeout(() => { - // tslint:disable-next-line: no-console - console.log(`Channel timeout after ${timeout}`) // @DEBUG - return isClosed(this.channelState) ? null : reject(new Error(`Didn't close in time: ${this.channelState}`)) @@ -660,8 +655,7 @@ export class GrpcClient extends EventEmitter { if (isError) { if ( callStatus.code === 1 && - callStatus.details.includes('503') // || - // callStatus.code === 13 + callStatus.details.includes('503') // 'Service Unavailable' ) { return this.emit(MiddlewareSignals.Event.GrpcInterceptError, { callStatus, diff --git a/src/zeebe/lib/ZBWorkerBase.ts b/src/zeebe/lib/ZBWorkerBase.ts index 730c1f65..0ec7da80 100644 --- a/src/zeebe/lib/ZBWorkerBase.ts +++ b/src/zeebe/lib/ZBWorkerBase.ts @@ -140,14 +140,12 @@ export class ZBWorkerBase< this.inputVariableDto = inputVariableDto ?? (LosslessDto as { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - new (obj: any): WorkerInputVariables + new (): WorkerInputVariables }) this.customHeadersDto = customHeadersDto ?? (LosslessDto as { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - new (obj: any): CustomHeaderShape + new (): CustomHeaderShape }) this.taskHandler = taskHandler this.taskType = taskType @@ -303,9 +301,6 @@ export class ZBWorkerBase< chalk.red(`WARNING: Call to ${thisMethod}() after ${methodCalled}() was called. 
You should call only one job action method in the worker handler. This is a bug in the ${this.taskType} worker handler.`) ) - // tslint:disable-next-line: no-console - console.log('handler', this.taskHandler.toString()) // @DEBUG - return wrappedFunction(...args) } methodCalled = thisMethod @@ -380,11 +375,13 @@ You should call only one job action method in the worker handler. This is a bug errorMessage, retries, retryBackOff, + variables, }: { job: ZB.Job errorMessage: string retries?: number retryBackOff?: number + variables?: ZB.JSONDoc }) { return this.zbClient .failJob({ @@ -392,6 +389,7 @@ You should call only one job action method in the worker handler. This is a bug jobKey: job.key, retries: retries ?? job.retries - 1, retryBackOff: retryBackOff ?? 0, + variables: variables ?? {}, }) .then(() => ZB.JOB_ACTION_ACKNOWLEDGEMENT) .finally(() => { diff --git a/src/zeebe/lib/interfaces-1.0.ts b/src/zeebe/lib/interfaces-1.0.ts index 31719fac..b3070467 100644 --- a/src/zeebe/lib/interfaces-1.0.ts +++ b/src/zeebe/lib/interfaces-1.0.ts @@ -1,9 +1,12 @@ import { ClientReadableStream } from '@grpc/grpc-js' import { Chalk } from 'chalk' +import { Response } from 'got' +import { LosslessNumber } from 'lossless-json' import { MaybeTimeDuration } from 'typed-duration' import { GrpcClient } from './GrpcClient' import { + ActivateInstruction, ActivateJobsRequest, BroadcastSignalRequest, BroadcastSignalResponse, @@ -20,6 +23,7 @@ import { FailJobRequest, MigrateProcessInstanceRequest, MigrateProcessInstanceResponse, + MigrationPlan, ModifyProcessInstanceRequest, ModifyProcessInstanceResponse, ProcessInstanceCreationStartInstruction, @@ -28,6 +32,7 @@ import { ResolveIncidentRequest, SetVariablesRequestOnTheWire, StreamActivatedJobsRequest, + TerminateInstruction, ThrowErrorRequest, TopologyResponse, UpdateJobRetriesRequest, @@ -35,7 +40,7 @@ import { } from './interfaces-grpc-1.0' import { Loglevel, ZBCustomLogger } from './interfaces-published-contract' -// The 
JSON-stringified version of this is sent to the ZBCustomLogger +/** The JSON-stringified version of this is sent to the ZBCustomLogger */ export interface ZBLogMessage { timestamp: Date context: string @@ -47,11 +52,11 @@ export interface ZBLogMessage { export interface CreateProcessBaseRequest { /** - * the BPMN process ID of the process definition + * The BPMN process ID of the process definition */ bpmnProcessId: string /** - * the version of the process; if not specified it will use the latest version + * The version of the process; if not specified it will use the latest version */ version?: number /** @@ -59,8 +64,10 @@ export interface CreateProcessBaseRequest { * process instance. */ variables: V - /** The tenantId for a multi-tenant enabled cluster. */ + /** The `tenantId` for a multi-tenant enabled cluster. */ tenantId?: string + /** A reference key chosen by the user and will be part of all records resulted from this operation */ + operationReference?: number | LosslessNumber } export interface CreateProcessInstanceReq @@ -76,12 +83,12 @@ export interface CreateProcessInstanceReq export interface CreateProcessInstanceWithResultReq extends CreateProcessBaseRequest { /** - * timeout in milliseconds. the request will be closed if the process is not completed before the requestTimeout. + * Timeout in milliseconds. the request will be closed if the process is not completed before the requestTimeout. * if requestTimeout = 0, uses the generic requestTimeout configured in the gateway. */ requestTimeout?: number /** - * list of names of variables to be included in `CreateProcessInstanceWithResultResponse.variables`. + * List of names of variables to be included in `CreateProcessInstanceWithResultResponse.variables`. * If empty, all visible variables in the root scope will be returned. */ fetchVariables?: string[] @@ -133,6 +140,10 @@ export interface JobFailureConfiguration { * Optional backoff for subsequent retries, in milliseconds. 
If not specified, it is zero. */ retryBackOff?: number + /** + * Optional variable update for the job + */ + variables?: JSONDoc } declare function FailureHandler( @@ -189,6 +200,48 @@ export interface JobCompletionInterface { error: ErrorJobOutcome } +export interface JobCompletionInterfaceRest { + /** + * Cancel the workflow. + */ + cancelWorkflow: () => Promise + /** + * Complete the job with a success, optionally passing in a state update to merge + * with the process variables on the broker. + */ + complete: ( + updatedVariables?: WorkerOutputVariables + ) => Promise + /** + * Fail the job with an informative message as to the cause. Optionally, pass in a + * value remaining retries. If no value is passed for retries then the current retry + * count is decremented. Pass in `0`for retries to raise an incident in Operate. Optionally, + * specify a retry backoff period in milliseconds. Default is 0ms (immediate retry) if not + * specified. + */ + fail: typeof FailureHandler + /** + * Mark this job as forwarded to another system for completion. No action is taken by the broker. + * This method releases worker capacity to handle another job. + */ + forward: () => JOB_ACTION_ACKNOWLEDGEMENT + /** + * + * Report a business error (i.e. non-technical) that occurs while processing a job. + * The error is handled in the process by an error catch event. + * If there is no error catch event with the specified errorCode then an incident will be raised instead. 
+ */ + error: (error: ErrorJobWithVariables) => Promise + /** + * Extend the timeout for the job by setting a new timeout + */ + modifyJobTimeout: ({ + newTimeoutMs, + }: { + newTimeoutMs: number + }) => Promise> +} + export interface ZeebeJob< WorkerInputVariables = IInputVariables, CustomHeaderShape = ICustomHeaders, @@ -263,14 +316,14 @@ export interface Job< readonly worker: string /* The amount of retries left to this job (should always be positive) */ readonly retries: number - // epoch milliseconds + /** Epoch milliseconds */ readonly deadline: string /** * All visible variables in the task scope, computed at activation time. */ readonly variables: Readonly /** - * TenantId of the job in a multi-tenant cluster + * The `tenantId` of the job in a multi-tenant cluster */ readonly tenantId: string } @@ -378,28 +431,73 @@ export interface ZBWorkerConfig< // eslint-disable-next-line @typescript-eslint/no-explicit-any customHeadersDto?: { new (...args: any[]): Readonly } /** - * An optional array of tenantIds if you want this to be a multi-tenant worker. + * An optional array of `tenantId`s if you want this to be a multi-tenant worker. */ tenantIds?: string[] } export interface BroadcastSignalReq { - // The name of the signal + /** The name of the signal */ signalName: string - // the signal variables as a JSON document; to be valid, the root of the document must be an - // object, e.g. { "a": "foo" }. [ "foo" ] would not be valid. + /** + * The signal variables as a JSON document; to be valid, the root of the document must be an object, e.g. { "a": "foo" }. [ "foo" ] would not be valid. + */ variables?: JSONDoc - // Optional tenantId for a multi-tenant enabled cluster. This could also be supplied via environment variable. + /** Optional `tenantId` for a multi-tenant enabled cluster. This could also be supplied via environment variable. */ tenantId?: string } export interface BroadcastSignalRes { - // the unique ID of the signal that was broadcasted. 
+ /** The unique ID of the signal that was broadcasted. */ key: string } +export interface ResolveIncidentReq { + readonly incidentKey: string + /** A reference key chosen by the user and will be part of all records resulted from this operation */ + operationReference?: number | LosslessNumber +} + +export interface UpdateJobRetriesReq { + readonly jobKey: string + retries: number + /** A reference key chosen by the user and will be part of all records resulted from this operation */ + operationReference?: number | LosslessNumber +} + +export interface UpdateJobTimeoutReq { + readonly jobKey: string + /** The duration of the new timeout in ms, starting from the current moment */ + timeout: number + /** A reference key chosen by the user and will be part of all records resulted from this operation */ + operationReference?: number | LosslessNumber +} + +export interface ModifyProcessInstanceReq { + /** The key of the process instance that should be modified */ + processInstanceKey: string + /** + * Instructions describing which elements should be activated in which scopes, + * and which variables should be created + */ + activateInstructions?: ActivateInstruction[] + /** Instructions describing which elements should be terminated */ + terminateInstructions?: TerminateInstruction[] + /** A reference key chosen by the user and will be part of all records resulted from this operation */ + operationReference?: number | LosslessNumber +} + +export interface MigrateProcessInstanceReq { + /** Key of the process instance to migrate */ + processInstanceKey: string + /** The migration plan that defines target process and element mappings */ + migrationPlan: MigrationPlan + /** A reference key chosen by the user and will be part of all records resulted from this operation */ + operationReference?: number | LosslessNumber +} + export interface ZBGrpc extends GrpcClient { completeJobSync: (req: CompleteJobRequest) => Promise activateJobsStream: ( @@ -434,6 +532,7 @@ export 
interface ZBGrpc extends GrpcClient { ): Promise cancelProcessInstanceSync(processInstanceKey: { processInstanceKey: string | number + operationReference?: string }): Promise migrateProcessInstanceSync( request: MigrateProcessInstanceRequest diff --git a/src/zeebe/lib/interfaces-grpc-1.0.ts b/src/zeebe/lib/interfaces-grpc-1.0.ts index 16b4f89b..545c32ae 100644 --- a/src/zeebe/lib/interfaces-grpc-1.0.ts +++ b/src/zeebe/lib/interfaces-grpc-1.0.ts @@ -1,3 +1,4 @@ +import { LosslessNumber } from 'lossless-json' import { MaybeTimeDuration } from 'typed-duration' import { IInputVariables, IProcessVariables, JSONDoc } from './interfaces-1.0' @@ -60,7 +61,7 @@ export interface ActivateJobsRequest { * To immediately complete the request when no job is activated set the requestTimeout to a negative value * */ - requestTimeout: MaybeTimeDuration + requestTimeout?: MaybeTimeDuration /** * a list of IDs of tenants for which to activate jobs */ @@ -142,6 +143,8 @@ export interface CreateProcessInstanceRequest * instructions after it has been created */ startInstructions: ProcessInstanceCreationStartInstruction[] + /** a reference key chosen by the user and will be part of all records resulted from this operation */ + operationReference?: string } export interface ProcessInstanceCreationStartInstruction { @@ -463,12 +466,16 @@ export interface PublishStartMessageRequest { export interface UpdateJobRetriesRequest { readonly jobKey: string retries: number + /** a reference key chosen by the user and will be part of all records resulted from this operation */ + operationReference?: string } export interface UpdateJobTimeoutRequest { readonly jobKey: string /** the duration of the new timeout in ms, starting from the current moment */ timeout: number + /** a reference key chosen by the user and will be part of all records resulted from this operation */ + operationReference?: string } export interface FailJobRequest { @@ -476,6 +483,7 @@ export interface FailJobRequest { retries: 
number errorMessage: string retryBackOff: number + variables?: JSONDoc } export interface ThrowErrorRequest { @@ -521,14 +529,19 @@ interface SetVariablesRequestBase { export interface SetVariablesRequest extends SetVariablesRequestBase { variables: Partial + /** a reference key chosen by the user and will be part of all records resulted from this operation */ + operationReference?: number | LosslessNumber } export interface SetVariablesRequestOnTheWire extends SetVariablesRequestBase { variables: string + operationReference?: string } export interface ResolveIncidentRequest { readonly incidentKey: string + /** a reference key chosen by the user and will be part of all records resulted from this operation */ + operationReference?: string } export interface ActivateInstruction { @@ -575,6 +588,8 @@ export interface ModifyProcessInstanceRequest { activateInstructions?: ActivateInstruction[] /** instructions describing which elements should be terminated */ terminateInstructions?: TerminateInstruction[] + /** a reference key chosen by the user and will be part of all records resulted from this operation */ + operationReference?: string } export type ModifyProcessInstanceResponse = Record @@ -584,16 +599,18 @@ export interface MigrateProcessInstanceRequest { processInstanceKey: string // the migration plan that defines target process and element mappings migrationPlan: MigrationPlan + /** a reference key chosen by the user and will be part of all records resulted from this operation */ + operationReference?: string } -interface MigrationPlan { +export interface MigrationPlan { // the key of process definition to migrate the process instance to targetProcessDefinitionKey: string // the mapping instructions describe how to map elements from the source process definition to the target process definition mappingInstructions: MappingInstruction[] } -interface MappingInstruction { +export interface MappingInstruction { // the element id to migrate from sourceElementId: 
string // the element id to migrate into @@ -743,6 +760,8 @@ export interface DeleteResourceRequest { * of a process definition, the key of a decision requirements definition or the key of a form. */ resourceKey: string + /** a reference key chosen by the user and will be part of all records resulted from this operation */ + operationReference?: number | LosslessNumber } export interface BroadcastSignalRequest { diff --git a/src/zeebe/lib/stringifyVariables.ts b/src/zeebe/lib/stringifyVariables.ts index 57770490..216102a9 100644 --- a/src/zeebe/lib/stringifyVariables.ts +++ b/src/zeebe/lib/stringifyVariables.ts @@ -11,11 +11,14 @@ export function parseVariables( }) } +/** + * Parse an incoming job and convert its variables and custom headers to JSON. + */ + export function parseVariablesAndCustomHeadersToJSON( response: ActivatedJob, - // eslint-disable-next-line @typescript-eslint/no-explicit-any + /* eslint-disable @typescript-eslint/no-explicit-any */ inputVariableDto: new (...args: any[]) => Readonly, - // eslint-disable-next-line @typescript-eslint/no-explicit-any customHeadersDto: new (...args: any[]) => Readonly ): Promise> { return new Promise((resolve, reject) => { @@ -40,12 +43,25 @@ export function parseVariablesAndCustomHeadersToJSON( }) } +/** + * Turn the `variables` field of a request from a JS object to a JSON string + * This should be a key:value object where the keys will be variable names in Zeebe and the values are the corresponding values. + * This function is used when sending a job back to Zeebe. + */ export function stringifyVariables< K, T extends { variables: K extends JSONDoc ? K : K }, V extends T & { variables: string }, >(request: T): V { const variables = request.variables || {} + /** + * This is a run-time guard. The type system disallows passing an array, but type erasure and dynamic programming can override that. 
+ * If you pass an array as the variables to a CompleteJob RPC call, it will report success, but fail on the broker, stalling the process. + * See: https://github.com/camunda/camunda-8-js-sdk/issues/247 + */ + if (Array.isArray(variables)) { + throw new Error('Unable to parse Array into variables') + } const variablesString = losslessStringify(variables) return Object.assign({}, request, { variables: variablesString }) as V } diff --git a/src/zeebe/zb/ZeebeGrpcClient.ts b/src/zeebe/zb/ZeebeGrpcClient.ts index 0e28649a..bfeda8e9 100644 --- a/src/zeebe/zb/ZeebeGrpcClient.ts +++ b/src/zeebe/zb/ZeebeGrpcClient.ts @@ -3,6 +3,7 @@ import * as path from 'path' import chalk from 'chalk' import d from 'debug' +import { LosslessNumber } from 'lossless-json' import promiseRetry from 'promise-retry' import { Duration, MaybeTimeDuration } from 'typed-duration' import { v4 as uuid } from 'uuid' @@ -10,12 +11,12 @@ import { v4 as uuid } from 'uuid' import { CamundaEnvironmentConfigurator, CamundaPlatform8Configuration, + constructOAuthProvider, DeepPartial, GetCustomCertificateBuffer, LosslessDto, - RequireConfiguration, - constructOAuthProvider, losslessStringify, + RequireConfiguration, } from '../../lib' import { IOAuthProvider } from '../../oauth' import { @@ -33,7 +34,7 @@ import { StatefulLogInterceptor } from '../lib/StatefulLogInterceptor' import { TypedEmitter } from '../lib/TypedEmitter' import { ZBJsonLogger } from '../lib/ZBJsonLogger' import { ZBStreamWorker } from '../lib/ZBStreamWorker' -import { Resource, getResourceContentAndName } from '../lib/deployResource' +import { getResourceContentAndName, Resource } from '../lib/deployResource' import * as ZB from '../lib/interfaces-1.0' import { ZBWorkerTaskHandler } from '../lib/interfaces-1.0' import * as Grpc from '../lib/interfaces-grpc-1.0' @@ -268,14 +269,12 @@ export class ZeebeGrpcClient extends TypedEmitter< const inputVariableDtoToUse = inputVariableDto ?? 
(LosslessDto as { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - new (obj: any): Variables + new (): Variables }) const customHeadersDtoToUse = customHeadersDto ?? (LosslessDto as { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - new (obj: any): CustomHeaders + new (): CustomHeaders }) // eslint-disable-next-line no-async-promise-executor return new Promise(async (resolve, reject) => { @@ -350,12 +349,15 @@ export class ZeebeGrpcClient extends TypedEmitter< * ``` */ public async cancelProcessInstance( - processInstanceKey: string | number + processInstanceKey: string, + operationReference?: number | LosslessNumber ): Promise { Utils.validateNumber(processInstanceKey, 'processInstanceKey') + const parsedOperationReference = operationReference?.toString() ?? undefined return this.executeOperation('cancelProcessInstance', async () => (await this.grpc).cancelProcessInstanceSync({ processInstanceKey, + operationReference: parsedOperationReference, }) ) } @@ -582,6 +584,8 @@ export class ZeebeGrpcClient extends TypedEmitter< >( config: ZB.CreateProcessInstanceReq ): Promise { + const operationReference = + config.operationReference?.toString() ?? undefined const request: ZB.CreateProcessInstanceReq = { bpmnProcessId: config.bpmnProcessId, variables: config.variables, @@ -592,6 +596,7 @@ export class ZeebeGrpcClient extends TypedEmitter< const createProcessInstanceRequest: Grpc.CreateProcessInstanceRequest = stringifyVariables({ ...request, + operationReference, startInstructions: request.startInstructions!, tenantId: config.tenantId ?? 
this.tenantId, }) @@ -662,11 +667,13 @@ export class ZeebeGrpcClient extends TypedEmitter< */ deleteResource({ resourceKey, + operationReference, }: { resourceKey: string + operationReference?: number | LosslessNumber }): Promise> { return this.executeOperation('deleteResourceSync', async () => - (await this.grpc).deleteResourceSync({ resourceKey }) + (await this.grpc).deleteResourceSync({ resourceKey, operationReference }) ) } @@ -827,8 +834,14 @@ export class ZeebeGrpcClient extends TypedEmitter< * ``` */ public failJob(failJobRequest: Grpc.FailJobRequest): Promise { + const variables = failJobRequest.variables ? failJobRequest.variables : {} + const withStringifiedVariables = stringifyVariables({ + ...failJobRequest, + variables, + }) + return this.executeOperation('failJob', async () => - (await this.grpc).failJobSync(failJobRequest) + (await this.grpc).failJobSync(withStringifiedVariables) ) } @@ -869,8 +882,10 @@ export class ZeebeGrpcClient extends TypedEmitter< * ``` */ public modifyProcessInstance( - modifyProcessInstanceRequest: Grpc.ModifyProcessInstanceRequest + modifyProcessInstanceRequest: ZB.ModifyProcessInstanceReq ): Promise { + const operationReference = + modifyProcessInstanceRequest.operationReference?.toString() return this.executeOperation('modifyProcessInstance', async () => { // We accept JSONDoc for the variableInstructions, but the actual gRPC call needs stringified JSON, so transform it with a mutation const req = Utils.deepClone(modifyProcessInstanceRequest) @@ -881,6 +896,7 @@ export class ZeebeGrpcClient extends TypedEmitter< ) return (await this.grpc).modifyProcessInstanceSync({ ...req, + operationReference, }) }) } @@ -890,12 +906,15 @@ export class ZeebeGrpcClient extends TypedEmitter< * @since 8.5.0 */ public migrateProcessInstance( - migrateProcessInstanceRequest: Grpc.MigrateProcessInstanceRequest + migrateProcessInstanceRequest: ZB.MigrateProcessInstanceReq ): Promise { + const operationReference = + 
migrateProcessInstanceRequest.operationReference?.toString() return this.executeOperation('migrateProcessInstance', async () => - (await this.grpc).migrateProcessInstanceSync( - migrateProcessInstanceRequest - ) + (await this.grpc).migrateProcessInstanceSync({ + ...migrateProcessInstanceRequest, + operationReference, + }) ) } @@ -1027,10 +1046,15 @@ export class ZeebeGrpcClient extends TypedEmitter< * ``` */ public resolveIncident( - resolveIncidentRequest: Grpc.ResolveIncidentRequest + resolveIncidentRequest: ZB.ResolveIncidentReq ): Promise { + const operationReference = + resolveIncidentRequest.operationReference?.toString() return this.executeOperation('resolveIncident', async () => - (await this.grpc).resolveIncidentSync(resolveIncidentRequest) + (await this.grpc).resolveIncidentSync({ + ...resolveIncidentRequest, + operationReference, + }) ) } @@ -1080,8 +1104,13 @@ export class ZeebeGrpcClient extends TypedEmitter< ? losslessStringify(request.variables) : request.variables + const operationReference = request.operationReference?.toString() return this.executeOperation('setVariables', async () => - (await this.grpc).setVariablesSync({ ...request, variables }) + (await this.grpc).setVariablesSync({ + ...request, + variables, + operationReference, + }) ) } @@ -1193,14 +1222,12 @@ export class ZeebeGrpcClient extends TypedEmitter< const inputVariableDto = req.inputVariableDto ? req.inputVariableDto : (LosslessDto as { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - new (obj: any): WorkerInputVariables + new (): WorkerInputVariables }) const customHeadersDto = req.customHeadersDto ? 
req.customHeadersDto : (LosslessDto as { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - new (obj: any): CustomHeaderShape + new (): CustomHeaderShape }) const fetchVariable = req.fetchVariables delete req.fetchVariables @@ -1315,10 +1342,15 @@ export class ZeebeGrpcClient extends TypedEmitter< * ``` */ public updateJobRetries( - updateJobRetriesRequest: Grpc.UpdateJobRetriesRequest + updateJobRetriesRequest: ZB.UpdateJobRetriesReq ): Promise { + const operationReference = + updateJobRetriesRequest.operationReference?.toString() return this.executeOperation('updateJobRetries', async () => - (await this.grpc).updateJobRetriesSync(updateJobRetriesRequest) + (await this.grpc).updateJobRetriesSync({ + ...updateJobRetriesRequest, + operationReference, + }) ) } @@ -1334,10 +1366,15 @@ export class ZeebeGrpcClient extends TypedEmitter< - no deadline exists for the given job key */ public updateJobTimeout( - updateJobTimeoutRequest: Grpc.UpdateJobTimeoutRequest + updateJobTimeoutRequest: ZB.UpdateJobTimeoutReq ): Promise { + const operationReference = + updateJobTimeoutRequest.operationReference?.toString() return this.executeOperation('updateJobTimeout', async () => - (await this.grpc).updateJobTimeoutSync(updateJobTimeoutRequest) + (await this.grpc).updateJobTimeoutSync({ + ...updateJobTimeoutRequest, + operationReference, + }) ) } diff --git a/src/zeebe/zb/ZeebeRESTClient.ts b/src/zeebe/zb/ZeebeRESTClient.ts index a0e41e84..28d73093 100644 --- a/src/zeebe/zb/ZeebeRESTClient.ts +++ b/src/zeebe/zb/ZeebeRESTClient.ts @@ -35,6 +35,9 @@ interface TaskChangeSet { candidateGroups?: string[] } +/** + * @deprecated Since 8.6. Please use `C8RestClient` instead. + */ export class ZeebeRestClient { private userAgentString: string private oAuthProvider: IOAuthProvider