diff --git a/.changeset/early-starfishes-impress.md b/.changeset/early-starfishes-impress.md
deleted file mode 100644
index 0995ac3593..0000000000
--- a/.changeset/early-starfishes-impress.md
+++ /dev/null
@@ -1,5 +0,0 @@
----
-'@aws-amplify/ai-constructs': patch
----
-
-Fix case where bedrock content blocks would be populated with 'null' instead of 'undefined.
diff --git a/.changeset/eleven-numbers-hide.md b/.changeset/eleven-numbers-hide.md
new file mode 100644
index 0000000000..9e91b8a237
--- /dev/null
+++ b/.changeset/eleven-numbers-hide.md
@@ -0,0 +1,5 @@
+---
+'@aws-amplify/backend-deployer': patch
+---
+
+Handle errors when checking CDK bootstrap.
diff --git a/.changeset/modern-toys-jump.md b/.changeset/modern-toys-jump.md
deleted file mode 100644
index 6f13875f25..0000000000
--- a/.changeset/modern-toys-jump.md
+++ /dev/null
@@ -1,5 +0,0 @@
----
-'@aws-amplify/backend-deployer': patch
----
-
-detect more generic CFN deployment failure errors
diff --git a/.eslint_dictionary.json b/.eslint_dictionary.json
index 2e0a49ae60..cd255e5445 100644
--- a/.eslint_dictionary.json
+++ b/.eslint_dictionary.json
@@ -19,6 +19,7 @@
   "birthdate",
   "bundler",
   "callee",
+  "cartesian",
   "cdk",
   "changelog",
   "changeset",
@@ -39,6 +40,7 @@
   "datasync",
   "debounce",
   "declarator",
+  "decrypt",
   "deployer",
   "deprecations",
   "deprecator",
@@ -79,6 +81,7 @@
   "idps",
   "implementors",
   "inheritdoc",
+  "instanceof",
   "interop",
   "invokable",
   "invoker",
@@ -145,6 +148,7 @@
   "sigint",
   "signout",
   "signup",
+  "SKey",
   "sms",
   "stderr",
   "stdin",
@@ -158,6 +162,7 @@
   "synthing",
   "testname",
   "testnamebucket",
+  "testuser",
   "timestamps",
   "tmpdir",
   "todos",
@@ -170,6 +175,7 @@
   "tslint",
   "typename",
   "typeof",
+  "ubuntu",
   "unauth",
   "unix",
   "unlink",
@@ -188,6 +194,7 @@
   "wildcards",
   "workspace",
   "writev",
+  "xlarge",
   "yaml",
   "yargs",
   "zoneinfo"
diff --git a/.eslintrc.cjs b/.eslintrc.cjs
index 05f9f0a155..23e09de517 100644
--- a/.eslintrc.cjs
+++ b/.eslintrc.cjs
@@ -160,6 +160,7 @@ module.exports = {
       },
     ],
     'jsdoc/require-param': 'off',
+    'jsdoc/require-yields': 'off',
     'jsdoc/require-returns': 'off',
     'spellcheck/spell-checker': [
       'warn',
diff --git a/.github/actions/setup_node/action.yml b/.github/actions/setup_node/action.yml
index 6bda314a87..8f407ee437 100644
--- a/.github/actions/setup_node/action.yml
+++ b/.github/actions/setup_node/action.yml
@@ -12,3 +12,10 @@ runs:
       with:
         node-version: ${{ inputs.node-version }}
        cache: 'npm'
+    - name: Hydrate npx cache
+      # This step hydrates the npx cache with packages that we use in builds and tests upfront.
+      # Otherwise, concurrent attempts to use these tools on a cache miss result in race conditions between
+      # two installations. That may result in a corrupted npx cache.
+      shell: bash
+      run: |
+        npx which npx
diff --git a/.github/workflows/health_checks.yml b/.github/workflows/health_checks.yml
index e9aafec01a..fef7c581b8 100644
--- a/.github/workflows/health_checks.yml
+++ b/.github/workflows/health_checks.yml
@@ -206,25 +206,34 @@ jobs:
         run: npm run test:dir packages/integration-tests/lib/test-e2e/amplify_outputs_backwards_compatibility.test.js
         env:
           BASELINE_DIR: ${{ steps.setup_baseline_version.outputs.baseline_dir }}
+  e2e_generate_deployment_tests_matrix:
+    if: needs.do_include_e2e.outputs.run_e2e == 'true'
+    runs-on: ubuntu-latest
+    outputs:
+      matrix: ${{ steps.generateMatrix.outputs.matrix }}
+    timeout-minutes: 5
+    needs:
+      - do_include_e2e
+      - build
+    steps:
+      - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # version 4.1.4
+      - uses: ./.github/actions/restore_build_cache
+      - run: echo "$(npx tsx scripts/generate_sparse_test_matrix.ts 'packages/integration-tests/lib/test-e2e/deployment/*.deployment.test.js')"
+      - id: generateMatrix
+        run: echo "matrix=$(npx tsx scripts/generate_sparse_test_matrix.ts 'packages/integration-tests/lib/test-e2e/deployment/*.deployment.test.js')" >> "$GITHUB_OUTPUT"
   e2e_deployment:
     if: needs.do_include_e2e.outputs.run_e2e == 'true'
     strategy:
       # will finish running other test matrices even if one fails
       fail-fast: false
-      matrix:
-        os: [ubuntu-latest, macos-14-xlarge, windows-latest]
-        node-version: [18, 20]
-        # skip multiple node version test on other os
-        exclude:
-          - os: macos-14-xlarge
-            node-version: 20
-          - os: windows-latest
-            node-version: 20
+      matrix: ${{ fromJson(needs.e2e_generate_deployment_tests_matrix.outputs.matrix) }}
     runs-on: ${{ matrix.os }}
+    name: e2e_deployment ${{ matrix.displayNames }} ${{ matrix.node-version }} ${{ matrix.os }}
     timeout-minutes: ${{ matrix.os == 'windows-latest' && 35 || 25 }}
     needs:
       - do_include_e2e
       - build
+      - e2e_generate_deployment_tests_matrix
     permissions:
       # these permissions are required for the configure-aws-credentials action to get a JWT from GitHub
       id-token: write
@@ -238,26 +247,35 @@
           node_version: ${{ matrix.node-version }}
           link_cli: true
           run: |
-            npm run test:dir packages/integration-tests/lib/test-e2e/deployment.test.js
+            npm run test:dir ${{ matrix.testPaths }}
+  e2e_generate_sandbox_tests_matrix:
+    if: needs.do_include_e2e.outputs.run_e2e == 'true'
+    runs-on: ubuntu-latest
+    outputs:
+      matrix: ${{ steps.generateMatrix.outputs.matrix }}
+    timeout-minutes: 5
+    needs:
+      - do_include_e2e
+      - build
+    steps:
+      - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # version 4.1.4
+      - uses: ./.github/actions/restore_build_cache
+      - run: echo "$(npx tsx scripts/generate_sparse_test_matrix.ts 'packages/integration-tests/lib/test-e2e/sandbox/*.sandbox.test.js')"
+      - id: generateMatrix
+        run: echo "matrix=$(npx tsx scripts/generate_sparse_test_matrix.ts 'packages/integration-tests/lib/test-e2e/sandbox/*.sandbox.test.js')" >> "$GITHUB_OUTPUT"
   e2e_sandbox:
     if: needs.do_include_e2e.outputs.run_e2e == 'true'
     strategy:
       # will finish running other test matrices even if one fails
       fail-fast: false
-      matrix:
-        os: [ubuntu-latest, macos-14-xlarge, windows-latest]
-        node-version: [18, 20]
-        # skip multiple node version test on other os
-        exclude:
-          - os: macos-14-xlarge
-            node-version: 20
-          - os: windows-latest
-            node-version: 20
+      matrix: ${{ fromJson(needs.e2e_generate_sandbox_tests_matrix.outputs.matrix) }}
     runs-on: ${{ matrix.os }}
+    name: e2e_sandbox ${{ matrix.displayNames }} ${{ matrix.node-version }} ${{ matrix.os }}
     timeout-minutes: ${{ matrix.os ==
'windows-latest' && 35 || 25 }} needs: - do_include_e2e - build + - e2e_generate_sandbox_tests_matrix permissions: # these permissions are required for the configure-aws-credentials action to get a JWT from GitHub id-token: write @@ -270,7 +288,7 @@ jobs: e2e_test_accounts: ${{ vars.E2E_TEST_ACCOUNTS }} node_version: ${{ matrix.node-version }} link_cli: true - run: npm run test:dir packages/integration-tests/lib/test-e2e/sandbox.test.js + run: npm run test:dir ${{ matrix.testPaths }} e2e_backend_output: if: needs.do_include_e2e.outputs.run_e2e == 'true' runs-on: ubuntu-latest @@ -411,7 +429,9 @@ jobs: - uses: ./.github/actions/setup_node - uses: ./.github/actions/restore_install_cache - run: git fetch origin - - run: npm run diff:check ${{ github.event.pull_request.base.sha }} + - run: npm run diff:check "$BASE_SHA" + env: + BASE_SHA: ${{ github.event.pull_request.base.sha }} check_pr_changesets: if: github.event_name == 'pull_request' && github.event.pull_request.user.login != 'github-actions[bot]' runs-on: ubuntu-latest @@ -425,9 +445,13 @@ jobs: - uses: ./.github/actions/setup_node - uses: ./.github/actions/restore_install_cache - name: Validate that PR has changeset - run: npx changeset status --since origin/${{ github.event.pull_request.base.ref }} + run: npx changeset status --since origin/"$BASE_REF" + env: + BASE_REF: ${{ github.event.pull_request.base.ref }} - name: Validate changeset is not missing packages - run: npx tsx scripts/check_changeset_completeness.ts ${{ github.event.pull_request.base.sha }} + run: npx tsx scripts/check_changeset_completeness.ts "$BASE_SHA" + env: + BASE_SHA: ${{ github.event.pull_request.base.sha }} - name: Validate that changeset has necessary dependency updates run: | npx changeset version diff --git a/package-lock.json b/package-lock.json index 4c4620c988..fb89704326 100644 --- a/package-lock.json +++ b/package-lock.json @@ -16,6 +16,7 @@ "@actions/github": "^6.0.0", "@aws-sdk/client-amplify": "^3.624.0", "@aws-sdk/client-cloudformation": "^3.624.0", + "@aws-sdk/client-cloudwatch-logs": "^3.624.0", "@aws-sdk/client-cognito-identity-provider": "^3.624.0", "@aws-sdk/client-dynamodb": "^3.624.0", "@aws-sdk/client-iam": "^3.624.0", @@ -48,14 +49,14 @@ "fs-extra": "^11.1.1", "glob": "^10.1.0", "husky": "^8.0.3", - "lint-staged": "^13.2.1", + "lint-staged": "^15.2.10", "prettier": "^2.8.7", "rimraf": "^5.0.0", "semver": "^7.5.4", "tsx": "^4.6.1", "typedoc": "^0.25.3", "typescript": "~5.2.0", - "verdaccio": "^5.24.1" + "verdaccio": "^6.0.1" }, "engines": { "node": ">=18.16.0" @@ -2796,10 +2797,9 @@ } }, "node_modules/@aws-amplify/data-schema-types": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@aws-amplify/data-schema-types/-/data-schema-types-1.1.1.tgz", - "integrity": "sha512-WhWEEsztpSSxIY0lJ3Ge5iA4g3PBm66SQmy1fBH1FBq0T+cxUBijifOU8MNwf+tf6lGpArMX0RS54HRVF5fUSA==", - "license": "Apache-2.0", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@aws-amplify/data-schema-types/-/data-schema-types-1.2.0.tgz", + "integrity": "sha512-1hy2r7jl3hQ5J/CGjhmPhFPcdGSakfme1ZLjlTMJZILfYifZLSlGRKNCelMb3J5N9203hyeT5XDi5yR47JL1TQ==", "dependencies": { "graphql": "15.8.0", "rxjs": "^7.8.1" @@ -5070,13 +5070,13 @@ "license": "0BSD" }, "node_modules/@aws-amplify/graphql-schema-generator": { - "version": "0.9.4", - "resolved": "https://registry.npmjs.org/@aws-amplify/graphql-schema-generator/-/graphql-schema-generator-0.9.4.tgz", - "integrity": "sha512-GXoPOes5Sj93p7RWunJlMdxPQyoh+dBaJq3qpQUOSYQU1UxUqAstnD+gqAWEG58opiupHby7jTIi1ljK1e9CrQ==", 
+ "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@aws-amplify/graphql-schema-generator/-/graphql-schema-generator-0.11.0.tgz", + "integrity": "sha512-c5pDuoh8UWD0qQ2N4HjR3ZC/JO6ai8DrsK40oQKwQhG2V/VkxUGdqsg0B9nYiKepxiTw0gXabLq8JfwW4o8uBg==", "license": "Apache-2.0", "dependencies": { - "@aws-amplify/graphql-transformer-core": "2.9.3", - "@aws-amplify/graphql-transformer-interfaces": "3.10.1", + "@aws-amplify/graphql-transformer-core": "3.2.2", + "@aws-amplify/graphql-transformer-interfaces": "4.1.2", "@aws-sdk/client-ec2": "3.624.0", "@aws-sdk/client-iam": "3.624.0", "@aws-sdk/client-lambda": "3.624.0", @@ -5084,7 +5084,7 @@ "csv-parse": "^5.5.2", "fs-extra": "11.1.1", "graphql": "^15.5.0", - "graphql-transformer-common": "4.31.1", + "graphql-transformer-common": "5.1.0", "knex": "~2.4.0", "mysql2": "~3.9.7", "ora": "^4.0.3", @@ -5712,6 +5712,24 @@ "node": ">=14.14" } }, + "node_modules/@aws-amplify/graphql-schema-generator/node_modules/graphql-mapping-template": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/graphql-mapping-template/-/graphql-mapping-template-5.0.1.tgz", + "integrity": "sha512-hgFkXUS6Q35zE/uyPGIZYof2kutwTZmVqwJfnQofiCYWRRQS0zjzUdyqmOcCBkbJB4Zi7G7mXcl3fSIs5I5vgA==", + "license": "Apache-2.0" + }, + "node_modules/@aws-amplify/graphql-schema-generator/node_modules/graphql-transformer-common": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/graphql-transformer-common/-/graphql-transformer-common-5.1.0.tgz", + "integrity": "sha512-i1Ja0bjlsrSNT5TzjGOrPyxYGJPTutDOLTJENcGC47+KYzMfQS80KpVpUZlIVlcCbDYeSZbv8HaMtJlJpmjbmw==", + "license": "Apache-2.0", + "dependencies": { + "graphql": "^15.5.0", + "graphql-mapping-template": "5.0.1", + "md5": "^2.2.1", + "pluralize": "8.0.0" + } + }, "node_modules/@aws-amplify/graphql-schema-generator/node_modules/typescript": { "version": "4.9.5", "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz", @@ -5726,17 +5744,17 @@ } }, "node_modules/@aws-amplify/graphql-transformer-core": { - "version": "2.9.3", - "resolved": "https://registry.npmjs.org/@aws-amplify/graphql-transformer-core/-/graphql-transformer-core-2.9.3.tgz", - "integrity": "sha512-gz9PbNTqsyQQn6W5d4HPN/pafvFH7spwd6R/hImisEBFD+80liJc/21nBC8UgUMPu2eXVZrsiWBfWnO8Rbqomg==", + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@aws-amplify/graphql-transformer-core/-/graphql-transformer-core-3.2.2.tgz", + "integrity": "sha512-nHocW0Uy/pHrrt5iMFMzz+9IsJKnaPk9BcWZHcQSJ/9F0Kn0s/vIFT5/Ee2nJFN/h0VK3fTkT9QKOuiQ4UH3Jg==", "license": "Apache-2.0", "dependencies": { - "@aws-amplify/graphql-directives": "1.1.0", - "@aws-amplify/graphql-transformer-interfaces": "3.10.1", + "@aws-amplify/graphql-directives": "2.4.0", + "@aws-amplify/graphql-transformer-interfaces": "4.1.2", "fs-extra": "^8.1.0", "graphql": "^15.5.0", - "graphql-mapping-template": "4.20.16", - "graphql-transformer-common": "4.31.1", + "graphql-mapping-template": "5.0.1", + "graphql-transformer-common": "5.1.0", "hjson": "^3.2.2", "lodash": "^4.17.21", "md5": "^2.3.0", @@ -5744,10 +5762,16 @@ "ts-dedent": "^2.0.0" }, "peerDependencies": { - "aws-cdk-lib": "^2.129.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.3.0" } }, + "node_modules/@aws-amplify/graphql-transformer-core/node_modules/@aws-amplify/graphql-directives": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/@aws-amplify/graphql-directives/-/graphql-directives-2.4.0.tgz", + "integrity": 
"sha512-+oO9Lb22eIuS8rvLOR+x4F79J5aCF1GIkqYS0paRUTw78NjLTOq1LWjtGMYAfLpbHgoYtrkC2zwpw7sHbmNnzQ==", + "license": "Apache-2.0" + }, "node_modules/@aws-amplify/graphql-transformer-core/node_modules/fs-extra": { "version": "8.1.0", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz", @@ -5762,6 +5786,24 @@ "node": ">=6 <7 || >=8" } }, + "node_modules/@aws-amplify/graphql-transformer-core/node_modules/graphql-mapping-template": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/graphql-mapping-template/-/graphql-mapping-template-5.0.1.tgz", + "integrity": "sha512-hgFkXUS6Q35zE/uyPGIZYof2kutwTZmVqwJfnQofiCYWRRQS0zjzUdyqmOcCBkbJB4Zi7G7mXcl3fSIs5I5vgA==", + "license": "Apache-2.0" + }, + "node_modules/@aws-amplify/graphql-transformer-core/node_modules/graphql-transformer-common": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/graphql-transformer-common/-/graphql-transformer-common-5.1.0.tgz", + "integrity": "sha512-i1Ja0bjlsrSNT5TzjGOrPyxYGJPTutDOLTJENcGC47+KYzMfQS80KpVpUZlIVlcCbDYeSZbv8HaMtJlJpmjbmw==", + "license": "Apache-2.0", + "dependencies": { + "graphql": "^15.5.0", + "graphql-mapping-template": "5.0.1", + "md5": "^2.2.1", + "pluralize": "8.0.0" + } + }, "node_modules/@aws-amplify/graphql-transformer-core/node_modules/jsonfile": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", @@ -5790,15 +5832,15 @@ } }, "node_modules/@aws-amplify/graphql-transformer-interfaces": { - "version": "3.10.1", - "resolved": "https://registry.npmjs.org/@aws-amplify/graphql-transformer-interfaces/-/graphql-transformer-interfaces-3.10.1.tgz", - "integrity": "sha512-daf+cpOSw3lKiS+Tpc5Oo5H+FCkHi/8z+0mAR/greQGPJWzcHv9j2u1Jiy36UvI01ypOhHme58pAs/fKWLWDBQ==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/@aws-amplify/graphql-transformer-interfaces/-/graphql-transformer-interfaces-4.1.2.tgz", + "integrity": "sha512-fW4BIo2stFYOc6LDrSDKW0NTKmBp/c+UJUG5YjDef5fUUTbE8RZMzUGgSjgzDgwXpAT8CyYuncqMLchVkQSFFQ==", "license": "Apache-2.0", "dependencies": { "graphql": "^15.5.0" }, "peerDependencies": { - "aws-cdk-lib": "^2.129.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.3.0" } }, @@ -6078,9 +6120,9 @@ "license": "Apache-2.0" }, "node_modules/@aws-cdk/cloud-assembly-schema": { - "version": "36.0.25", - "resolved": "https://registry.npmjs.org/@aws-cdk/cloud-assembly-schema/-/cloud-assembly-schema-36.0.25.tgz", - "integrity": "sha512-AK86v4IMV4zcWfp392e3wlaVJPT72/dk39Lo2SDDFxQR+sikMOyY2IGrULyhK1TwQmPiyxM7QB/0MkTbMDAPrw==", + "version": "38.0.1", + "resolved": "https://registry.npmjs.org/@aws-cdk/cloud-assembly-schema/-/cloud-assembly-schema-38.0.1.tgz", + "integrity": "sha512-KvPe+NMWAulfNVwY7jenFhzhuLhLqJ/OPy5jx7wUstbjnYnjRVLpUHPU3yCjXFE0J8cuJVdx95BJ4rOs66Pi9w==", "bundleDependencies": [ "jsonschema", "semver" @@ -6089,9 +6131,6 @@ "dependencies": { "jsonschema": "^1.4.1", "semver": "^7.6.3" - }, - "engines": { - "node": ">= 18.18.0" } }, "node_modules/@aws-cdk/cloud-assembly-schema/node_modules/jsonschema": { @@ -13803,9 +13842,9 @@ } }, "node_modules/@cypress/request": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@cypress/request/-/request-3.0.1.tgz", - "integrity": "sha512-TWivJlJi8ZDx2wGOw1dbLuHJKUYX7bWySw377nlnGOW3hP9/MUKIsEdXT/YngWxVdgNCHRBmFlBipE+5/2ZZlQ==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@cypress/request/-/request-3.0.5.tgz", + "integrity": "sha512-v+XHd9XmWbufxF1/bTaVm2yhbxY+TB4YtWRqF2zaXBlDNMkls34KiATz0AVDLavL3iB6bQk9/7n3oY1EoLSWGA==", "dev": true, 
"license": "Apache-2.0", "dependencies": { @@ -13815,14 +13854,14 @@ "combined-stream": "~1.0.6", "extend": "~3.0.2", "forever-agent": "~0.6.1", - "form-data": "~2.3.2", - "http-signature": "~1.3.6", + "form-data": "~4.0.0", + "http-signature": "~1.4.0", "is-typedarray": "~1.0.0", "isstream": "~0.1.2", "json-stringify-safe": "~5.0.1", "mime-types": "~2.1.19", "performance-now": "^2.1.0", - "qs": "6.10.4", + "qs": "6.13.0", "safe-buffer": "^5.1.2", "tough-cookie": "^4.1.3", "tunnel-agent": "^0.6.0", @@ -18154,21 +18193,21 @@ "license": "ISC" }, "node_modules/@verdaccio/auth": { - "version": "8.0.0-next-8.1", - "resolved": "https://registry.npmjs.org/@verdaccio/auth/-/auth-8.0.0-next-8.1.tgz", - "integrity": "sha512-sPmHdnYuRSMgABCsTJEfz8tb/smONsWVg0g4KK2QycyYZ/A+RwZLV1JLiQb4wzu9zvS0HSloqWqkWlyNHW3mtw==", + "version": "8.0.0-next-8.3", + "resolved": "https://registry.npmjs.org/@verdaccio/auth/-/auth-8.0.0-next-8.3.tgz", + "integrity": "sha512-x7/gt4R41i5hat5dVT2WfwTeWolSKTo3k8t12ZBhnf+R+L/a79dQ7/sR8JIT6R9A+nkkFn+RiPspH75H7lprZg==", "dev": true, "license": "MIT", "dependencies": { - "@verdaccio/config": "8.0.0-next-8.1", - "@verdaccio/core": "8.0.0-next-8.1", - "@verdaccio/loaders": "8.0.0-next-8.1", - "@verdaccio/logger": "8.0.0-next-8.1", - "@verdaccio/signature": "8.0.0-next-8.0", - "@verdaccio/utils": "7.0.1-next-8.1", + "@verdaccio/config": "8.0.0-next-8.3", + "@verdaccio/core": "8.0.0-next-8.3", + "@verdaccio/loaders": "8.0.0-next-8.3", + "@verdaccio/logger": "8.0.0-next-8.3", + "@verdaccio/signature": "8.0.0-next-8.1", + "@verdaccio/utils": "8.1.0-next-8.3", "debug": "4.3.7", "lodash": "4.17.21", - "verdaccio-htpasswd": "13.0.0-next-8.1" + "verdaccio-htpasswd": "13.0.0-next-8.3" }, "engines": { "node": ">=18" @@ -18178,6 +18217,52 @@ "url": "https://opencollective.com/verdaccio" } }, + "node_modules/@verdaccio/auth/node_modules/@verdaccio/utils": { + "version": "8.1.0-next-8.3", + "resolved": "https://registry.npmjs.org/@verdaccio/utils/-/utils-8.1.0-next-8.3.tgz", + "integrity": "sha512-TR9S+RYpmOERqiqoXwtBFmWqyyByTUFSkZgfDqKbsBaDbYYUls738NKFQHpg/s6i2E3r46mHcWI+jue/EnOoSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@verdaccio/core": "8.0.0-next-8.3", + "lodash": "4.17.21", + "minimatch": "7.4.6", + "semver": "7.6.3" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/verdaccio" + } + }, + "node_modules/@verdaccio/auth/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@verdaccio/auth/node_modules/minimatch": { + "version": "7.4.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-7.4.6.tgz", + "integrity": "sha512-sBz8G/YjVniEz6lKPNpKxXwazJe4c19fEfV2GDMX6AjFz+MX9uDWIZW8XreVhkFW3fkIdTv/gxWr/Kks5FFAVw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/@verdaccio/commons-api": { "version": "10.2.0", "resolved": "https://registry.npmjs.org/@verdaccio/commons-api/-/commons-api-10.2.0.tgz", @@ -18204,21 +18289,41 @@ "license": "MIT" }, "node_modules/@verdaccio/config": { - "version": "8.0.0-next-8.1", - "resolved": 
"https://registry.npmjs.org/@verdaccio/config/-/config-8.0.0-next-8.1.tgz", - "integrity": "sha512-goDVOH4e8xMUxjHybJpi5HwIecVFqzJ9jeNFrRUgtUUn0PtFuNMHgxOeqDKRVboZhc5HK90yed8URK/1O6VsUw==", + "version": "8.0.0-next-8.3", + "resolved": "https://registry.npmjs.org/@verdaccio/config/-/config-8.0.0-next-8.3.tgz", + "integrity": "sha512-bxlesiVi7A1GAHurq7RLFAFd67NTySSwtVMw7D1Ku2Q3v6kAF4TLqxKUrOaA14k0Zk4qyRu4OgXzbjDg0oARcQ==", "dev": true, "license": "MIT", "dependencies": { - "@verdaccio/core": "8.0.0-next-8.1", - "@verdaccio/utils": "7.0.1-next-8.1", + "@verdaccio/core": "8.0.0-next-8.3", + "@verdaccio/utils": "8.1.0-next-8.3", "debug": "4.3.7", "js-yaml": "4.1.0", "lodash": "4.17.21", "minimatch": "7.4.6" }, "engines": { - "node": ">=14" + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/verdaccio" + } + }, + "node_modules/@verdaccio/config/node_modules/@verdaccio/utils": { + "version": "8.1.0-next-8.3", + "resolved": "https://registry.npmjs.org/@verdaccio/utils/-/utils-8.1.0-next-8.3.tgz", + "integrity": "sha512-TR9S+RYpmOERqiqoXwtBFmWqyyByTUFSkZgfDqKbsBaDbYYUls738NKFQHpg/s6i2E3r46mHcWI+jue/EnOoSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@verdaccio/core": "8.0.0-next-8.3", + "lodash": "4.17.21", + "minimatch": "7.4.6", + "semver": "7.6.3" + }, + "engines": { + "node": ">=12" }, "funding": { "type": "opencollective", @@ -18272,9 +18377,9 @@ } }, "node_modules/@verdaccio/core": { - "version": "8.0.0-next-8.1", - "resolved": "https://registry.npmjs.org/@verdaccio/core/-/core-8.0.0-next-8.1.tgz", - "integrity": "sha512-kQRCB2wgXEh8H88G51eQgAFK9IxmnBtkQ8sY5FbmB6PbBkyHrbGcCp+2mtRqqo36j0W1VAlfM3XzoknMy6qQnw==", + "version": "8.0.0-next-8.3", + "resolved": "https://registry.npmjs.org/@verdaccio/core/-/core-8.0.0-next-8.3.tgz", + "integrity": "sha512-DPJmWANbpbtJN+cfz5CN4kfAl15F5JCv5qRAIQB9sOTNWjTw26cRpMYHFB9/buBQPuH3mWCjOwE6c+EVlvObLg==", "dev": true, "license": "MIT", "dependencies": { @@ -18286,7 +18391,7 @@ "semver": "7.6.3" }, "engines": { - "node": ">=14" + "node": ">=18" }, "funding": { "type": "opencollective", @@ -18347,13 +18452,12 @@ } }, "node_modules/@verdaccio/loaders": { - "version": "8.0.0-next-8.1", - "resolved": "https://registry.npmjs.org/@verdaccio/loaders/-/loaders-8.0.0-next-8.1.tgz", - "integrity": "sha512-mqGCUBs862g8mICZwX8CG92p1EZ1Un0DJ2DB7+iVu2TYaEeKoHoIdafabVdiYrbOjLcAOOBrMKE1Wnn14eLxpA==", + "version": "8.0.0-next-8.3", + "resolved": "https://registry.npmjs.org/@verdaccio/loaders/-/loaders-8.0.0-next-8.3.tgz", + "integrity": "sha512-7bIOdi+U1xSLRu0s1XxQwrV3zzzFaVaTX7JKFgj2tQvMy9AgzlpjbW1CqaH8OTVEqq03Pwvwj5hQlcvyzCwm1A==", "dev": true, "license": "MIT", "dependencies": { - "@verdaccio/logger": "8.0.0-next-8.1", "debug": "4.3.7", "lodash": "4.17.21" }, @@ -18422,14 +18526,14 @@ "license": "MIT" }, "node_modules/@verdaccio/logger": { - "version": "8.0.0-next-8.1", - "resolved": "https://registry.npmjs.org/@verdaccio/logger/-/logger-8.0.0-next-8.1.tgz", - "integrity": "sha512-w5kR0/umQkfH2F4PK5Fz9T6z3xz+twewawKLPTUfAgrVAOiWxcikGhhcHWhSGiJ0lPqIa+T0VYuLWMeVeDirGw==", + "version": "8.0.0-next-8.3", + "resolved": "https://registry.npmjs.org/@verdaccio/logger/-/logger-8.0.0-next-8.3.tgz", + "integrity": "sha512-rLjv/W9QfFHT80L1L/xXQwY3DaZ03NIq64/Bb4GHOudZgzl8C5Efe6u1q8Ti+jqXVAKCzEswMNISdq2XIQ+azQ==", "dev": true, "license": "MIT", "dependencies": { - "@verdaccio/logger-commons": "8.0.0-next-8.1", - "pino": "8.17.2" + "@verdaccio/logger-commons": "8.0.0-next-8.3", + "pino": "9.4.0" }, "engines": { "node": 
">=18" @@ -18439,144 +18543,20 @@ "url": "https://opencollective.com/verdaccio" } }, - "node_modules/@verdaccio/logger-7": { - "version": "8.0.0-next-8.1", - "resolved": "https://registry.npmjs.org/@verdaccio/logger-7/-/logger-7-8.0.0-next-8.1.tgz", - "integrity": "sha512-V+/B1Wnct3IZ90q6HkI1a3dqbS0ds7s/5WPrS5cmBeLEw78/OGgF76XkhI2+lett7Un1CjVow7mcebOWcZ/Sqw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@verdaccio/logger-commons": "8.0.0-next-8.1", - "pino": "7.11.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/verdaccio" - } - }, - "node_modules/@verdaccio/logger-7/node_modules/duplexify": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz", - "integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==", - "dev": true, - "license": "MIT", - "dependencies": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", - "stream-shift": "^1.0.2" - } - }, - "node_modules/@verdaccio/logger-7/node_modules/on-exit-leak-free": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/on-exit-leak-free/-/on-exit-leak-free-0.2.0.tgz", - "integrity": "sha512-dqaz3u44QbRXQooZLTUKU41ZrzYrcvLISVgbrzbyCMxpmSLJvZ3ZamIJIZ29P6OhZIkNIQKosdeM6t1LYbA9hg==", - "dev": true, - "license": "MIT" - }, - "node_modules/@verdaccio/logger-7/node_modules/pino": { - "version": "7.11.0", - "resolved": "https://registry.npmjs.org/pino/-/pino-7.11.0.tgz", - "integrity": "sha512-dMACeu63HtRLmCG8VKdy4cShCPKaYDR4youZqoSWLxl5Gu99HUw8bw75thbPv9Nip+H+QYX8o3ZJbTdVZZ2TVg==", - "dev": true, - "license": "MIT", - "dependencies": { - "atomic-sleep": "^1.0.0", - "fast-redact": "^3.0.0", - "on-exit-leak-free": "^0.2.0", - "pino-abstract-transport": "v0.5.0", - "pino-std-serializers": "^4.0.0", - "process-warning": "^1.0.0", - "quick-format-unescaped": "^4.0.3", - "real-require": "^0.1.0", - "safe-stable-stringify": "^2.1.0", - "sonic-boom": "^2.2.1", - "thread-stream": "^0.15.1" - }, - "bin": { - "pino": "bin.js" - } - }, - "node_modules/@verdaccio/logger-7/node_modules/pino-abstract-transport": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-0.5.0.tgz", - "integrity": "sha512-+KAgmVeqXYbTtU2FScx1XS3kNyfZ5TrXY07V96QnUSFqo2gAqlvmaxH67Lj7SWazqsMabf+58ctdTcBgnOLUOQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "duplexify": "^4.1.2", - "split2": "^4.0.0" - } - }, - "node_modules/@verdaccio/logger-7/node_modules/pino-std-serializers": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-4.0.0.tgz", - "integrity": "sha512-cK0pekc1Kjy5w9V2/n+8MkZwusa6EyyxfeQCB799CQRhRt/CqYKiWs5adeu8Shve2ZNffvfC/7J64A2PJo1W/Q==", - "dev": true, - "license": "MIT" - }, - "node_modules/@verdaccio/logger-7/node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dev": true, - "license": "MIT", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/@verdaccio/logger-7/node_modules/real-require": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/real-require/-/real-require-0.1.0.tgz", - 
"integrity": "sha512-r/H9MzAWtrv8aSVjPCMFpDMl5q66GqtmmRkRjpHTsp4zBAa+snZyiQNlMONiUmEJcsnaw0wCauJ2GWODr/aFkg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 12.13.0" - } - }, - "node_modules/@verdaccio/logger-7/node_modules/sonic-boom": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-2.8.0.tgz", - "integrity": "sha512-kuonw1YOYYNOve5iHdSahXPOK49GqwA+LZhI6Wz/l0rP57iKyXXIHaRagOBHAPmGwJC6od2Z9zgvZ5loSgMlVg==", - "dev": true, - "license": "MIT", - "dependencies": { - "atomic-sleep": "^1.0.0" - } - }, - "node_modules/@verdaccio/logger-7/node_modules/thread-stream": { - "version": "0.15.2", - "resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-0.15.2.tgz", - "integrity": "sha512-UkEhKIg2pD+fjkHQKyJO3yoIvAP3N6RlNFt2dUhcS1FGvCD1cQa1M/PGknCLFIyZdtJOWQjejp7bdNqmN7zwdA==", - "dev": true, - "license": "MIT", - "dependencies": { - "real-require": "^0.1.0" - } - }, "node_modules/@verdaccio/logger-commons": { - "version": "8.0.0-next-8.1", - "resolved": "https://registry.npmjs.org/@verdaccio/logger-commons/-/logger-commons-8.0.0-next-8.1.tgz", - "integrity": "sha512-jCge//RT4uaK7MarhpzcJeJ5Uvtu/DbJ1wvJQyGiFe+9AvxDGm3EUFXvawLFZ0lzYhmLt1nvm7kevcc3vOm2ZQ==", + "version": "8.0.0-next-8.3", + "resolved": "https://registry.npmjs.org/@verdaccio/logger-commons/-/logger-commons-8.0.0-next-8.3.tgz", + "integrity": "sha512-eMG0UDh66JcPX8ez57HCpsZ0FE9G0pCZ51Xei1MeCFVgNLkzrEcnwOcEGZhd3Tew79A4wGgjFFFywkrRIIomwg==", "dev": true, "license": "MIT", "dependencies": { - "@verdaccio/core": "8.0.0-next-8.1", - "@verdaccio/logger-prettify": "8.0.0-next-8.0", + "@verdaccio/core": "8.0.0-next-8.3", + "@verdaccio/logger-prettify": "8.0.0-next-8.1", "colorette": "2.0.20", "debug": "4.3.7" }, "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { "type": "opencollective", @@ -18584,20 +18564,20 @@ } }, "node_modules/@verdaccio/logger-prettify": { - "version": "8.0.0-next-8.0", - "resolved": "https://registry.npmjs.org/@verdaccio/logger-prettify/-/logger-prettify-8.0.0-next-8.0.tgz", - "integrity": "sha512-7mAFHZF2NPTubrOXYp2+fbMjRW5MMWXMeS3LcpupMAn5uPp6jkKEM8NC4IVJEevC5Ph4vPVZqpoPDpgXHEaV3Q==", + "version": "8.0.0-next-8.1", + "resolved": "https://registry.npmjs.org/@verdaccio/logger-prettify/-/logger-prettify-8.0.0-next-8.1.tgz", + "integrity": "sha512-vLhaGq0q7wtMCcqa0aQY6QOsMNarhTu/l4e6Z8mG/5LUH95GGLsBwpXLnKS94P3deIjsHhc9ycnEmG39txbQ1w==", "dev": true, "license": "MIT", "dependencies": { "colorette": "2.0.20", "dayjs": "1.11.13", "lodash": "4.17.21", - "pino-abstract-transport": "1.1.0", - "sonic-boom": "3.8.0" + "pino-abstract-transport": "1.2.0", + "sonic-boom": "3.8.1" }, "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { "type": "opencollective", @@ -18605,16 +18585,16 @@ } }, "node_modules/@verdaccio/middleware": { - "version": "8.0.0-next-8.1", - "resolved": "https://registry.npmjs.org/@verdaccio/middleware/-/middleware-8.0.0-next-8.1.tgz", - "integrity": "sha512-GpAdJYky1WmOERpxPoCkVSwTTJIsVAjqf2a2uQNvi7R3UZhs059JKhWcZjJMVCGV0uz9xgQvtb3DEuYGHqyaOg==", + "version": "8.0.0-next-8.3", + "resolved": "https://registry.npmjs.org/@verdaccio/middleware/-/middleware-8.0.0-next-8.3.tgz", + "integrity": "sha512-yUe4BGA2/o4GnFuKdquU53kI07xsLMKiMvZfj+M0ZFnFCshCqKmXOoSR8hbzQReUm/OZQSLOppliJn68xmXEVQ==", "dev": true, "license": "MIT", "dependencies": { - "@verdaccio/config": "8.0.0-next-8.1", - "@verdaccio/core": "8.0.0-next-8.1", - "@verdaccio/url": "13.0.0-next-8.1", - "@verdaccio/utils": "7.0.1-next-8.1", + "@verdaccio/config": 
"8.0.0-next-8.3", + "@verdaccio/core": "8.0.0-next-8.3", + "@verdaccio/url": "13.0.0-next-8.3", + "@verdaccio/utils": "8.1.0-next-8.3", "debug": "4.3.7", "express": "4.21.0", "express-rate-limit": "5.5.1", @@ -18623,42 +18603,25 @@ "mime": "2.6.0" }, "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/verdaccio" } }, - "node_modules/@verdaccio/middleware/node_modules/lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/@verdaccio/middleware/node_modules/mime": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", - "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", + "node_modules/@verdaccio/middleware/node_modules/@verdaccio/utils": { + "version": "8.1.0-next-8.3", + "resolved": "https://registry.npmjs.org/@verdaccio/utils/-/utils-8.1.0-next-8.3.tgz", + "integrity": "sha512-TR9S+RYpmOERqiqoXwtBFmWqyyByTUFSkZgfDqKbsBaDbYYUls738NKFQHpg/s6i2E3r46mHcWI+jue/EnOoSg==", "dev": true, "license": "MIT", - "bin": { - "mime": "cli.js" + "dependencies": { + "@verdaccio/core": "8.0.0-next-8.3", + "lodash": "4.17.21", + "minimatch": "7.4.6", + "semver": "7.6.3" }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/@verdaccio/search-indexer": { - "version": "8.0.0-next-8.0", - "resolved": "https://registry.npmjs.org/@verdaccio/search-indexer/-/search-indexer-8.0.0-next-8.0.tgz", - "integrity": "sha512-VS9axVt8XAueiPceVCgaj9nlvYj5s/T4MkAILSf2rVZeFFOMUyxU3mddUCajSHzL+YpqCuzLLL9865sRRzOJ9w==", - "dev": true, - "license": "MIT", "engines": { "node": ">=12" }, @@ -18667,83 +18630,262 @@ "url": "https://opencollective.com/verdaccio" } }, - "node_modules/@verdaccio/signature": { - "version": "8.0.0-next-8.0", - "resolved": "https://registry.npmjs.org/@verdaccio/signature/-/signature-8.0.0-next-8.0.tgz", - "integrity": "sha512-klcc2UlCvQxXDV65Qewo2rZOfv7S1y8NekS/8uurSaCTjU35T+fz+Pbqz1S9XK9oQlMp4vCQ7w3iMPWQbvphEQ==", + "node_modules/@verdaccio/middleware/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", "dev": true, "license": "MIT", "dependencies": { - "debug": "4.3.7", - "jsonwebtoken": "9.0.2" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/verdaccio" + "balanced-match": "^1.0.0" } }, - "node_modules/@verdaccio/streams": { - "version": "10.2.1", - "resolved": "https://registry.npmjs.org/@verdaccio/streams/-/streams-10.2.1.tgz", - "integrity": "sha512-OojIG/f7UYKxC4dYX8x5ax8QhRx1b8OYUAMz82rUottCuzrssX/4nn5QE7Ank0DUSX3C9l/HPthc4d9uKRJqJQ==", + "node_modules/@verdaccio/middleware/node_modules/cookie": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", "dev": true, "license": "MIT", "engines": { - "node": ">=12", - "npm": ">=5" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/verdaccio" + "node": ">= 0.6" } }, - 
"node_modules/@verdaccio/tarball": { - "version": "13.0.0-next-8.1", - "resolved": "https://registry.npmjs.org/@verdaccio/tarball/-/tarball-13.0.0-next-8.1.tgz", - "integrity": "sha512-58uimU2Bqt9+s+9ixy7wK/nPCqbOXhhhr/MQjl+otIlsUhSeATndhFzEctz/W+4MhUDg0tUnE9HC2yeNHHAo1Q==", - "dev": true, - "license": "MIT", + "node_modules/@verdaccio/middleware/node_modules/express": { + "version": "4.21.0", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.0.tgz", + "integrity": "sha512-VqcNGcj/Id5ZT1LZ/cfihi3ttTn+NJmkli2eZADigjq29qTlWi/hAQ43t/VLPq8+UX06FCEx3ByOYet6ZFblng==", + "dev": true, + "license": "MIT", "dependencies": { - "@verdaccio/core": "8.0.0-next-8.1", - "@verdaccio/url": "13.0.0-next-8.1", - "@verdaccio/utils": "7.0.1-next-8.1", + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.3", + "content-disposition": "0.5.4", + "content-type": "~1.0.4", + "cookie": "0.6.0", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "2.0.0", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "1.3.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "merge-descriptors": "1.0.3", + "methods": "~1.1.2", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.10", + "proxy-addr": "~2.0.7", + "qs": "6.13.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.2.1", + "send": "0.19.0", + "serve-static": "1.16.2", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.10.0" + } + }, + "node_modules/@verdaccio/middleware/node_modules/express/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/@verdaccio/middleware/node_modules/lru-cache": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/@verdaccio/middleware/node_modules/mime": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", + "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", + "dev": true, + "license": "MIT", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/@verdaccio/middleware/node_modules/minimatch": { + "version": "7.4.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-7.4.6.tgz", + "integrity": "sha512-sBz8G/YjVniEz6lKPNpKxXwazJe4c19fEfV2GDMX6AjFz+MX9uDWIZW8XreVhkFW3fkIdTv/gxWr/Kks5FFAVw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@verdaccio/middleware/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true, + "license": "MIT" + }, + "node_modules/@verdaccio/search-indexer": { + "version": "8.0.0-next-8.1", + "resolved": 
"https://registry.npmjs.org/@verdaccio/search-indexer/-/search-indexer-8.0.0-next-8.1.tgz", + "integrity": "sha512-Mwwg2o9GicZd6uiCbjBk6xZiWAH/O/2NbEkicPZINFIoJKy1NUihS4RexdDjcsxKEBEggGZXCkzHjzhfaZv1Gg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/verdaccio" + } + }, + "node_modules/@verdaccio/signature": { + "version": "8.0.0-next-8.1", + "resolved": "https://registry.npmjs.org/@verdaccio/signature/-/signature-8.0.0-next-8.1.tgz", + "integrity": "sha512-lHD/Z2FoPQTtDYz6ZlXhj/lrg0SFirHrwCGt/cibl1GlePpx78WPdo03tgAyl0Qf+I35n484/gR1l9eixBQqYw==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "4.3.7", + "jsonwebtoken": "9.0.2" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/verdaccio" + } + }, + "node_modules/@verdaccio/streams": { + "version": "10.2.1", + "resolved": "https://registry.npmjs.org/@verdaccio/streams/-/streams-10.2.1.tgz", + "integrity": "sha512-OojIG/f7UYKxC4dYX8x5ax8QhRx1b8OYUAMz82rUottCuzrssX/4nn5QE7Ank0DUSX3C9l/HPthc4d9uKRJqJQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12", + "npm": ">=5" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/verdaccio" + } + }, + "node_modules/@verdaccio/tarball": { + "version": "13.0.0-next-8.3", + "resolved": "https://registry.npmjs.org/@verdaccio/tarball/-/tarball-13.0.0-next-8.3.tgz", + "integrity": "sha512-jJatpGgiKLmTqyW4WlRpIkldd26rHDj5WTugWqa2Wxa1hVS0b6sZKT/9fEffOvjjAJv69Ued4nH1YCcB2hlhfA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@verdaccio/core": "8.0.0-next-8.3", + "@verdaccio/url": "13.0.0-next-8.3", + "@verdaccio/utils": "8.1.0-next-8.3", "debug": "4.3.7", "gunzip-maybe": "^1.4.2", "lodash": "4.17.21", "tar-stream": "^3.1.7" }, "engines": { - "node": ">=14" + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/verdaccio" + } + }, + "node_modules/@verdaccio/tarball/node_modules/@verdaccio/utils": { + "version": "8.1.0-next-8.3", + "resolved": "https://registry.npmjs.org/@verdaccio/utils/-/utils-8.1.0-next-8.3.tgz", + "integrity": "sha512-TR9S+RYpmOERqiqoXwtBFmWqyyByTUFSkZgfDqKbsBaDbYYUls738NKFQHpg/s6i2E3r46mHcWI+jue/EnOoSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@verdaccio/core": "8.0.0-next-8.3", + "lodash": "4.17.21", + "minimatch": "7.4.6", + "semver": "7.6.3" + }, + "engines": { + "node": ">=12" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/verdaccio" } }, + "node_modules/@verdaccio/tarball/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@verdaccio/tarball/node_modules/minimatch": { + "version": "7.4.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-7.4.6.tgz", + "integrity": "sha512-sBz8G/YjVniEz6lKPNpKxXwazJe4c19fEfV2GDMX6AjFz+MX9uDWIZW8XreVhkFW3fkIdTv/gxWr/Kks5FFAVw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/@verdaccio/ui-theme": { - "version": 
"8.0.0-next-8.1", - "resolved": "https://registry.npmjs.org/@verdaccio/ui-theme/-/ui-theme-8.0.0-next-8.1.tgz", - "integrity": "sha512-9PxV8+jE2Tr+iy9DQW/bzny4YqOlW0mCZ9ct6jhcUW4GdfzU//gY2fBN/DDtQVmfbTy8smuj4Enyv5f0wCsnYg==", + "version": "8.0.0-next-8.3", + "resolved": "https://registry.npmjs.org/@verdaccio/ui-theme/-/ui-theme-8.0.0-next-8.3.tgz", + "integrity": "sha512-3A5v8bkvK5Bm+ERsZLYPv4vwo49dwfSy5a3WcAQgWde/oc6jytl/5XOZOaKX33mEiLj313k02j9ArhKuY1JjgA==", "dev": true, "license": "MIT" }, "node_modules/@verdaccio/url": { - "version": "13.0.0-next-8.1", - "resolved": "https://registry.npmjs.org/@verdaccio/url/-/url-13.0.0-next-8.1.tgz", - "integrity": "sha512-h6pkJf+YtogImKgOrmPP9UVG3p3gtb67gqkQU0bZnK+SEKQt6Rkek/QvtJ8MbmciagYS18bDhpI8DxqLHjDfZQ==", + "version": "13.0.0-next-8.3", + "resolved": "https://registry.npmjs.org/@verdaccio/url/-/url-13.0.0-next-8.3.tgz", + "integrity": "sha512-77BkY3j1d1ZPpmBodK2QlRwNP9tn/IneVry4RHX9j+1xNj4clvpn5rObFnVRGeYf2GPZrYZvIdzVP3BjXl0nkA==", "dev": true, "license": "MIT", "dependencies": { - "@verdaccio/core": "8.0.0-next-8.1", + "@verdaccio/core": "8.0.0-next-8.3", "debug": "4.3.7", "lodash": "4.17.21", "validator": "13.12.0" }, "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { "type": "opencollective", @@ -18770,6 +18912,45 @@ "url": "https://opencollective.com/verdaccio" } }, + "node_modules/@verdaccio/utils/node_modules/@verdaccio/core": { + "version": "8.0.0-next-8.1", + "resolved": "https://registry.npmjs.org/@verdaccio/core/-/core-8.0.0-next-8.1.tgz", + "integrity": "sha512-kQRCB2wgXEh8H88G51eQgAFK9IxmnBtkQ8sY5FbmB6PbBkyHrbGcCp+2mtRqqo36j0W1VAlfM3XzoknMy6qQnw==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "8.17.1", + "core-js": "3.37.1", + "http-errors": "2.0.0", + "http-status-codes": "2.3.0", + "process-warning": "1.0.0", + "semver": "7.6.3" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/verdaccio" + } + }, + "node_modules/@verdaccio/utils/node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, "node_modules/@verdaccio/utils/node_modules/brace-expansion": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", @@ -18780,6 +18961,25 @@ "balanced-match": "^1.0.0" } }, + "node_modules/@verdaccio/utils/node_modules/core-js": { + "version": "3.37.1", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.37.1.tgz", + "integrity": "sha512-Xn6qmxrQZyB0FFY8E3bgRXei3lWDJHhvI+u0q9TKIYM49G8pAr0FgnnrFRAmsbptZL1yxRADVXn+x5AGsbBfyw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/core-js" + } + }, + "node_modules/@verdaccio/utils/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true, + "license": "MIT" + }, 
"node_modules/@verdaccio/utils/node_modules/minimatch": { "version": "7.4.6", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-7.4.6.tgz", @@ -18876,6 +19076,17 @@ "node": ">=8" } }, + "node_modules/@zip.js/zip.js": { + "version": "2.7.52", + "resolved": "https://registry.npmjs.org/@zip.js/zip.js/-/zip.js-2.7.52.tgz", + "integrity": "sha512-+5g7FQswvrCHwYKNMd/KFxZSObctLSsQOgqBSi0LzwHo3li9Eh1w5cF5ndjQw9Zbr3ajVnd2+XyiX85gAetx1Q==", + "dev": true, + "engines": { + "bun": ">=0.7.0", + "deno": ">=1.0.0", + "node": ">=16.5.0" + } + }, "node_modules/abort-controller": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", @@ -19010,16 +19221,16 @@ } }, "node_modules/ansi-escapes": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-5.0.0.tgz", - "integrity": "sha512-5GFMVX8HqE/TB+FuBJGuO5XG0WrsA6ptUqoODaT/n9mmUaZFkqnBueB4leqGBCmrUHnCnC4PCZTCd0E7QQ83bA==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.0.0.tgz", + "integrity": "sha512-GdYO7a61mR0fOlAsvC9/rIHf7L96sBc6dEWzeOu+KAea5bZyQRPIpojrVoI4AXGJS/ycu/fBTdLrUkA4ODrvjw==", "dev": true, "license": "MIT", "dependencies": { - "type-fest": "^1.0.2" + "environment": "^1.0.0" }, "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -19301,9 +19512,9 @@ "license": "MIT" }, "node_modules/async": { - "version": "3.2.5", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz", - "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==", + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", + "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==", "dev": true, "license": "MIT" }, @@ -19433,9 +19644,9 @@ "license": "0BSD" }, "node_modules/aws-cdk": { - "version": "2.158.0", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.158.0.tgz", - "integrity": "sha512-UcrxBG02RACrnTvfuyZiTuOz8gqOpnqjCMTdVmdpExv5qk9hddhtRAubNaC4xleHuNJnvskYqqVW+Y3Abh6zGQ==", + "version": "2.164.1", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.164.1.tgz", + "integrity": "sha512-dWRViQgHLe7GHkPIQGA+8EQSm8TBcxemyCC3HHW3wbLMWUDbspio9Dktmw5EmWxlFjjWh86Dk1JWf1zKQo8C5g==", "license": "Apache-2.0", "peer": true, "bin": { @@ -19449,9 +19660,9 @@ } }, "node_modules/aws-cdk-lib": { - "version": "2.158.0", - "resolved": "https://registry.npmjs.org/aws-cdk-lib/-/aws-cdk-lib-2.158.0.tgz", - "integrity": "sha512-Pl9CCLM+XRTy6nyyRJM1INEMtwIlZOib0FWyq9i9E388vurw7sNVJ6tAsfLpGIOLHsFQCbF4f6OZ0KSVxmMaiA==", + "version": "2.164.1", + "resolved": "https://registry.npmjs.org/aws-cdk-lib/-/aws-cdk-lib-2.164.1.tgz", + "integrity": "sha512-jNvVmfZJbZoAYU94b5dzTlF2z6JXJ204NgcYY5haOa6mq3m2bzdYPXnPtB5kpAX3oBi++yoRdmLhqgckdEhUZA==", "bundleDependencies": [ "@balena/dockerignore", "case", @@ -19470,7 +19681,7 @@ "@aws-cdk/asset-awscli-v1": "^2.2.202", "@aws-cdk/asset-kubectl-v20": "^2.1.2", "@aws-cdk/asset-node-proxy-agent-v6": "^2.1.0", - "@aws-cdk/cloud-assembly-schema": "^36.0.24", + "@aws-cdk/cloud-assembly-schema": "^38.0.0", "@balena/dockerignore": "^1.0.2", "case": "1.6.3", "fs-extra": "^11.2.0", @@ -19910,9 +20121,9 @@ } }, "node_modules/b4a": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.6.tgz", - "integrity": 
"sha512-5Tk1HLk6b6ctmjIkAcU/Ujv/1WqiDl0F0JdRCR80VsOcUlHcu7pWeWRlOqQLHfDEsVx9YH/aif5AG4ehoCtTmg==", + "version": "1.6.7", + "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.7.tgz", + "integrity": "sha512-OnAYlL5b7LEkALw87fUVafQw5rVR9RjwGd4KUwNQ6DrrNmaVaUCgLipfVlzrPQ4tWOR9P0IXGNOx50jYCCdSJg==", "dev": true, "license": "Apache-2.0" }, @@ -20058,9 +20269,9 @@ "license": "MIT" }, "node_modules/bare-events": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.4.2.tgz", - "integrity": "sha512-qMKFd2qG/36aA4GwvKq8MxnPgCQAmBWmSyLWsJcbn8v03wvIPQ/hG1Ms8bPzndZxMDoHpxez5VOS+gC9Yi24/Q==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.5.0.tgz", + "integrity": "sha512-/E8dDe9dsbLyh2qrZ64PEPadOQ0F4gbl1sUJOrmph7xOiIxfY8vwab/4bFLh4Y88/Hk/ujKcrQKc+ps0mv873A==", "dev": true, "license": "Apache-2.0", "optional": true @@ -20184,22 +20395,6 @@ "dev": true, "license": "MIT" }, - "node_modules/body-parser/node_modules/qs": { - "version": "6.13.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", - "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "side-channel": "^1.0.6" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/bowser": { "version": "2.11.0", "resolved": "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz", @@ -20828,48 +21023,102 @@ } }, "node_modules/cli-cursor": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-5.0.0.tgz", + "integrity": "sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==", + "dev": true, + "license": "MIT", + "dependencies": { + "restore-cursor": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-spinners": { + "version": "2.9.2", + "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz", + "integrity": "sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==", + "license": "MIT", + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-truncate": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-4.0.0.tgz", - "integrity": "sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==", + "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-4.0.0.tgz", + "integrity": "sha512-nPdaFdQ0h/GEigbPClz11D0v/ZJEwxmeVZGeMo3Z5StPtUTkA9o1lD6QwoirYiSDzbcwn2XcjwmCp68W1IS4TA==", "dev": true, "license": "MIT", "dependencies": { - "restore-cursor": "^4.0.0" + "slice-ansi": "^5.0.0", + "string-width": "^7.0.0" + }, + "engines": { + "node": ">=18" }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-truncate/node_modules/ansi-regex": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", + "dev": true, + "license": "MIT", "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "node": ">=12" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "url": 
"https://github.com/chalk/ansi-regex?sponsor=1" } }, - "node_modules/cli-spinners": { - "version": "2.9.2", - "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz", - "integrity": "sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==", + "node_modules/cli-truncate/node_modules/emoji-regex": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.4.0.tgz", + "integrity": "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==", + "dev": true, + "license": "MIT" + }, + "node_modules/cli-truncate/node_modules/string-width": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", + "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", + "dev": true, "license": "MIT", + "dependencies": { + "emoji-regex": "^10.3.0", + "get-east-asian-width": "^1.0.0", + "strip-ansi": "^7.1.0" + }, "engines": { - "node": ">=6" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/cli-truncate": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-3.1.0.tgz", - "integrity": "sha512-wfOBkjXteqSnI59oPcJkcPl/ZmwvMMOj340qUIY1SKZCv0B9Cf4D4fAucRkIKQmsIuYK3x1rrgU7MeGRruiuiA==", + "node_modules/cli-truncate/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", "dev": true, "license": "MIT", "dependencies": { - "slice-ansi": "^5.0.0", - "string-width": "^5.0.0" + "ansi-regex": "^6.0.1" }, "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "node": ">=12" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "url": "https://github.com/chalk/strip-ansi?sponsor=1" } }, "node_modules/cli-width": { @@ -20882,9 +21131,9 @@ } }, "node_modules/clipanion": { - "version": "4.0.0-rc.3", - "resolved": "https://registry.npmjs.org/clipanion/-/clipanion-4.0.0-rc.3.tgz", - "integrity": "sha512-+rJOJMt2N6Oikgtfqmo/Duvme7uz3SIedL2b6ycgCztQMiTfr3aQh2DDyLHl+QUPClKMNpSg3gDJFvNQYIcq1g==", + "version": "4.0.0-rc.4", + "resolved": "https://registry.npmjs.org/clipanion/-/clipanion-4.0.0-rc.4.tgz", + "integrity": "sha512-CXkMQxU6s9GklO/1f714dkKBMu1lopS1WFF0B8o4AxPykR1hpozxSiUZ5ZUeBjfPgCWqbcNOtZVFhB8Lkfp1+Q==", "dev": true, "license": "MIT", "workspaces": [ @@ -20999,13 +21248,13 @@ } }, "node_modules/commander": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-11.0.0.tgz", - "integrity": "sha512-9HMlXtt/BNoYr8ooyjjNRdIilOTkVJXB+GhxMTtOKwk0R4j4lS4NpjuqmRxroBfnfTSHQIHQB7wryHhXarNjmQ==", + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz", + "integrity": "sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==", "dev": true, "license": "MIT", "engines": { - "node": ">=16" + "node": ">=18" } }, "node_modules/comment-parser": { @@ -21101,12 +21350,11 @@ } }, "node_modules/constructs": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/constructs/-/constructs-10.3.0.tgz", - "integrity": "sha512-vbK8i3rIb/xwZxSpTjz3SagHn1qq9BChLEfy5Hf6fB3/2eFbrwt2n9kHwQcS0CPTRBesreeAcsJfMq2229FnbQ==", - "license": "Apache-2.0", + "version": "10.3.2", + "resolved": 
"https://registry.npmjs.org/constructs/-/constructs-10.3.2.tgz", + "integrity": "sha512-odjsmhoBKRWa2F/Z3edOSZCb7IgxAL5usXQMRKoINMJzcFfC1GvcbO6Dd/xMGLRv4J/tEsjSLwqLxRfJrjPsQw==", "engines": { - "node": ">= 16.14.0" + "node": ">= 18.12.0" } }, "node_modules/content-disposition": { @@ -21139,9 +21387,9 @@ "license": "MIT" }, "node_modules/cookie": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", - "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", "dev": true, "license": "MIT", "engines": { @@ -21820,6 +22068,19 @@ "node": ">=4" } }, + "node_modules/environment": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/environment/-/environment-1.1.0.tgz", + "integrity": "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/error-ex": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", @@ -23245,9 +23506,9 @@ } }, "node_modules/express": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/express/-/express-4.21.0.tgz", - "integrity": "sha512-VqcNGcj/Id5ZT1LZ/cfihi3ttTn+NJmkli2eZADigjq29qTlWi/hAQ43t/VLPq8+UX06FCEx3ByOYet6ZFblng==", + "version": "4.21.1", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.1.tgz", + "integrity": "sha512-YSFlK1Ee0/GC8QaO91tHcDxJiE/X4FbpAyQWkxAvG6AXCuR65YzK8ua6D9hvi/TzUfZMpc+BwuM1IPw8fmQBiQ==", "dev": true, "license": "MIT", "dependencies": { @@ -23256,7 +23517,7 @@ "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", - "cookie": "0.6.0", + "cookie": "0.7.1", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", @@ -23311,22 +23572,6 @@ "dev": true, "license": "MIT" }, - "node_modules/express/node_modules/qs": { - "version": "6.13.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", - "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "side-channel": "^1.0.6" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/extend": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", @@ -23755,18 +24000,18 @@ } }, "node_modules/form-data": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", - "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz", + "integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==", "dev": true, "license": "MIT", "dependencies": { "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", + "combined-stream": "^1.0.8", "mime-types": "^2.1.12" }, "engines": { - "node": ">= 0.12" + "node": ">= 6" } }, "node_modules/formdata-polyfill": { @@ -23899,6 +24144,19 @@ "node": "6.* || 8.* || >= 10.*" } }, + 
"node_modules/get-east-asian-width": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.3.0.tgz", + "integrity": "sha512-vpeMIQKxczTD/0s2CdEWHcb0eeJe6TFjxb+J5xgX7hScxqrGuyjmv4c1D4A/gelKfyox0gJJwIHF+fLjeaM8kQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/get-intrinsic": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", @@ -24364,15 +24622,15 @@ } }, "node_modules/http-signature": { - "version": "1.3.6", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.3.6.tgz", - "integrity": "sha512-3adrsD6zqo4GsTqtO7FyrejHNv+NgiIfAfv68+jVlFmSr9OGy7zrxONceFRLKvnnZA5jbxQBX1u9PpB6Wi32Gw==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.4.0.tgz", + "integrity": "sha512-G5akfn7eKbpDN+8nPS/cb57YeA1jLTVxjpCj7tmm3QKPdyDy7T+qSC40e9ptydSWvkwjSXw1VbkpyEm39ukeAg==", "dev": true, "license": "MIT", "dependencies": { "assert-plus": "^1.0.0", "jsprim": "^2.0.2", - "sshpk": "^1.14.1" + "sshpk": "^1.18.0" }, "engines": { "node": ">=0.10" @@ -25773,13 +26031,16 @@ } }, "node_modules/lilconfig": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz", - "integrity": "sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.2.tgz", + "integrity": "sha512-eop+wDAvpItUys0FWkHIKeC9ybYrTGbU41U5K7+bttZZeohvnY7M9dZ5kB21GNWiFT2q1OoPTvncPCgSOVO5ow==", "dev": true, "license": "MIT", "engines": { - "node": ">=10" + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" } }, "node_modules/lines-and-columns": { @@ -25790,28 +26051,28 @@ "license": "MIT" }, "node_modules/lint-staged": { - "version": "13.3.0", - "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-13.3.0.tgz", - "integrity": "sha512-mPRtrYnipYYv1FEE134ufbWpeggNTo+O/UPzngoaKzbzHAthvR55am+8GfHTnqNRQVRRrYQLGW9ZyUoD7DsBHQ==", + "version": "15.2.10", + "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-15.2.10.tgz", + "integrity": "sha512-5dY5t743e1byO19P9I4b3x8HJwalIznL5E1FWYnU6OWw33KxNBSLAc6Cy7F2PsFEO8FKnLwjwm5hx7aMF0jzZg==", "dev": true, "license": "MIT", "dependencies": { - "chalk": "5.3.0", - "commander": "11.0.0", - "debug": "4.3.4", - "execa": "7.2.0", - "lilconfig": "2.1.0", - "listr2": "6.6.1", - "micromatch": "4.0.5", - "pidtree": "0.6.0", - "string-argv": "0.3.2", - "yaml": "2.3.1" + "chalk": "~5.3.0", + "commander": "~12.1.0", + "debug": "~4.3.6", + "execa": "~8.0.1", + "lilconfig": "~3.1.2", + "listr2": "~8.2.4", + "micromatch": "~4.0.8", + "pidtree": "~0.6.0", + "string-argv": "~0.3.2", + "yaml": "~2.5.0" }, "bin": { "lint-staged": "bin/lint-staged.js" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": ">=18.12.0" }, "funding": { "url": "https://opencollective.com/lint-staged" @@ -25830,123 +26091,107 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/lint-staged/node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "node_modules/listr2": { + "version": "8.2.5", + "resolved": 
"https://registry.npmjs.org/listr2/-/listr2-8.2.5.tgz", + "integrity": "sha512-iyAZCeyD+c1gPyE9qpFu8af0Y+MRtmKOncdGoA2S5EY8iFq99dmmvkNnHiWo+pj0s7yH7l3KPIgee77tKpXPWQ==", "dev": true, "license": "MIT", "dependencies": { - "ms": "2.1.2" + "cli-truncate": "^4.0.0", + "colorette": "^2.0.20", + "eventemitter3": "^5.0.1", + "log-update": "^6.1.0", + "rfdc": "^1.4.1", + "wrap-ansi": "^9.0.0" }, "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } + "node": ">=18.0.0" } }, - "node_modules/lint-staged/node_modules/execa": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-7.2.0.tgz", - "integrity": "sha512-UduyVP7TLB5IcAQl+OzLyLcS/l32W/GLg+AhHJ+ow40FOk2U3SAllPwR44v4vmdFwIWqpdwxxpQbF1n5ta9seA==", + "node_modules/listr2/node_modules/ansi-regex": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", "dev": true, "license": "MIT", - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.1", - "human-signals": "^4.3.0", - "is-stream": "^3.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^5.1.0", - "onetime": "^6.0.0", - "signal-exit": "^3.0.7", - "strip-final-newline": "^3.0.0" - }, "engines": { - "node": "^14.18.0 || ^16.14.0 || >=18.0.0" + "node": ">=12" }, "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" + "url": "https://github.com/chalk/ansi-regex?sponsor=1" } }, - "node_modules/lint-staged/node_modules/get-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "node_modules/listr2/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", "dev": true, "license": "MIT", "engines": { - "node": ">=10" + "node": ">=12" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/lint-staged/node_modules/human-signals": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-4.3.1.tgz", - "integrity": "sha512-nZXjEF2nbo7lIw3mgYjItAfgQXog3OjJogSbKa2CQIIvSGWcKgeJnQlNXip6NglNzYH45nSRiEVimMvYL8DDqQ==", + "node_modules/listr2/node_modules/emoji-regex": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.4.0.tgz", + "integrity": "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==", "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=14.18.0" - } + "license": "MIT" }, - "node_modules/lint-staged/node_modules/micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "node_modules/listr2/node_modules/string-width": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", + "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", "dev": true, "license": "MIT", "dependencies": { - 
"braces": "^3.0.2", - "picomatch": "^2.3.1" + "emoji-regex": "^10.3.0", + "get-east-asian-width": "^1.0.0", + "strip-ansi": "^7.1.0" }, "engines": { - "node": ">=8.6" + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/lint-staged/node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true, - "license": "MIT" - }, - "node_modules/lint-staged/node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "node_modules/listr2/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", "dev": true, - "license": "ISC" + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } }, - "node_modules/listr2": { - "version": "6.6.1", - "resolved": "https://registry.npmjs.org/listr2/-/listr2-6.6.1.tgz", - "integrity": "sha512-+rAXGHh0fkEWdXBmX+L6mmfmXmXvDGEKzkjxO+8mP3+nI/r/CWznVBvsibXdxda9Zz0OW2e2ikphN3OwCT/jSg==", + "node_modules/listr2/node_modules/wrap-ansi": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.0.tgz", + "integrity": "sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q==", "dev": true, "license": "MIT", "dependencies": { - "cli-truncate": "^3.1.0", - "colorette": "^2.0.20", - "eventemitter3": "^5.0.1", - "log-update": "^5.0.1", - "rfdc": "^1.3.0", - "wrap-ansi": "^8.1.0" + "ansi-styles": "^6.2.1", + "string-width": "^7.0.0", + "strip-ansi": "^7.1.0" }, "engines": { - "node": ">=16.0.0" - }, - "peerDependencies": { - "enquirer": ">= 2.3.0 < 3" + "node": ">=18" }, - "peerDependenciesMeta": { - "enquirer": { - "optional": true - } + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, "node_modules/locate-path": { @@ -26162,20 +26407,20 @@ } }, "node_modules/log-update": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/log-update/-/log-update-5.0.1.tgz", - "integrity": "sha512-5UtUDQ/6edw4ofyljDNcOVJQ4c7OjDro4h3y8e1GQL5iYElYclVHJ3zeWchylvMaKnDbDilC8irOVyexnA/Slw==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/log-update/-/log-update-6.1.0.tgz", + "integrity": "sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==", "dev": true, "license": "MIT", "dependencies": { - "ansi-escapes": "^5.0.0", - "cli-cursor": "^4.0.0", - "slice-ansi": "^5.0.0", - "strip-ansi": "^7.0.1", - "wrap-ansi": "^8.0.1" + "ansi-escapes": "^7.0.0", + "cli-cursor": "^5.0.0", + "slice-ansi": "^7.1.0", + "strip-ansi": "^7.1.0", + "wrap-ansi": "^9.0.0" }, "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -26194,6 +26439,77 @@ "url": "https://github.com/chalk/ansi-regex?sponsor=1" } }, + "node_modules/log-update/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + 
"integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/log-update/node_modules/emoji-regex": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.4.0.tgz", + "integrity": "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==", + "dev": true, + "license": "MIT" + }, + "node_modules/log-update/node_modules/is-fullwidth-code-point": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-5.0.0.tgz", + "integrity": "sha512-OVa3u9kkBbw7b8Xw5F9P+D/T9X+Z4+JruYVNapTjPYZYUznQ5YfWeFkOj606XYYW8yugTfC8Pj0hYqvi4ryAhA==", + "dev": true, + "license": "MIT", + "dependencies": { + "get-east-asian-width": "^1.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-update/node_modules/slice-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-7.1.0.tgz", + "integrity": "sha512-bSiSngZ/jWeX93BqeIAbImyTbEihizcwNjFoRUIY/T1wWQsfsm2Vw1agPKylXvQTU7iASGdHhyqRlqQzfz+Htg==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.2.1", + "is-fullwidth-code-point": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + "node_modules/log-update/node_modules/string-width": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", + "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^10.3.0", + "get-east-asian-width": "^1.0.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/log-update/node_modules/strip-ansi": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", @@ -26210,6 +26526,24 @@ "url": "https://github.com/chalk/strip-ansi?sponsor=1" } }, + "node_modules/log-update/node_modules/wrap-ansi": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.0.tgz", + "integrity": "sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.2.1", + "string-width": "^7.0.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, "node_modules/long": { "version": "5.2.3", "resolved": "https://registry.npmjs.org/long/-/long-5.2.3.tgz", @@ -26448,7 +26782,20 @@ "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", "license": "MIT", "engines": { - "node": ">=12" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mimic-function": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz", + "integrity": "sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==", + 
"dev": true, + "license": "MIT", + "engines": { + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -26541,66 +26888,6 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, - "node_modules/mv": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", - "integrity": "sha512-at/ZndSy3xEGJ8i0ygALh8ru9qy7gWW1cmkaqBN29JmMlIvM//MEO9y1sk/avxuwnPcfhkejkLsuPxH81BrkSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "mkdirp": "~0.5.1", - "ncp": "~2.0.0", - "rimraf": "~2.4.0" - }, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/mv/node_modules/glob": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", - "integrity": "sha512-MKZeRNyYZAVVVG1oZeLaWie1uweH40m9AZwIwxyPbTSX4hHrVYSzLg0Ro5Z5R7XKkIX+Cc6oD1rqeDJnwsB8/A==", - "deprecated": "Glob versions prior to v9 are no longer supported", - "dev": true, - "license": "ISC", - "dependencies": { - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "2 || 3", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - } - }, - "node_modules/mv/node_modules/mkdirp": { - "version": "0.5.6", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", - "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", - "dev": true, - "license": "MIT", - "dependencies": { - "minimist": "^1.2.6" - }, - "bin": { - "mkdirp": "bin/cmd.js" - } - }, - "node_modules/mv/node_modules/rimraf": { - "version": "2.4.5", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", - "integrity": "sha512-J5xnxTyqaiw06JjMftq7L9ouA448dw/E7dKghkP9WpKNuwmARNNg+Gk8/u5ryb9N/Yo2+z3MCwuqFK/+qPOPfQ==", - "deprecated": "Rimraf versions prior to v4 are no longer supported", - "dev": true, - "license": "ISC", - "dependencies": { - "glob": "^6.0.1" - }, - "bin": { - "rimraf": "bin.js" - } - }, "node_modules/mysql2": { "version": "3.9.9", "resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.9.9.tgz", @@ -26682,16 +26969,6 @@ "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", "license": "MIT" }, - "node_modules/ncp": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", - "integrity": "sha512-zIdGUrPRFTUELUvr3Gmc7KZ2Sw/h1PiVM0Af/oHB6zgnV1ikqSfRk+TOufi79aHYCW3NiOXmr1BP5nWbzojLaA==", - "dev": true, - "license": "MIT", - "bin": { - "ncp": "bin/ncp" - } - }, "node_modules/negotiator": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", @@ -27646,32 +27923,32 @@ } }, "node_modules/pino": { - "version": "8.17.2", - "resolved": "https://registry.npmjs.org/pino/-/pino-8.17.2.tgz", - "integrity": "sha512-LA6qKgeDMLr2ux2y/YiUt47EfgQ+S9LznBWOJdN3q1dx2sv0ziDLUBeVpyVv17TEcGCBuWf0zNtg3M5m1NhhWQ==", + "version": "9.4.0", + "resolved": "https://registry.npmjs.org/pino/-/pino-9.4.0.tgz", + "integrity": "sha512-nbkQb5+9YPhQRz/BeQmrWpEknAaqjpAqRK8NwJpmrX/JHu7JuZC5G1CeAwJDJfGes4h+YihC6in3Q2nGb+Y09w==", "dev": true, "license": "MIT", "dependencies": { "atomic-sleep": "^1.0.0", "fast-redact": "^3.1.1", "on-exit-leak-free": "^2.1.0", - "pino-abstract-transport": "v1.1.0", - "pino-std-serializers": "^6.0.0", - "process-warning": "^3.0.0", + "pino-abstract-transport": "^1.2.0", + "pino-std-serializers": "^7.0.0", + "process-warning": "^4.0.0", "quick-format-unescaped": "^4.0.3", "real-require": "^0.2.0", "safe-stable-stringify": "^2.3.1", - "sonic-boom": 
"^3.7.0", - "thread-stream": "^2.0.0" + "sonic-boom": "^4.0.1", + "thread-stream": "^3.0.0" }, "bin": { "pino": "bin.js" } }, "node_modules/pino-abstract-transport": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-1.1.0.tgz", - "integrity": "sha512-lsleG3/2a/JIWUtf9Q5gUNErBqwIu1tUKTT3dUzaf5DySw9ra1wcqKjJjLX1VTY64Wk1eEOYsVGSaGfCK85ekA==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-1.2.0.tgz", + "integrity": "sha512-Guhh8EZfPCfH+PMXAb6rKOjGQEoy0xlAIn+irODG5kgfYV+BQ0rGYYWTIel3P5mmyXqkYkPmdIkywsn6QKUR1Q==", "dev": true, "license": "MIT", "dependencies": { @@ -27763,19 +28040,29 @@ } }, "node_modules/pino-std-serializers": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-6.2.2.tgz", - "integrity": "sha512-cHjPPsE+vhj/tnhCy/wiMh3M3z3h/j15zHQX+S9GkTBgqJuTuJzYJ4gUyACLhDaJ7kk9ba9iRDmbH2tJU03OiA==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-7.0.0.tgz", + "integrity": "sha512-e906FRY0+tV27iq4juKzSYPbUj2do2X2JX4EzSca1631EB2QJQUqGbDuERal7LCtOpxl6x3+nvo9NPZcmjkiFA==", "dev": true, "license": "MIT" }, "node_modules/pino/node_modules/process-warning": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-3.0.0.tgz", - "integrity": "sha512-mqn0kFRl0EoqhnL0GQ0veqFHyIN1yig9RHh/InzORTUiZHFRAur+aMtRkELNwGs9aNwKS6tg/An4NYBPGwvtzQ==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-4.0.0.tgz", + "integrity": "sha512-/MyYDxttz7DfGMMHiysAsFE4qF+pQYAA8ziO/3NcRVrQ5fSk+Mns4QZA/oRPFzvcqNoVJXQNWNAsdwBXLUkQKw==", "dev": true, "license": "MIT" }, + "node_modules/pino/node_modules/sonic-boom": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-4.2.0.tgz", + "integrity": "sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww==", + "dev": true, + "license": "MIT", + "dependencies": { + "atomic-sleep": "^1.0.0" + } + }, "node_modules/pkg-dir": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-5.0.0.tgz", @@ -28070,13 +28357,13 @@ } }, "node_modules/qs": { - "version": "6.10.4", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.4.tgz", - "integrity": "sha512-OQiU+C+Ds5qiH91qh/mg0w+8nwQuLjM4F4M/PbmhDOoYehPh+Fb0bDjtR1sOvy7YKxvj28Y/M0PhP5uVX0kB+g==", + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", "dev": true, "license": "BSD-3-Clause", "dependencies": { - "side-channel": "^1.0.4" + "side-channel": "^1.0.6" }, "engines": { "node": ">=0.6" @@ -28532,55 +28819,38 @@ } }, "node_modules/restore-cursor": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-4.0.0.tgz", - "integrity": "sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-5.1.0.tgz", + "integrity": "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==", "dev": true, "license": "MIT", "dependencies": { - "onetime": "^5.1.0", - "signal-exit": "^3.0.2" + "onetime": "^7.0.0", + "signal-exit": "^4.1.0" }, "engines": { - "node": 
"^12.20.0 || ^14.13.1 || >=16.0.0" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/restore-cursor/node_modules/mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/restore-cursor/node_modules/onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-7.0.0.tgz", + "integrity": "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==", "dev": true, "license": "MIT", "dependencies": { - "mimic-fn": "^2.1.0" + "mimic-function": "^5.0.0" }, "engines": { - "node": ">=6" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/restore-cursor/node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "dev": true, - "license": "ISC" - }, "node_modules/reusify": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", @@ -29194,9 +29464,9 @@ } }, "node_modules/sonic-boom": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-3.8.0.tgz", - "integrity": "sha512-ybz6OYOUjoQQCQ/i4LU8kaToD8ACtYP+Cj5qd2AO36bwbdewxWJ3ArmJ2cr6AvxlL2o0PqnCcPGUgkILbfkaCA==", + "version": "3.8.1", + "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-3.8.1.tgz", + "integrity": "sha512-y4Z8LCDBuum+PBP3lSV7RHrXscqksve/bi0as7mhwVnBW+/wUqKT/2Kb7um8yqcFy0duYbbPxzt89Zy2nOCaxg==", "dev": true, "license": "MIT", "dependencies": { @@ -29917,14 +30187,11 @@ } }, "node_modules/text-decoder": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.2.0.tgz", - "integrity": "sha512-n1yg1mOj9DNpk3NeZOx7T6jchTbyJS3i3cucbNN6FcdPriMZx7NsgrGpWWdWZZGxD7ES1XB+3uoqHMgOKaN+fg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.2.1.tgz", + "integrity": "sha512-x9v3H/lTKIJKQQe7RPQkLfKAnc9lUTkWDypIQgTzPJAq+5/GCDHonmshfvlsNSj58yyshbIJJDLmU15qNERrXQ==", "dev": true, - "license": "Apache-2.0", - "dependencies": { - "b4a": "^1.6.4" - } + "license": "Apache-2.0" }, "node_modules/text-table": { "version": "0.2.0", @@ -29933,9 +30200,9 @@ "license": "MIT" }, "node_modules/thread-stream": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-2.7.0.tgz", - "integrity": "sha512-qQiRWsU/wvNolI6tbbCKd9iKaTnCXsTwVxhhKM6nctPdujTyztjlbUkUTUymidWcMnZ5pWR0ej4a0tjsW021vw==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-3.1.0.tgz", + "integrity": "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A==", "dev": true, "license": "MIT", "dependencies": { @@ -30255,19 +30522,6 @@ "node": ">=4" } }, - "node_modules/type-fest": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-1.4.0.tgz", - "integrity": 
"sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA==", - "dev": true, - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/type-is": { "version": "1.6.18", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", @@ -30736,33 +30990,33 @@ } }, "node_modules/verdaccio": { - "version": "5.32.2", - "resolved": "https://registry.npmjs.org/verdaccio/-/verdaccio-5.32.2.tgz", - "integrity": "sha512-QnVYIUvwB884fwVcA/D+x7AabsRPlTPyYAKMtExm8kJjiH+s2LGK2qX2o3I4VmYXqBR3W9b8gEnyQnGwQhUPsw==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/verdaccio/-/verdaccio-6.0.1.tgz", + "integrity": "sha512-fGP5V18Pz3yIDZNZjQrBqRnMr4sDn8fPO7eoZTIX2D7MvqoGELRG88Townq8PhYU5KAPZ2c2OnNRr8SKReO3Ag==", "dev": true, "license": "MIT", "dependencies": { - "@cypress/request": "3.0.1", - "@verdaccio/auth": "8.0.0-next-8.1", - "@verdaccio/config": "8.0.0-next-8.1", - "@verdaccio/core": "8.0.0-next-8.1", + "@cypress/request": "3.0.5", + "@verdaccio/auth": "8.0.0-next-8.3", + "@verdaccio/config": "8.0.0-next-8.3", + "@verdaccio/core": "8.0.0-next-8.3", "@verdaccio/local-storage-legacy": "11.0.2", - "@verdaccio/logger-7": "8.0.0-next-8.1", - "@verdaccio/middleware": "8.0.0-next-8.1", - "@verdaccio/search-indexer": "8.0.0-next-8.0", - "@verdaccio/signature": "8.0.0-next-8.0", + "@verdaccio/logger": "8.0.0-next-8.3", + "@verdaccio/middleware": "8.0.0-next-8.3", + "@verdaccio/search-indexer": "8.0.0-next-8.1", + "@verdaccio/signature": "8.0.0-next-8.1", "@verdaccio/streams": "10.2.1", - "@verdaccio/tarball": "13.0.0-next-8.1", - "@verdaccio/ui-theme": "8.0.0-next-8.1", - "@verdaccio/url": "13.0.0-next-8.1", + "@verdaccio/tarball": "13.0.0-next-8.3", + "@verdaccio/ui-theme": "8.0.0-next-8.3", + "@verdaccio/url": "13.0.0-next-8.3", "@verdaccio/utils": "7.0.1-next-8.1", - "async": "3.2.5", - "clipanion": "4.0.0-rc.3", + "async": "3.2.6", + "clipanion": "4.0.0-rc.4", "compression": "1.7.4", "cors": "2.8.5", - "debug": "^4.3.5", - "envinfo": "7.13.0", - "express": "4.21.0", + "debug": "4.3.7", + "envinfo": "7.14.0", + "express": "4.21.1", "express-rate-limit": "5.5.1", "fast-safe-stringify": "2.1.1", "handlebars": "4.7.8", @@ -30774,18 +31028,17 @@ "lru-cache": "7.18.3", "mime": "3.0.0", "mkdirp": "1.0.4", - "mv": "2.1.1", "pkginfo": "0.4.1", "semver": "7.6.3", "validator": "13.12.0", - "verdaccio-audit": "13.0.0-next-8.1", - "verdaccio-htpasswd": "13.0.0-next-8.1" + "verdaccio-audit": "13.0.0-next-8.3", + "verdaccio-htpasswd": "13.0.0-next-8.3" }, "bin": { "verdaccio": "bin/verdaccio" }, "engines": { - "node": ">=14" + "node": ">=18" }, "funding": { "type": "opencollective", @@ -30793,35 +31046,105 @@ } }, "node_modules/verdaccio-audit": { - "version": "13.0.0-next-8.1", - "resolved": "https://registry.npmjs.org/verdaccio-audit/-/verdaccio-audit-13.0.0-next-8.1.tgz", - "integrity": "sha512-EEfUeC1kHuErtwF9FC670W+EXHhcl+iuigONkcprwRfkPxmdBs+Hx36745hgAMZ9SCqedNECaycnGF3tZ3VYfw==", + "version": "13.0.0-next-8.3", + "resolved": "https://registry.npmjs.org/verdaccio-audit/-/verdaccio-audit-13.0.0-next-8.3.tgz", + "integrity": "sha512-/mgRfsg+RENtUggcf0xnfPKNJJqidyKING3nyHgS3vABE6CBe4/fWQKs67X4mfCFiIVLf0PiOTFGT8tmwSZubA==", "dev": true, "license": "MIT", "dependencies": { - "@verdaccio/config": "8.0.0-next-8.1", - "@verdaccio/core": "8.0.0-next-8.1", + "@verdaccio/config": "8.0.0-next-8.3", + "@verdaccio/core": "8.0.0-next-8.3", "express": "4.21.0", 
"https-proxy-agent": "5.0.1", "node-fetch": "cjs" }, "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/verdaccio" } }, + "node_modules/verdaccio-audit/node_modules/cookie": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/verdaccio-audit/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/verdaccio-audit/node_modules/express": { + "version": "4.21.0", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.0.tgz", + "integrity": "sha512-VqcNGcj/Id5ZT1LZ/cfihi3ttTn+NJmkli2eZADigjq29qTlWi/hAQ43t/VLPq8+UX06FCEx3ByOYet6ZFblng==", + "dev": true, + "license": "MIT", + "dependencies": { + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.3", + "content-disposition": "0.5.4", + "content-type": "~1.0.4", + "cookie": "0.6.0", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "2.0.0", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "1.3.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "merge-descriptors": "1.0.3", + "methods": "~1.1.2", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.10", + "proxy-addr": "~2.0.7", + "qs": "6.13.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.2.1", + "send": "0.19.0", + "serve-static": "1.16.2", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.10.0" + } + }, + "node_modules/verdaccio-audit/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true, + "license": "MIT" + }, "node_modules/verdaccio-htpasswd": { - "version": "13.0.0-next-8.1", - "resolved": "https://registry.npmjs.org/verdaccio-htpasswd/-/verdaccio-htpasswd-13.0.0-next-8.1.tgz", - "integrity": "sha512-BfvmO+ZdbwfttOwrdTPD6Bccr1ZfZ9Tk/9wpXamxdWB/XPWlk3FtyGsvqCmxsInRLPhQ/FSk9c3zRCGvICTFYg==", + "version": "13.0.0-next-8.3", + "resolved": "https://registry.npmjs.org/verdaccio-htpasswd/-/verdaccio-htpasswd-13.0.0-next-8.3.tgz", + "integrity": "sha512-Nl2rEEyGHJQ/1/93BE9TxBMRN4tKF/51VYTb+hWDQFhDEDAR20rcvJ4ND0jOIIZljI7Lg/WrCPBh90u5IyPJ5Q==", "dev": true, "license": "MIT", "dependencies": { - "@verdaccio/core": "8.0.0-next-8.1", - "@verdaccio/file-locking": "13.0.0-next-8.0", + "@verdaccio/core": "8.0.0-next-8.3", + "@verdaccio/file-locking": "13.0.0-next-8.1", "apache-md5": "1.1.8", "bcryptjs": "2.4.3", "core-js": "3.37.1", @@ -30830,7 +31153,7 @@ "unix-crypt-td-js": "1.1.4" }, "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { "type": "opencollective", @@ -30838,16 +31161,16 @@ } }, "node_modules/verdaccio-htpasswd/node_modules/@verdaccio/file-locking": { - "version": "13.0.0-next-8.0", - "resolved": "https://registry.npmjs.org/@verdaccio/file-locking/-/file-locking-13.0.0-next-8.0.tgz", - "integrity": 
"sha512-28XRwpKiE3Z6KsnwE7o8dEM+zGWOT+Vef7RVJyUlG176JVDbGGip3HfCmFioE1a9BklLyGEFTu6D69BzfbRkzA==", + "version": "13.0.0-next-8.1", + "resolved": "https://registry.npmjs.org/@verdaccio/file-locking/-/file-locking-13.0.0-next-8.1.tgz", + "integrity": "sha512-9PhfRKXsWaWJkON/2/jdG5/N+9Kk4UINvbMGuJz0A/PbzIYfVrBhry7fcnjn6hFKxVPTbSOSSztRzLF30nmBFg==", "dev": true, "license": "MIT", "dependencies": { "lockfile": "1.0.4" }, "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { "type": "opencollective", @@ -30873,19 +31196,6 @@ "dev": true, "license": "Python-2.0" }, - "node_modules/verdaccio/node_modules/envinfo": { - "version": "7.13.0", - "resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.13.0.tgz", - "integrity": "sha512-cvcaMr7KqXVh4nyzGTVqTum+gAiL265x5jUWQIDLq//zOGbW+gSW/C+OWLleY/rs9Qole6AZLMXPbtIFQbqu+Q==", - "dev": true, - "license": "MIT", - "bin": { - "envinfo": "dist/cli.js" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/verdaccio/node_modules/handlebars": { "version": "4.7.8", "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz", @@ -31304,11 +31614,14 @@ "license": "ISC" }, "node_modules/yaml": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.1.tgz", - "integrity": "sha512-2eHWfjaoXgTBC2jNM1LRef62VQa0umtvRiDSk6HSzW7RvS5YtkabJrwYLLEKWBc8a5U2PTSCs+dJjUTJdlHsWQ==", + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.5.1.tgz", + "integrity": "sha512-bLQOjaX/ADgQ20isPJRvF0iRUHIxVhYvr53Of7wGcWlO2jvtUlH5m87DsmulFVxRpNLOnI4tB6p/oh8D7kpn9Q==", "dev": true, "license": "ISC", + "bin": { + "yaml": "bin.mjs" + }, "engines": { "node": ">= 14" } @@ -31445,10 +31758,10 @@ }, "packages/ai-constructs": { "name": "@aws-amplify/ai-constructs", - "version": "0.2.0", + "version": "0.7.0", "license": "Apache-2.0", "dependencies": { - "@aws-amplify/backend-output-schemas": "^1.2.1", + "@aws-amplify/backend-output-schemas": "^1.4.0", "@aws-amplify/platform-core": "^1.1.0", "@aws-amplify/plugin-types": "^1.0.1", "@aws-sdk/client-bedrock-runtime": "^3.622.0", @@ -31460,7 +31773,7 @@ "typescript": "^5.0.0" }, "peerDependencies": { - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0" } }, @@ -31470,32 +31783,32 @@ }, "packages/auth-construct": { "name": "@aws-amplify/auth-construct", - "version": "1.3.1", + "version": "1.3.2", "license": "Apache-2.0", "dependencies": { - "@aws-amplify/backend-output-schemas": "^1.1.0", + "@aws-amplify/backend-output-schemas": "^1.4.0", "@aws-amplify/backend-output-storage": "^1.1.2", "@aws-amplify/plugin-types": "^1.2.2", "@aws-sdk/util-arn-parser": "^3.568.0" }, "peerDependencies": { - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0" } }, "packages/backend": { "name": "@aws-amplify/backend", - "version": "1.3.1", + "version": "1.5.2", "license": "Apache-2.0", "dependencies": { "@aws-amplify/backend-auth": "^1.2.0", - "@aws-amplify/backend-data": "^1.1.4", - "@aws-amplify/backend-function": "^1.5.0", - "@aws-amplify/backend-output-schemas": "^1.2.1", + "@aws-amplify/backend-data": "^1.1.5", + "@aws-amplify/backend-function": "^1.7.2", + "@aws-amplify/backend-output-schemas": "^1.4.0", "@aws-amplify/backend-output-storage": "^1.1.2", - "@aws-amplify/backend-secret": "^1.1.2", + "@aws-amplify/backend-secret": "^1.1.4", "@aws-amplify/backend-storage": "^1.2.1", - "@aws-amplify/client-config": "^1.4.0", + "@aws-amplify/client-config": "^1.5.0", "@aws-amplify/data-schema": "^1.0.0", "@aws-amplify/platform-core": 
"^1.1.0", "@aws-amplify/plugin-types": "^1.3.0", @@ -31508,23 +31821,24 @@ "aws-lambda": "^1.0.7" }, "peerDependencies": { - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0" } }, "packages/backend-ai": { "name": "@aws-amplify/backend-ai", - "version": "0.1.2", + "version": "0.3.4", "license": "Apache-2.0", "dependencies": { - "@aws-amplify/ai-constructs": "^0.2.0", - "@aws-amplify/backend-output-schemas": "^1.2.1", + "@aws-amplify/ai-constructs": "^0.7.0", + "@aws-amplify/backend-output-schemas": "^1.4.0", "@aws-amplify/backend-output-storage": "^1.0.2", + "@aws-amplify/data-schema-types": "^1.2.0", "@aws-amplify/platform-core": "^1.1.0", "@aws-amplify/plugin-types": "^1.0.1" }, "peerDependencies": { - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0" } }, @@ -31542,19 +31856,19 @@ "@aws-amplify/platform-core": "^1.0.6" }, "peerDependencies": { - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0" } }, "packages/backend-data": { "name": "@aws-amplify/backend-data", - "version": "1.1.4", + "version": "1.1.5", "license": "Apache-2.0", "dependencies": { - "@aws-amplify/backend-output-schemas": "^1.1.0", + "@aws-amplify/backend-output-schemas": "^1.4.0", "@aws-amplify/backend-output-storage": "^1.1.2", "@aws-amplify/data-construct": "^1.10.1", - "@aws-amplify/data-schema-types": "^1.1.1", + "@aws-amplify/data-schema-types": "^1.2.0", "@aws-amplify/plugin-types": "^1.2.2" }, "devDependencies": { @@ -31563,13 +31877,13 @@ "@aws-amplify/platform-core": "^1.0.7" }, "peerDependencies": { - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0" } }, "packages/backend-deployer": { "name": "@aws-amplify/backend-deployer", - "version": "1.1.4", + "version": "1.1.5", "license": "Apache-2.0", "dependencies": { "@aws-amplify/platform-core": "^1.0.6", @@ -31578,16 +31892,16 @@ "tsx": "^4.6.1" }, "peerDependencies": { - "aws-cdk": "^2.152.0", + "aws-cdk": "^2.158.0", "typescript": "^5.0.0" } }, "packages/backend-function": { "name": "@aws-amplify/backend-function", - "version": "1.5.0", + "version": "1.7.2", "license": "Apache-2.0", "dependencies": { - "@aws-amplify/backend-output-schemas": "^1.1.0", + "@aws-amplify/backend-output-schemas": "^1.4.0", "@aws-amplify/backend-output-storage": "^1.1.2", "@aws-amplify/plugin-types": "^1.3.0", "execa": "^8.0.1" @@ -31600,7 +31914,7 @@ "uuid": "^9.0.1" }, "peerDependencies": { - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0" } }, @@ -31620,7 +31934,7 @@ }, "packages/backend-output-schemas": { "name": "@aws-amplify/backend-output-schemas", - "version": "1.2.1", + "version": "1.4.0", "license": "Apache-2.0", "devDependencies": { "@aws-amplify/plugin-types": "^1.2.0" @@ -31639,7 +31953,7 @@ "@aws-amplify/plugin-types": "^1.2.2" }, "peerDependencies": { - "aws-cdk-lib": "^2.152.0" + "aws-cdk-lib": "^2.158.0" } }, "packages/backend-platform-test-stubs": { @@ -31648,13 +31962,13 @@ "license": "Apache-2.0", "dependencies": { "@aws-amplify/plugin-types": "^1.2.2", - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0" } }, "packages/backend-secret": { "name": "@aws-amplify/backend-secret", - "version": "1.1.3", + "version": "1.1.4", "license": "Apache-2.0", "dependencies": { "@aws-amplify/platform-core": "^1.0.5", @@ -31679,20 +31993,20 @@ "@aws-amplify/platform-core": "^1.0.6" }, "peerDependencies": { - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0" } }, 
"packages/cli": { "name": "@aws-amplify/backend-cli", - "version": "1.2.9", + "version": "1.3.0", "license": "Apache-2.0", "dependencies": { "@aws-amplify/backend-deployer": "^1.1.3", - "@aws-amplify/backend-output-schemas": "^1.2.1", + "@aws-amplify/backend-output-schemas": "^1.4.0", "@aws-amplify/backend-secret": "^1.1.2", "@aws-amplify/cli-core": "^1.1.3", - "@aws-amplify/client-config": "^1.4.0", + "@aws-amplify/client-config": "^1.5.0", "@aws-amplify/deployed-backend-client": "^1.4.1", "@aws-amplify/form-generator": "^1.0.3", "@aws-amplify/model-generator": "^1.0.8", @@ -31839,10 +32153,10 @@ }, "packages/client-config": { "name": "@aws-amplify/client-config", - "version": "1.4.0", + "version": "1.5.0", "license": "Apache-2.0", "dependencies": { - "@aws-amplify/backend-output-schemas": "^1.2.1", + "@aws-amplify/backend-output-schemas": "^1.4.0", "@aws-amplify/deployed-backend-client": "^1.4.1", "@aws-amplify/model-generator": "^1.0.7", "@aws-amplify/platform-core": "^1.0.7", @@ -31976,7 +32290,7 @@ }, "packages/deployed-backend-client": { "name": "@aws-amplify/deployed-backend-client", - "version": "1.4.1", + "version": "1.4.2", "license": "Apache-2.0", "dependencies": { "@aws-amplify/backend-output-schemas": "^1.2.0", @@ -32028,11 +32342,11 @@ "license": "Apache-2.0", "devDependencies": { "@apollo/client": "^3.10.1", - "@aws-amplify/ai-constructs": "^0.2.0", + "@aws-amplify/ai-constructs": "^0.7.0", "@aws-amplify/auth-construct": "^1.3.1", - "@aws-amplify/backend": "^1.3.1", - "@aws-amplify/backend-ai": "^0.1.2", - "@aws-amplify/backend-secret": "^1.1.2", + "@aws-amplify/backend": "^1.5.2", + "@aws-amplify/backend-ai": "^0.3.4", + "@aws-amplify/backend-secret": "^1.1.4", "@aws-amplify/client-config": "^1.4.0", "@aws-amplify/data-schema": "^1.0.0", "@aws-amplify/deployed-backend-client": "^1.4.1", @@ -32053,9 +32367,10 @@ "@aws-sdk/credential-providers": "^3.624.0", "@smithy/shared-ini-file-loader": "^2.2.5", "@types/lodash.ismatch": "^4.4.9", + "@zip.js/zip.js": "^2.7.52", "aws-amplify": "^6.0.16", "aws-appsync-auth-link": "^3.0.7", - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0", "execa": "^8.0.1", "fs-extra": "^11.1.1", @@ -32145,7 +32460,7 @@ }, "peerDependencies": { "@aws-sdk/types": "^3.609.0", - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0" } }, @@ -32267,7 +32582,7 @@ }, "packages/sandbox": { "name": "@aws-amplify/sandbox", - "version": "1.2.2", + "version": "1.2.3", "license": "Apache-2.0", "dependencies": { "@aws-amplify/backend-deployer": "^1.1.3", @@ -32293,7 +32608,7 @@ "@types/parse-gitignore": "^1.0.0" }, "peerDependencies": { - "aws-cdk": "^2.152.0" + "aws-cdk": "^2.158.0" } }, "packages/schema-generator": { @@ -32301,7 +32616,7 @@ "version": "1.2.4", "license": "Apache-2.0", "dependencies": { - "@aws-amplify/graphql-schema-generator": "^0.9.4", + "@aws-amplify/graphql-schema-generator": "^0.11.0", "@aws-amplify/platform-core": "^1.0.5" } } diff --git a/package.json b/package.json index 4a0fb2be0f..56f0ad37f5 100644 --- a/package.json +++ b/package.json @@ -57,6 +57,7 @@ "@actions/github": "^6.0.0", "@aws-sdk/client-amplify": "^3.624.0", "@aws-sdk/client-cloudformation": "^3.624.0", + "@aws-sdk/client-cloudwatch-logs": "^3.624.0", "@aws-sdk/client-cognito-identity-provider": "^3.624.0", "@aws-sdk/client-dynamodb": "^3.624.0", "@aws-sdk/client-iam": "^3.624.0", @@ -89,14 +90,14 @@ "fs-extra": "^11.1.1", "glob": "^10.1.0", "husky": "^8.0.3", - "lint-staged": "^13.2.1", + "lint-staged": "^15.2.10", 
"prettier": "^2.8.7", "rimraf": "^5.0.0", "semver": "^7.5.4", "tsx": "^4.6.1", "typedoc": "^0.25.3", "typescript": "~5.2.0", - "verdaccio": "^5.24.1" + "verdaccio": "^6.0.1" }, "workspaces": [ "packages/*" diff --git a/packages/ai-constructs/API.md b/packages/ai-constructs/API.md index 6719fa2b59..a5621d37df 100644 --- a/packages/ai-constructs/API.md +++ b/packages/ai-constructs/API.md @@ -25,8 +25,6 @@ export { __export__conversation } declare namespace __export__conversation__runtime { export { - ConversationMessage, - ConversationMessageContentBlock, ConversationTurnEvent, createExecutableTool, ExecutableTool, @@ -56,34 +54,15 @@ type ConversationHandlerFunctionProps = { modelId: string; region?: string; }>; + memoryMB?: number; outputStorageStrategy?: BackendOutputStorageStrategy; }; -// @public (undocumented) -type ConversationMessage = { - role: 'user' | 'assistant'; - content: Array; -}; - -// @public (undocumented) -type ConversationMessageContentBlock = bedrock.ContentBlock | { - image: Omit & { - source: { - bytes: string; - }; - }; - text?: never; - document?: never; - toolUse?: never; - toolResult?: never; - guardContent?: never; - $unknown?: never; -}; - // @public (undocumented) type ConversationTurnEvent = { conversationId: string; currentMessageId: string; + streamResponse?: boolean; responseMutation: { name: string; inputTypeName: string; @@ -103,7 +82,6 @@ type ConversationTurnEvent = { request: { headers: Record; }; - messages?: Array; messageHistoryQuery: { getQueryName: string; getQueryInputTypeName: string; diff --git a/packages/ai-constructs/CHANGELOG.md b/packages/ai-constructs/CHANGELOG.md index 174ce5d252..48831fdf10 100644 --- a/packages/ai-constructs/CHANGELOG.md +++ b/packages/ai-constructs/CHANGELOG.md @@ -1,5 +1,62 @@ # @aws-amplify/ai-constructs +## 0.8.1 + +### Patch Changes + +- 1af5060: Add metadata to user agent in conversation handler runtime. +- Updated dependencies [583a3f2] + - @aws-amplify/platform-core@1.2.0 + +## 0.8.0 + +### Minor Changes + +- 37dd87c: Propagate errors to AppSync + +### Patch Changes + +- 613bca9: Remove tool usage for non current turns when looking up message history +- b56d344: update aws-cdk lib to ^2.158.0 +- Updated dependencies [b56d344] + - @aws-amplify/plugin-types@1.3.1 + +## 0.7.0 + +### Minor Changes + +- 63fb254: Include accumulated turn content in chunk mutation + +## 0.6.2 + +### Patch Changes + +- bd4ff4d: Add memory setting to conversation handler +- Updated dependencies [5f46d8d] + - @aws-amplify/backend-output-schemas@1.4.0 + +## 0.6.1 + +### Patch Changes + +- 91e7f3c: Parse client side tool json elements + +## 0.6.0 + +### Minor Changes + +- b6761b0: Stream Bedrock responses + +## 0.5.0 + +### Minor Changes + +- 46a0e85: Remove deprecated messages field from event + +### Patch Changes + +- faacd1b: Fix case where bedrock content blocks would be populated with 'null' instead of 'undefined. 
+ ## 0.4.0 ### Minor Changes diff --git a/packages/ai-constructs/package.json b/packages/ai-constructs/package.json index 21fbfa18c4..4ca1106feb 100644 --- a/packages/ai-constructs/package.json +++ b/packages/ai-constructs/package.json @@ -1,6 +1,6 @@ { "name": "@aws-amplify/ai-constructs", - "version": "0.4.0", + "version": "0.8.1", "type": "commonjs", "publishConfig": { "access": "public" @@ -26,19 +26,19 @@ }, "license": "Apache-2.0", "dependencies": { - "@aws-amplify/backend-output-schemas": "^1.3.0", - "@aws-amplify/platform-core": "^1.1.0", - "@aws-amplify/plugin-types": "^1.0.1", + "@aws-amplify/backend-output-schemas": "^1.4.0", + "@aws-amplify/platform-core": "^1.2.0", + "@aws-amplify/plugin-types": "^1.3.1", "@aws-sdk/client-bedrock-runtime": "^3.622.0", "@smithy/types": "^3.3.0", "json-schema-to-ts": "^3.1.1" }, "devDependencies": { - "@aws-amplify/backend-output-storage": "^1.1.2", + "@aws-amplify/backend-output-storage": "^1.1.3", "typescript": "^5.0.0" }, "peerDependencies": { - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0" } } diff --git a/packages/ai-constructs/src/conversation/conversation_handler_construct.test.ts b/packages/ai-constructs/src/conversation/conversation_handler_construct.test.ts index 93b87e8ea3..b0130e711f 100644 --- a/packages/ai-constructs/src/conversation/conversation_handler_construct.test.ts +++ b/packages/ai-constructs/src/conversation/conversation_handler_construct.test.ts @@ -82,7 +82,10 @@ void describe('Conversation Handler Function construct', () => { PolicyDocument: { Statement: [ { - Action: 'bedrock:InvokeModel', + Action: [ + 'bedrock:InvokeModel', + 'bedrock:InvokeModelWithResponseStream', + ], Effect: 'Allow', Resource: [ 'arn:aws:bedrock:region1::foundation-model/model1', @@ -115,7 +118,10 @@ void describe('Conversation Handler Function construct', () => { PolicyDocument: { Statement: [ { - Action: 'bedrock:InvokeModel', + Action: [ + 'bedrock:InvokeModel', + 'bedrock:InvokeModelWithResponseStream', + ], Effect: 'Allow', Resource: { 'Fn::Join': [ @@ -216,4 +222,66 @@ void describe('Conversation Handler Function construct', () => { Handler: 'index.handler', }); }); + + void describe('memory property', () => { + void it('sets valid memory', () => { + const app = new App(); + const stack = new Stack(app); + new ConversationHandlerFunction(stack, 'conversationHandler', { + models: [], + memoryMB: 234, + }); + const template = Template.fromStack(stack); + + template.hasResourceProperties('AWS::Lambda::Function', { + MemorySize: 234, + }); + }); + + void it('sets default memory', () => { + const app = new App(); + const stack = new Stack(app); + new ConversationHandlerFunction(stack, 'conversationHandler', { + models: [], + }); + const template = Template.fromStack(stack); + + template.hasResourceProperties('AWS::Lambda::Function', { + MemorySize: 512, + }); + }); + + void it('throws on memory below 128 MB', () => { + assert.throws(() => { + const app = new App(); + const stack = new Stack(app); + new ConversationHandlerFunction(stack, 'conversationHandler', { + models: [], + memoryMB: 127, + }); + }, new Error('memoryMB must be a whole number between 128 and 10240 inclusive')); + }); + + void it('throws on memory above 10240 MB', () => { + assert.throws(() => { + const app = new App(); + const stack = new Stack(app); + new ConversationHandlerFunction(stack, 'conversationHandler', { + models: [], + memoryMB: 10241, + }); + }, new Error('memoryMB must be a whole number between 128 and 10240 inclusive')); + }); + 
+ void it('throws on fractional memory', () => { + assert.throws(() => { + const app = new App(); + const stack = new Stack(app); + new ConversationHandlerFunction(stack, 'conversationHandler', { + models: [], + memoryMB: 256.2, + }); + }, new Error('memoryMB must be a whole number between 128 and 10240 inclusive')); + }); + }); }); diff --git a/packages/ai-constructs/src/conversation/conversation_handler_construct.ts b/packages/ai-constructs/src/conversation/conversation_handler_construct.ts index 8f3c90efbd..995b92fed6 100644 --- a/packages/ai-constructs/src/conversation/conversation_handler_construct.ts +++ b/packages/ai-constructs/src/conversation/conversation_handler_construct.ts @@ -34,6 +34,12 @@ export type ConversationHandlerFunctionProps = { modelId: string; region?: string; }>; + /** + * An amount of memory (RAM) to allocate to the function between 128 and 10240 MB. + * Must be a whole number. + * Default is 512MB. + */ + memoryMB?: number; /** * @internal */ @@ -86,6 +92,7 @@ export class ConversationHandlerFunction timeout: Duration.seconds(60), entry: this.props.entry ?? defaultHandlerFilePath, handler: 'handler', + memorySize: this.resolveMemory(), bundling: { // Do not bundle SDK if conversation handler is using our default implementation which is // compatible with Lambda provided SDK. @@ -117,7 +124,10 @@ export class ConversationHandlerFunction conversationHandler.addToRolePolicy( new PolicyStatement({ effect: Effect.ALLOW, - actions: ['bedrock:InvokeModel'], + actions: [ + 'bedrock:InvokeModel', + 'bedrock:InvokeModelWithResponseStream', + ], resources, }) ); @@ -150,4 +160,27 @@ export class ConversationHandlerFunction }, }); }; + + private resolveMemory = () => { + const memoryMin = 128; + const memoryMax = 10240; + const memoryDefault = 512; + if (this.props.memoryMB === undefined) { + return memoryDefault; + } + if ( + !isWholeNumberBetweenInclusive(this.props.memoryMB, memoryMin, memoryMax) + ) { + throw new Error( + `memoryMB must be a whole number between ${memoryMin} and ${memoryMax} inclusive` + ); + } + return this.props.memoryMB; + }; } + +const isWholeNumberBetweenInclusive = ( + test: number, + min: number, + max: number +) => min <= test && test <= max && test % 1 === 0; diff --git a/packages/ai-constructs/src/conversation/runtime/bedrock_converse_adapter.test.ts b/packages/ai-constructs/src/conversation/runtime/bedrock_converse_adapter.test.ts index d1c2afedbe..38a32a0128 100644 --- a/packages/ai-constructs/src/conversation/runtime/bedrock_converse_adapter.test.ts +++ b/packages/ai-constructs/src/conversation/runtime/bedrock_converse_adapter.test.ts @@ -4,15 +4,20 @@ import { ConversationMessage, ConversationTurnEvent, ExecutableTool, + StreamingResponseChunk, ToolDefinition, } from './types'; import { BedrockConverseAdapter } from './bedrock_converse_adapter'; import { BedrockRuntimeClient, + ContentBlock, ConverseCommand, ConverseCommandInput, ConverseCommandOutput, + ConverseStreamCommandOutput, + ConverseStreamOutput, Message, + StopReason, ToolConfiguration, ToolInputSchema, ToolResultContentBlock, @@ -20,11 +25,12 @@ import { import { ConversationTurnEventToolsProvider } from './event-tools-provider'; import { randomBytes, randomUUID } from 'node:crypto'; import { ConversationMessageHistoryRetriever } from './conversation_message_history_retriever'; +import { UserAgentProvider } from './user_agent_provider'; void describe('Bedrock converse adapter', () => { const commonEvent: Readonly = { - conversationId: '', - currentMessageId: '', + 
conversationId: 'testConversationId', + currentMessageId: 'testCurrentMessageId', graphqlApiEndpoint: '', messageHistoryQuery: { getQueryName: '', @@ -70,315 +76,751 @@ void describe('Bedrock converse adapter', () => { } ); - void it('calls bedrock to get conversation response', async () => { - const event: ConversationTurnEvent = { - ...commonEvent, - }; + [false, true].forEach((streamResponse) => { + // This is a common set of use cases that both streaming and non-streaming version must support. + void describe(`${streamResponse ? 'with' : 'without'} streaming`, () => { + void it('calls bedrock to get conversation response', async () => { + const event: ConversationTurnEvent = { + ...commonEvent, + }; - const bedrockClient = new BedrockRuntimeClient(); - const bedrockResponse: ConverseCommandOutput = { - $metadata: {}, - metrics: undefined, - output: { - message: { - role: 'assistant', - content: [ + const bedrockClient = new BedrockRuntimeClient(); + const content = [{ text: 'block1' }, { text: 'block2' }]; + const bedrockResponse = mockBedrockResponse(content, streamResponse); + const bedrockClientSendMock = mock.method(bedrockClient, 'send', () => + Promise.resolve(bedrockResponse) + ); + + const adapter = new BedrockConverseAdapter( + event, + [], + bedrockClient, + undefined, + messageHistoryRetriever + ); + + if (streamResponse) { + const chunks: Array = + await askBedrockWithStreaming(adapter); + // Assertion below is verbose on purpose to assert that correct indexes are rendered. + // See mockConverseStreamCommandOutput below of how split chunks are mocked. + assert.deepStrictEqual(chunks, [ + { + accumulatedTurnContent: [ + { + text: 'b', + }, + ], + conversationId: event.conversationId, + associatedUserMessageId: event.currentMessageId, + contentBlockText: 'b', + contentBlockIndex: 0, + contentBlockDeltaIndex: 0, + }, + { + accumulatedTurnContent: [ + { + text: 'block1', + }, + ], + conversationId: event.conversationId, + associatedUserMessageId: event.currentMessageId, + contentBlockText: 'lock1', + contentBlockIndex: 0, + contentBlockDeltaIndex: 1, + }, + { + accumulatedTurnContent: [ + { + text: 'block1', + }, + ], + conversationId: event.conversationId, + associatedUserMessageId: event.currentMessageId, + contentBlockIndex: 0, + contentBlockDoneAtIndex: 1, + }, + { + accumulatedTurnContent: [ + { + text: 'block1', + }, + { + text: 'b', + }, + ], + conversationId: event.conversationId, + associatedUserMessageId: event.currentMessageId, + contentBlockText: 'b', + contentBlockIndex: 1, + contentBlockDeltaIndex: 0, + }, + { + accumulatedTurnContent: [ + { + text: 'block1', + }, + { + text: 'block2', + }, + ], + conversationId: event.conversationId, + associatedUserMessageId: event.currentMessageId, + contentBlockText: 'lock2', + contentBlockIndex: 1, + contentBlockDeltaIndex: 1, + }, + { + accumulatedTurnContent: [ + { + text: 'block1', + }, + { + text: 'block2', + }, + ], + conversationId: event.conversationId, + associatedUserMessageId: event.currentMessageId, + contentBlockIndex: 1, + contentBlockDoneAtIndex: 1, + }, { - text: 'block1', + accumulatedTurnContent: [ + { + text: 'block1', + }, + { + text: 'block2', + }, + ], + conversationId: event.conversationId, + associatedUserMessageId: event.currentMessageId, + contentBlockIndex: 1, + stopReason: 'end_turn', }, + ]); + } else { + const responseContent = await adapter.askBedrock(); + assert.deepStrictEqual(responseContent, content); + } + + assert.strictEqual(bedrockClientSendMock.mock.calls.length, 1); + const 
bedrockRequest = bedrockClientSendMock.mock.calls[0] + .arguments[0] as unknown as ConverseCommand; + const expectedBedrockInput: ConverseCommandInput = { + messages: messages as Array, + modelId: event.modelConfiguration.modelId, + inferenceConfig: event.modelConfiguration.inferenceConfiguration, + system: [ { - text: 'block2', + text: event.modelConfiguration.systemPrompt, }, ], - }, - }, - stopReason: 'end_turn', - usage: undefined, - }; - const bedrockClientSendMock = mock.method(bedrockClient, 'send', () => - Promise.resolve(bedrockResponse) - ); + toolConfig: undefined, + }; + assert.deepStrictEqual(bedrockRequest.input, expectedBedrockInput); + }); - const responseContent = await new BedrockConverseAdapter( - event, - [], - bedrockClient, - undefined, - messageHistoryRetriever - ).askBedrock(); + void it('uses executable tools while calling bedrock', async () => { + const additionalToolOutput: ToolResultContentBlock = { + text: 'additionalToolOutput', + }; + const additionalTool: ExecutableTool = { + name: 'additionalTool', + description: 'additional tool description', + inputSchema: { + json: { + required: ['additionalToolRequiredProperty'], + }, + }, + execute: () => Promise.resolve(additionalToolOutput), + }; + const eventToolOutput: ToolResultContentBlock = { + text: 'eventToolOutput', + }; + const eventTool: ExecutableTool = { + name: 'eventTool', + description: 'event tool description', + inputSchema: { + json: { + required: ['eventToolRequiredProperty'], + }, + }, + execute: () => Promise.resolve(eventToolOutput), + }; - assert.deepStrictEqual( - responseContent, - bedrockResponse.output?.message?.content - ); + const event: ConversationTurnEvent = { + ...commonEvent, + }; - assert.strictEqual(bedrockClientSendMock.mock.calls.length, 1); - const bedrockRequest = bedrockClientSendMock.mock.calls[0] - .arguments[0] as unknown as ConverseCommand; - const expectedBedrockInput: ConverseCommandInput = { - messages: messages as Array, - modelId: event.modelConfiguration.modelId, - inferenceConfig: event.modelConfiguration.inferenceConfiguration, - system: [ - { - text: event.modelConfiguration.systemPrompt, - }, - ], - toolConfig: undefined, - }; - assert.deepStrictEqual(bedrockRequest.input, expectedBedrockInput); - }); + const bedrockClient = new BedrockRuntimeClient(); + const bedrockResponseQueue: Array< + ConverseCommandOutput | ConverseStreamCommandOutput + > = []; + const additionalToolUse1 = { + toolUseId: randomUUID().toString(), + name: additionalTool.name, + input: 'additionalToolInput1', + }; + const additionalToolUse2 = { + toolUseId: randomUUID().toString(), + name: additionalTool.name, + input: 'additionalToolInput2', + }; + const additionalToolUseBedrockResponse = mockBedrockResponse( + [ + { + toolUse: additionalToolUse1, + }, + { + toolUse: additionalToolUse2, + }, + ], + streamResponse + ); + bedrockResponseQueue.push(additionalToolUseBedrockResponse); + const eventToolUse1 = { + toolUseId: randomUUID().toString(), + name: eventTool.name, + input: 'eventToolInput1', + }; + const eventToolUse2 = { + toolUseId: randomUUID().toString(), + name: eventTool.name, + input: 'eventToolInput2', + }; + const eventToolUseBedrockResponse = mockBedrockResponse( + [ + { + toolUse: eventToolUse1, + }, + { + toolUse: eventToolUse2, + }, + ], + streamResponse + ); + bedrockResponseQueue.push(eventToolUseBedrockResponse); + const content = [ + { + text: 'finalResponse', + }, + ]; + const finalBedrockResponse = mockBedrockResponse( + content, + streamResponse + ); + 
bedrockResponseQueue.push(finalBedrockResponse); - void it('uses executable tools while calling bedrock', async () => { - const additionalToolOutput: ToolResultContentBlock = { - text: 'additionalToolOutput', - }; - const additionalTool: ExecutableTool = { - name: 'additionalTool', - description: 'additional tool description', - inputSchema: { - json: { - required: ['additionalToolRequiredProperty'], - }, - }, - execute: () => Promise.resolve(additionalToolOutput), - }; - const eventToolOutput: ToolResultContentBlock = { - text: 'eventToolOutput', - }; - const eventTool: ExecutableTool = { - name: 'eventTool', - description: 'event tool description', - inputSchema: { - json: { - required: ['eventToolRequiredProperty'], - }, - }, - execute: () => Promise.resolve(eventToolOutput), - }; + const bedrockClientSendMock = mock.method(bedrockClient, 'send', () => + Promise.resolve(bedrockResponseQueue.shift()) + ); - const event: ConversationTurnEvent = { - ...commonEvent, - }; + const eventToolsProvider = new ConversationTurnEventToolsProvider( + event + ); + mock.method(eventToolsProvider, 'getEventTools', () => [eventTool]); - const bedrockClient = new BedrockRuntimeClient(); - const bedrockResponseQueue: Array = []; - const additionalToolUseBedrockResponse: ConverseCommandOutput = { - $metadata: {}, - metrics: undefined, - output: { - message: { - role: 'assistant', - content: [ + const adapter = new BedrockConverseAdapter( + event, + [additionalTool], + bedrockClient, + eventToolsProvider, + messageHistoryRetriever + ); + if (streamResponse) { + const chunks: Array = + await askBedrockWithStreaming(adapter); + const responseText = chunks.reduce((acc, next) => { + if (next.contentBlockText) { + acc += next.contentBlockText; + } + return acc; + }, ''); + assert.strictEqual(responseText, 'finalResponse'); + } else { + const responseContent = await adapter.askBedrock(); + assert.deepStrictEqual(responseContent, content); + } + + assert.strictEqual(bedrockClientSendMock.mock.calls.length, 3); + const expectedToolConfig: ToolConfiguration = { + tools: [ { - toolUse: { - toolUseId: randomUUID().toString(), - name: additionalTool.name, - input: 'additionalToolInput1', + toolSpec: { + name: eventTool.name, + description: eventTool.description, + inputSchema: eventTool.inputSchema as ToolInputSchema, }, }, { - toolUse: { - toolUseId: randomUUID().toString(), + toolSpec: { name: additionalTool.name, - input: 'additionalToolInput2', + description: additionalTool.description, + inputSchema: additionalTool.inputSchema as ToolInputSchema, }, }, ], - }, - }, - stopReason: 'tool_use', - usage: undefined, - }; - bedrockResponseQueue.push(additionalToolUseBedrockResponse); - const eventToolUseBedrockResponse: ConverseCommandOutput = { - $metadata: {}, - metrics: undefined, - output: { - message: { - role: 'assistant', - content: [ + }; + const expectedBedrockInputCommonProperties = { + modelId: event.modelConfiguration.modelId, + inferenceConfig: event.modelConfiguration.inferenceConfiguration, + system: [ { - toolUse: { - toolUseId: randomUUID().toString(), - name: eventTool.name, - input: 'eventToolToolInput1', - }, + text: event.modelConfiguration.systemPrompt, }, + ], + toolConfig: expectedToolConfig, + }; + const bedrockRequest1 = bedrockClientSendMock.mock.calls[0] + .arguments[0] as unknown as ConverseCommand; + const expectedBedrockInput1: ConverseCommandInput = { + messages: messages as Array, + ...expectedBedrockInputCommonProperties, + }; + assert.deepStrictEqual(bedrockRequest1.input, 
expectedBedrockInput1); + const bedrockRequest2 = bedrockClientSendMock.mock.calls[1] + .arguments[0] as unknown as ConverseCommand; + const expectedBedrockInput2: ConverseCommandInput = { + messages: [ + ...(messages as Array), { - toolUse: { - toolUseId: randomUUID().toString(), - name: eventTool.name, - input: 'eventToolToolInput2', - }, + role: 'assistant', + content: [ + { toolUse: additionalToolUse1 }, + { toolUse: additionalToolUse2 }, + ], + }, + { + role: 'user', + content: [ + { + toolResult: { + content: [additionalToolOutput], + status: 'success', + toolUseId: additionalToolUse1.toolUseId, + }, + }, + { + toolResult: { + content: [additionalToolOutput], + status: 'success', + toolUseId: additionalToolUse2.toolUseId, + }, + }, + ], }, ], - }, - }, - stopReason: 'tool_use', - usage: undefined, - }; - bedrockResponseQueue.push(eventToolUseBedrockResponse); - const finalBedrockResponse: ConverseCommandOutput = { - $metadata: {}, - metrics: undefined, - output: { - message: { - role: 'assistant', - content: [ + ...expectedBedrockInputCommonProperties, + }; + assert.deepStrictEqual(bedrockRequest2.input, expectedBedrockInput2); + const bedrockRequest3 = bedrockClientSendMock.mock.calls[2] + .arguments[0] as unknown as ConverseCommand; + assert.ok(expectedBedrockInput2.messages); + const expectedBedrockInput3: ConverseCommandInput = { + messages: [ + ...expectedBedrockInput2.messages, { - text: 'block1', + role: 'assistant', + content: [{ toolUse: eventToolUse1 }, { toolUse: eventToolUse2 }], }, { - text: 'block2', + role: 'user', + content: [ + { + toolResult: { + content: [eventToolOutput], + status: 'success', + toolUseId: eventToolUse1.toolUseId, + }, + }, + { + toolResult: { + content: [eventToolOutput], + status: 'success', + toolUseId: eventToolUse2.toolUseId, + }, + }, + ], }, ], - }, - }, - stopReason: 'end_turn', - usage: undefined, - }; - bedrockResponseQueue.push(finalBedrockResponse); + ...expectedBedrockInputCommonProperties, + }; + assert.deepStrictEqual(bedrockRequest3.input, expectedBedrockInput3); + }); - const bedrockClientSendMock = mock.method(bedrockClient, 'send', () => - Promise.resolve(bedrockResponseQueue.shift()) - ); + void it('executable tool error is reported to bedrock', async () => { + const tool: ExecutableTool = { + name: 'testTool', + description: 'tool description', + inputSchema: { + json: {}, + }, + execute: () => Promise.reject(new Error('Test tool error')), + }; - const eventToolsProvider = new ConversationTurnEventToolsProvider(event); - mock.method(eventToolsProvider, 'getEventTools', () => [eventTool]); + const event: ConversationTurnEvent = { + ...commonEvent, + }; - const responseContent = await new BedrockConverseAdapter( - event, - [additionalTool], - bedrockClient, - eventToolsProvider, - messageHistoryRetriever - ).askBedrock(); + const bedrockClient = new BedrockRuntimeClient(); + const bedrockResponseQueue: Array< + ConverseCommandOutput | ConverseStreamCommandOutput + > = []; + const toolUse = { + toolUseId: randomUUID().toString(), + name: tool.name, + input: 'testTool', + }; + const toolUseBedrockResponse = mockBedrockResponse( + [ + { + toolUse, + }, + ], + streamResponse + ); + bedrockResponseQueue.push(toolUseBedrockResponse); + const content = [{ text: 'finalResponse' }]; + const finalBedrockResponse = mockBedrockResponse( + content, + streamResponse + ); + bedrockResponseQueue.push(finalBedrockResponse); - assert.deepStrictEqual( - responseContent, - finalBedrockResponse.output?.message?.content - ); + const 
bedrockClientSendMock = mock.method(bedrockClient, 'send', () => + Promise.resolve(bedrockResponseQueue.shift()) + ); - assert.strictEqual(bedrockClientSendMock.mock.calls.length, 3); - const expectedToolConfig: ToolConfiguration = { - tools: [ - { - toolSpec: { - name: eventTool.name, - description: eventTool.description, - inputSchema: eventTool.inputSchema as ToolInputSchema, - }, - }, - { - toolSpec: { - name: additionalTool.name, - description: additionalTool.description, - inputSchema: additionalTool.inputSchema as ToolInputSchema, - }, - }, - ], - }; - const expectedBedrockInputCommonProperties = { - modelId: event.modelConfiguration.modelId, - inferenceConfig: event.modelConfiguration.inferenceConfiguration, - system: [ - { - text: event.modelConfiguration.systemPrompt, - }, - ], - toolConfig: expectedToolConfig, - }; - const bedrockRequest1 = bedrockClientSendMock.mock.calls[0] - .arguments[0] as unknown as ConverseCommand; - const expectedBedrockInput1: ConverseCommandInput = { - messages: messages as Array, - ...expectedBedrockInputCommonProperties, - }; - assert.deepStrictEqual(bedrockRequest1.input, expectedBedrockInput1); - const bedrockRequest2 = bedrockClientSendMock.mock.calls[1] - .arguments[0] as unknown as ConverseCommand; - assert.ok(additionalToolUseBedrockResponse.output?.message?.content); - assert.ok( - additionalToolUseBedrockResponse.output?.message?.content[0].toolUse - ?.toolUseId - ); - assert.ok( - additionalToolUseBedrockResponse.output?.message?.content[1].toolUse - ?.toolUseId - ); - const expectedBedrockInput2: ConverseCommandInput = { - messages: [ - ...(messages as Array), - additionalToolUseBedrockResponse.output?.message, - { + const adapter = new BedrockConverseAdapter( + event, + [tool], + bedrockClient, + undefined, + messageHistoryRetriever + ); + if (streamResponse) { + const chunks: Array = + await askBedrockWithStreaming(adapter); + const responseText = chunks.reduce((acc, next) => { + if (next.contentBlockText) { + acc += next.contentBlockText; + } + return acc; + }, ''); + assert.strictEqual(responseText, 'finalResponse'); + } else { + const responseContent = await adapter.askBedrock(); + assert.deepStrictEqual(responseContent, content); + } + + assert.strictEqual(bedrockClientSendMock.mock.calls.length, 2); + const bedrockRequest2 = bedrockClientSendMock.mock.calls[1] + .arguments[0] as unknown as ConverseCommand; + assert.deepStrictEqual(bedrockRequest2.input.messages?.pop(), { role: 'user', content: [ { toolResult: { - content: [additionalToolOutput], - status: 'success', - toolUseId: - additionalToolUseBedrockResponse.output?.message.content[0] - .toolUse.toolUseId, + content: [ + { + text: 'Error: Test tool error', + }, + ], + status: 'error', + toolUseId: toolUse.toolUseId, }, }, + ], + } as Message); + }); + + void it('executable tool error of unknown type is reported to bedrock', async () => { + const tool: ExecutableTool = { + name: 'testTool', + description: 'tool description', + inputSchema: { + json: {}, + }, + // This is intentional to cover logical branch that test for error type. 
+ // eslint-disable-next-line prefer-promise-reject-errors + execute: () => Promise.reject('Test tool error'), + }; + + const event: ConversationTurnEvent = { + ...commonEvent, + }; + + const bedrockClient = new BedrockRuntimeClient(); + const bedrockResponseQueue: Array< + ConverseCommandOutput | ConverseStreamCommandOutput + > = []; + const toolUse = { + toolUseId: randomUUID().toString(), + name: tool.name, + input: 'testTool', + }; + const toolUseBedrockResponse = mockBedrockResponse( + [ { - toolResult: { - content: [additionalToolOutput], - status: 'success', - toolUseId: - additionalToolUseBedrockResponse.output?.message.content[1] - .toolUse.toolUseId, - }, + toolUse, }, ], - }, - ], - ...expectedBedrockInputCommonProperties, - }; - assert.deepStrictEqual(bedrockRequest2.input, expectedBedrockInput2); - const bedrockRequest3 = bedrockClientSendMock.mock.calls[2] - .arguments[0] as unknown as ConverseCommand; - assert.ok(eventToolUseBedrockResponse.output?.message?.content); - assert.ok( - eventToolUseBedrockResponse.output?.message?.content[0].toolUse?.toolUseId - ); - assert.ok( - eventToolUseBedrockResponse.output?.message?.content[1].toolUse?.toolUseId - ); - assert.ok(expectedBedrockInput2.messages); - const expectedBedrockInput3: ConverseCommandInput = { - messages: [ - ...expectedBedrockInput2.messages, - eventToolUseBedrockResponse.output?.message, - { + streamResponse + ); + bedrockResponseQueue.push(toolUseBedrockResponse); + const content = [{ text: 'finalResponse' }]; + const finalBedrockResponse = mockBedrockResponse( + content, + streamResponse + ); + bedrockResponseQueue.push(finalBedrockResponse); + + const bedrockClientSendMock = mock.method(bedrockClient, 'send', () => + Promise.resolve(bedrockResponseQueue.shift()) + ); + + const adapter = new BedrockConverseAdapter( + event, + [tool], + bedrockClient, + undefined, + messageHistoryRetriever + ); + if (streamResponse) { + const chunks: Array = + await askBedrockWithStreaming(adapter); + const responseText = chunks.reduce((acc, next) => { + if (next.contentBlockText) { + acc += next.contentBlockText; + } + return acc; + }, ''); + assert.strictEqual(responseText, 'finalResponse'); + } else { + const responseContent = await adapter.askBedrock(); + assert.deepStrictEqual(responseContent, content); + } + + assert.strictEqual(bedrockClientSendMock.mock.calls.length, 2); + const bedrockRequest2 = bedrockClientSendMock.mock.calls[1] + .arguments[0] as unknown as ConverseCommand; + assert.deepStrictEqual(bedrockRequest2.input.messages?.pop(), { role: 'user', content: [ { toolResult: { - content: [eventToolOutput], - status: 'success', - toolUseId: - eventToolUseBedrockResponse.output?.message.content[0].toolUse - .toolUseId, + content: [ + { + text: 'unknown error occurred', + }, + ], + status: 'error', + toolUseId: toolUse.toolUseId, }, }, + ], + } as Message); + }); + + void it('returns client tool input block when client tool is requested and ignores executable tools', async () => { + const additionalToolOutput: ToolResultContentBlock = { + text: 'additionalToolOutput', + }; + const additionalTool: ExecutableTool = { + name: 'additionalTool', + description: 'additional tool description', + inputSchema: { + json: { + required: ['additionalToolRequiredProperty'], + }, + }, + execute: () => Promise.resolve(additionalToolOutput), + }; + const clientTool: ToolDefinition = { + name: 'clientTool', + description: 'client tool description', + inputSchema: { + json: { + required: ['clientToolRequiredProperty'], + }, + }, + }; + + 
const event: ConversationTurnEvent = { + ...commonEvent, + toolsConfiguration: { + clientTools: [clientTool], + }, + }; + + const bedrockClient = new BedrockRuntimeClient(); + const bedrockResponseQueue: Array< + ConverseCommandOutput | ConverseStreamCommandOutput + > = []; + const additionalToolUse = { + toolUseId: randomUUID().toString(), + name: additionalTool.name, + input: 'additionalToolInput', + }; + const clientToolUse = { + toolUseId: randomUUID().toString(), + name: clientTool.name, + input: 'clientToolInput', + }; + const toolUseBedrockResponse = mockBedrockResponse( + [ { - toolResult: { - content: [eventToolOutput], - status: 'success', - toolUseId: - eventToolUseBedrockResponse.output?.message.content[1].toolUse - .toolUseId, + toolUse: additionalToolUse, + }, + { toolUse: clientToolUse }, + ], + streamResponse + ); + bedrockResponseQueue.push(toolUseBedrockResponse); + + const bedrockClientSendMock = mock.method(bedrockClient, 'send', () => + Promise.resolve(bedrockResponseQueue.shift()) + ); + + const adapter = new BedrockConverseAdapter( + event, + [additionalTool], + bedrockClient, + undefined, + messageHistoryRetriever + ); + + if (streamResponse) { + const chunks: Array = + await askBedrockWithStreaming(adapter); + assert.deepStrictEqual(chunks, [ + { + accumulatedTurnContent: [{ toolUse: clientToolUse }], + conversationId: event.conversationId, + associatedUserMessageId: event.currentMessageId, + contentBlockIndex: 0, + contentBlockToolUse: JSON.stringify({ toolUse: clientToolUse }), + }, + { + accumulatedTurnContent: [{ toolUse: clientToolUse }], + conversationId: event.conversationId, + associatedUserMessageId: event.currentMessageId, + contentBlockIndex: 0, + stopReason: 'tool_use', + }, + ]); + } else { + const responseContent = await adapter.askBedrock(); + assert.deepStrictEqual(responseContent, [ + { + toolUse: clientToolUse, + }, + ]); + } + + assert.strictEqual(bedrockClientSendMock.mock.calls.length, 1); + const expectedToolConfig: ToolConfiguration = { + tools: [ + { + toolSpec: { + name: additionalTool.name, + description: additionalTool.description, + inputSchema: additionalTool.inputSchema as ToolInputSchema, + }, + }, + { + toolSpec: { + name: clientTool.name, + description: clientTool.description, + inputSchema: clientTool.inputSchema as ToolInputSchema, }, }, ], - }, - ], - ...expectedBedrockInputCommonProperties, - }; - assert.deepStrictEqual(bedrockRequest3.input, expectedBedrockInput3); + }; + const expectedBedrockInputCommonProperties = { + modelId: event.modelConfiguration.modelId, + inferenceConfig: event.modelConfiguration.inferenceConfiguration, + system: [ + { + text: event.modelConfiguration.systemPrompt, + }, + ], + toolConfig: expectedToolConfig, + }; + const bedrockRequest = bedrockClientSendMock.mock.calls[0] + .arguments[0] as unknown as ConverseCommand; + const expectedBedrockInput: ConverseCommandInput = { + messages: messages as Array, + ...expectedBedrockInputCommonProperties, + }; + assert.deepStrictEqual(bedrockRequest.input, expectedBedrockInput); + }); + + void it('decodes base64 encoded images', async () => { + const event: ConversationTurnEvent = { + ...commonEvent, + }; + + const fakeImagePayload = randomBytes(32); + + messageHistoryRetrieverMockGetEventMessages.mock.mockImplementationOnce( + () => { + return Promise.resolve([ + { + id: '', + conversationId: '', + role: 'user', + content: [ + { + image: { + format: 'png', + source: { + bytes: fakeImagePayload.toString('base64'), + }, + }, + }, + ], + }, + ]); + } + ); + + 
const bedrockClient = new BedrockRuntimeClient(); + const content = [{ text: 'block1' }, { text: 'block2' }]; + const bedrockResponse = mockBedrockResponse(content, streamResponse); + const bedrockClientSendMock = mock.method(bedrockClient, 'send', () => + Promise.resolve(bedrockResponse) + ); + + await new BedrockConverseAdapter( + event, + [], + bedrockClient, + undefined, + messageHistoryRetriever + ).askBedrock(); + + assert.strictEqual(bedrockClientSendMock.mock.calls.length, 1); + const bedrockRequest = bedrockClientSendMock.mock.calls[0] + .arguments[0] as unknown as ConverseCommand; + assert.deepStrictEqual(bedrockRequest.input.messages, [ + { + role: 'user', + content: [ + { + image: { + format: 'png', + source: { + bytes: fakeImagePayload, + }, + }, + }, + ], + }, + ]); + }); + }); }); void it('throws if tool is duplicated', () => { @@ -451,412 +893,25 @@ void describe('Bedrock converse adapter', () => { ); }); - void it('executable tool error is reported to bedrock', async () => { - const tool: ExecutableTool = { - name: 'testTool', - description: 'tool description', - inputSchema: { - json: {}, - }, - execute: () => Promise.reject(new Error('Test tool error')), - }; - - const event: ConversationTurnEvent = { - ...commonEvent, - }; - - const bedrockClient = new BedrockRuntimeClient(); - const bedrockResponseQueue: Array = []; - const toolUseBedrockResponse: ConverseCommandOutput = { - $metadata: {}, - metrics: undefined, - output: { - message: { - role: 'assistant', - content: [ - { - toolUse: { - toolUseId: randomUUID().toString(), - name: tool.name, - input: 'testTool', - }, - }, - ], - }, - }, - stopReason: 'tool_use', - usage: undefined, - }; - bedrockResponseQueue.push(toolUseBedrockResponse); - const finalBedrockResponse: ConverseCommandOutput = { - $metadata: {}, - metrics: undefined, - output: { - message: { - role: 'assistant', - content: [ - { - text: 'finalResponse', - }, - ], - }, - }, - stopReason: 'end_turn', - usage: undefined, - }; - bedrockResponseQueue.push(finalBedrockResponse); - - const bedrockClientSendMock = mock.method(bedrockClient, 'send', () => - Promise.resolve(bedrockResponseQueue.shift()) - ); - - const responseContent = await new BedrockConverseAdapter( - event, - [tool], - bedrockClient, - undefined, - messageHistoryRetriever - ).askBedrock(); - - assert.deepStrictEqual( - responseContent, - finalBedrockResponse.output?.message?.content - ); - - assert.strictEqual(bedrockClientSendMock.mock.calls.length, 2); - const bedrockRequest2 = bedrockClientSendMock.mock.calls[1] - .arguments[0] as unknown as ConverseCommand; - assert.ok(toolUseBedrockResponse.output?.message?.content); - assert.deepStrictEqual(bedrockRequest2.input.messages?.pop(), { - role: 'user', - content: [ - { - toolResult: { - content: [ - { - text: 'Error: Test tool error', - }, - ], - status: 'error', - toolUseId: - toolUseBedrockResponse.output?.message.content[0].toolUse - ?.toolUseId, - }, - }, - ], - } as Message); - }); - - void it('executable tool error of unknown type is reported to bedrock', async () => { - const tool: ExecutableTool = { - name: 'testTool', - description: 'tool description', - inputSchema: { - json: {}, - }, - // This is intentional to cover logical branch that test for error type. 
- // eslint-disable-next-line prefer-promise-reject-errors - execute: () => Promise.reject('Test tool error'), - }; - - const event: ConversationTurnEvent = { - ...commonEvent, - }; - - const bedrockClient = new BedrockRuntimeClient(); - const bedrockResponseQueue: Array = []; - const toolUseBedrockResponse: ConverseCommandOutput = { - $metadata: {}, - metrics: undefined, - output: { - message: { - role: 'assistant', - content: [ - { - toolUse: { - toolUseId: randomUUID().toString(), - name: tool.name, - input: 'testTool', - }, - }, - ], - }, - }, - stopReason: 'tool_use', - usage: undefined, - }; - bedrockResponseQueue.push(toolUseBedrockResponse); - const finalBedrockResponse: ConverseCommandOutput = { - $metadata: {}, - metrics: undefined, - output: { - message: { - role: 'assistant', - content: [ - { - text: 'finalResponse', - }, - ], - }, - }, - stopReason: 'end_turn', - usage: undefined, - }; - bedrockResponseQueue.push(finalBedrockResponse); - - const bedrockClientSendMock = mock.method(bedrockClient, 'send', () => - Promise.resolve(bedrockResponseQueue.shift()) - ); - - const responseContent = await new BedrockConverseAdapter( - event, - [tool], - bedrockClient, - undefined, - messageHistoryRetriever - ).askBedrock(); - - assert.deepStrictEqual( - responseContent, - finalBedrockResponse.output?.message?.content - ); - - assert.strictEqual(bedrockClientSendMock.mock.calls.length, 2); - const bedrockRequest2 = bedrockClientSendMock.mock.calls[1] - .arguments[0] as unknown as ConverseCommand; - assert.ok(toolUseBedrockResponse.output?.message?.content); - assert.deepStrictEqual(bedrockRequest2.input.messages?.pop(), { - role: 'user', - content: [ - { - toolResult: { - content: [ - { - text: 'unknown error occurred', - }, - ], - status: 'error', - toolUseId: - toolUseBedrockResponse.output?.message.content[0].toolUse - ?.toolUseId, - }, - }, - ], - } as Message); - }); - - void it('returns client tool input block when client tool is requested and ignores executable tools', async () => { - const additionalToolOutput: ToolResultContentBlock = { - text: 'additionalToolOutput', - }; - const additionalTool: ExecutableTool = { - name: 'additionalTool', - description: 'additional tool description', - inputSchema: { - json: { - required: ['additionalToolRequiredProperty'], - }, - }, - execute: () => Promise.resolve(additionalToolOutput), - }; - const clientTool: ToolDefinition = { - name: 'clientTool', - description: 'client tool description', - inputSchema: { - json: { - required: ['clientToolRequiredProperty'], - }, - }, - }; - - const event: ConversationTurnEvent = { - ...commonEvent, - toolsConfiguration: { - clientTools: [clientTool], - }, - }; - - const bedrockClient = new BedrockRuntimeClient(); - const bedrockResponseQueue: Array = []; - const clientToolUseBlock = { - toolUse: { - toolUseId: randomUUID().toString(), - name: clientTool.name, - input: 'clientToolInput', - }, - }; - const toolUseBedrockResponse: ConverseCommandOutput = { - $metadata: {}, - metrics: undefined, - output: { - message: { - role: 'assistant', - content: [ - { - toolUse: { - toolUseId: randomUUID().toString(), - name: additionalTool.name, - input: 'additionalToolInput', - }, - }, - clientToolUseBlock, - ], - }, - }, - stopReason: 'tool_use', - usage: undefined, - }; - bedrockResponseQueue.push(toolUseBedrockResponse); - - const bedrockClientSendMock = mock.method(bedrockClient, 'send', () => - Promise.resolve(bedrockResponseQueue.shift()) - ); - - const responseContent = await new BedrockConverseAdapter( - 
event, - [additionalTool], - bedrockClient, - undefined, - messageHistoryRetriever - ).askBedrock(); - - assert.deepStrictEqual(responseContent, [clientToolUseBlock]); - - assert.strictEqual(bedrockClientSendMock.mock.calls.length, 1); - const expectedToolConfig: ToolConfiguration = { - tools: [ - { - toolSpec: { - name: additionalTool.name, - description: additionalTool.description, - inputSchema: additionalTool.inputSchema as ToolInputSchema, - }, - }, - { - toolSpec: { - name: clientTool.name, - description: clientTool.description, - inputSchema: clientTool.inputSchema as ToolInputSchema, - }, - }, - ], - }; - const expectedBedrockInputCommonProperties = { - modelId: event.modelConfiguration.modelId, - inferenceConfig: event.modelConfiguration.inferenceConfiguration, - system: [ - { - text: event.modelConfiguration.systemPrompt, - }, - ], - toolConfig: expectedToolConfig, - }; - const bedrockRequest = bedrockClientSendMock.mock.calls[0] - .arguments[0] as unknown as ConverseCommand; - const expectedBedrockInput: ConverseCommandInput = { - messages: messages as Array, - ...expectedBedrockInputCommonProperties, - }; - assert.deepStrictEqual(bedrockRequest.input, expectedBedrockInput); - }); - - void it('decodes base64 encoded images', async () => { - const event: ConversationTurnEvent = { - ...commonEvent, - }; - - const fakeImagePayload = randomBytes(32); - - messageHistoryRetrieverMockGetEventMessages.mock.mockImplementationOnce( - () => { - return Promise.resolve([ - { - id: '', - conversationId: '', - role: 'user', - content: [ - { - image: { - format: 'png', - source: { - bytes: fakeImagePayload.toString('base64'), - }, - }, - }, - ], - }, - ]); - } - ); - - const bedrockClient = new BedrockRuntimeClient(); - const bedrockResponse: ConverseCommandOutput = { - $metadata: {}, - metrics: undefined, - output: { - message: { - role: 'assistant', - content: [ - { - text: 'block1', - }, - { - text: 'block2', - }, - ], - }, - }, - stopReason: 'end_turn', - usage: undefined, - }; - const bedrockClientSendMock = mock.method(bedrockClient, 'send', () => - Promise.resolve(bedrockResponse) - ); - - await new BedrockConverseAdapter( - event, - [], - bedrockClient, - undefined, - messageHistoryRetriever - ).askBedrock(); - - assert.strictEqual(bedrockClientSendMock.mock.calls.length, 1); - const bedrockRequest = bedrockClientSendMock.mock.calls[0] - .arguments[0] as unknown as ConverseCommand; - assert.deepStrictEqual(bedrockRequest.input.messages, [ - { - role: 'user', - content: [ - { - image: { - format: 'png', - source: { - bytes: fakeImagePayload, - }, - }, - }, - ], - }, - ]); - }); - void it('adds user agent middleware', async () => { const event: ConversationTurnEvent = { ...commonEvent, }; - event.request.headers['x-amz-user-agent'] = 'testUserAgent'; - const bedrockClient = new BedrockRuntimeClient(); const addMiddlewareMock = mock.method(bedrockClient.middlewareStack, 'add'); + const userAgentProvider = new UserAgentProvider( + {} as unknown as ConversationTurnEvent + ); + mock.method(userAgentProvider, 'getUserAgent', () => 'testUserAgent'); new BedrockConverseAdapter( event, [], bedrockClient, undefined, - messageHistoryRetriever + messageHistoryRetriever, + userAgentProvider ); assert.strictEqual(addMiddlewareMock.mock.calls.length, 1); @@ -880,3 +935,157 @@ void describe('Bedrock converse adapter', () => { ); }); }); + +const askBedrockWithStreaming = async ( + adapter: BedrockConverseAdapter +): Promise> => { + const chunks: Array = []; + for await (const chunk of 
adapter.askBedrockStreaming()) { + chunks.push(chunk); + } + return chunks; +}; + +const mockBedrockResponse = ( + contentBlocks: + | Array + | Array, + streamResponse: boolean +): ConverseStreamCommandOutput | ConverseCommandOutput => { + if (streamResponse) { + return mockConverseStreamCommandOutput(contentBlocks); + } + return mockConverseCommandOutput(contentBlocks); +}; +const mockConverseCommandOutput = ( + contentBlocks: + | Array + | Array +): ConverseCommandOutput => { + let stopReason: StopReason = 'end_turn'; + if (contentBlocks.find((block) => block.toolUse)) { + stopReason = 'tool_use'; + } + return { + $metadata: {}, + metrics: undefined, + output: { + message: { + role: 'assistant', + content: contentBlocks, + }, + }, + stopReason, + usage: undefined, + }; +}; + +const mockConverseStreamCommandOutput = ( + contentBlocks: + | Array + | Array +): ConverseStreamCommandOutput => { + const streamItems: Array = []; + let stopReason: StopReason | undefined; + streamItems.push({ + messageStart: { + role: 'assistant', + }, + }); + for (let i = 0; i < contentBlocks.length; i++) { + const block = contentBlocks[i]; + if (block.toolUse) { + stopReason = 'tool_use'; + streamItems.push({ + contentBlockStart: { + contentBlockIndex: i, + start: { + toolUse: { + toolUseId: block.toolUse.toolUseId, + name: block.toolUse.name, + }, + }, + }, + }); + const input = JSON.stringify(block.toolUse.input); + streamItems.push({ + contentBlockDelta: { + contentBlockIndex: i, + delta: { + toolUse: { + // simulate chunked input + input: input.substring(0, 1), + }, + }, + }, + }); + if (input.length > 1) { + streamItems.push({ + contentBlockDelta: { + contentBlockIndex: i, + delta: { + toolUse: { + // simulate chunked input + input: input.substring(1), + }, + }, + }, + }); + } + streamItems.push({ + contentBlockStop: { + contentBlockIndex: i, + }, + }); + } else if (block.text) { + stopReason = 'end_turn'; + streamItems.push({ + contentBlockStart: { + contentBlockIndex: i, + start: undefined, + }, + }); + const input = block.text; + streamItems.push({ + contentBlockDelta: { + contentBlockIndex: i, + delta: { + // simulate chunked input + text: input.substring(0, 1), + }, + }, + }); + if (input.length > 1) { + streamItems.push({ + contentBlockDelta: { + contentBlockIndex: i, + delta: { + // simulate chunked input + text: input.substring(1), + }, + }, + }); + } + streamItems.push({ + contentBlockStop: { + contentBlockIndex: i, + }, + }); + } else { + throw new Error('Unsupported block type'); + } + } + streamItems.push({ + messageStop: { + stopReason: stopReason, + }, + }); + return { + $metadata: {}, + stream: (async function* (): AsyncGenerator { + for (const streamItem of streamItems) { + yield streamItem; + } + })(), + }; +}; diff --git a/packages/ai-constructs/src/conversation/runtime/bedrock_converse_adapter.ts b/packages/ai-constructs/src/conversation/runtime/bedrock_converse_adapter.ts index 2a8bc77bfc..3bb4843698 100644 --- a/packages/ai-constructs/src/conversation/runtime/bedrock_converse_adapter.ts +++ b/packages/ai-constructs/src/conversation/runtime/bedrock_converse_adapter.ts @@ -4,6 +4,9 @@ import { ConverseCommand, ConverseCommandInput, ConverseCommandOutput, + ConverseStreamCommand, + ConverseStreamCommandInput, + ConverseStreamCommandOutput, Message, Tool, ToolConfiguration, @@ -12,10 +15,14 @@ import { import { ConversationTurnEvent, ExecutableTool, + StreamingResponseChunk, ToolDefinition, } from './types.js'; import { ConversationTurnEventToolsProvider } from './event-tools-provider'; 
import { ConversationMessageHistoryRetriever } from './conversation_message_history_retriever'; +import * as bedrock from '@aws-sdk/client-bedrock-runtime'; +import { ValidationError } from './errors'; +import { UserAgentProvider } from './user_agent_provider'; /** * This class is responsible for interacting with Bedrock Converse API @@ -42,23 +49,22 @@ export class BedrockConverseAdapter { private readonly messageHistoryRetriever = new ConversationMessageHistoryRetriever( event ), + userAgentProvider = new UserAgentProvider(event), private readonly logger = console ) { - if (event.request.headers['x-amz-user-agent']) { - this.bedrockClient.middlewareStack.add( - (next) => (args) => { - // @ts-expect-error Request is typed as unknown. - // But this is recommended way to alter headers per https://github.com/aws/aws-sdk-js-v3/blob/main/README.md. - args.request.headers['x-amz-user-agent'] = - event.request.headers['x-amz-user-agent']; - return next(args); - }, - { - step: 'build', - name: 'amplify-user-agent-injector', - } - ); - } + this.bedrockClient.middlewareStack.add( + (next) => (args) => { + // @ts-expect-error Request is typed as unknown. + // But this is recommended way to alter headers per https://github.com/aws/aws-sdk-js-v3/blob/main/README.md. + args.request.headers['x-amz-user-agent'] = + userAgentProvider.getUserAgent(); + return next(args); + }, + { + step: 'build', + name: 'amplify-user-agent-injector', + } + ); this.executableTools = [ ...eventToolsProvider.getEventTools(), ...additionalTools, @@ -82,7 +88,7 @@ export class BedrockConverseAdapter { this.clientToolByName.set(t.name, t); }); if (duplicateTools.size > 0) { - throw new Error( + throw new ValidationError( `Tools must have unique names. Duplicate tools: ${[ ...duplicateTools, ].join(', ')}.` @@ -153,6 +159,181 @@ export class BedrockConverseAdapter { return bedrockResponse.output?.message?.content ?? []; }; + /** + * Asks Bedrock for response using streaming version of Converse API. + */ + async *askBedrockStreaming(): AsyncGenerator { + const { modelId, systemPrompt, inferenceConfiguration } = + this.event.modelConfiguration; + + const messages: Array = + await this.getEventMessagesAsBedrockMessages(); + + let bedrockResponse: ConverseStreamCommandOutput; + // keep our own indexing for blocks instead of using Bedrock's indexes + // since we stream subset of these upstream. + let blockIndex = 0; + let lastBlockIndex = 0; + let stopReason = ''; + // Accumulates client facing content per turn. + // So that upstream can persist full message at the end of the streaming. 
+ const accumulatedTurnContent: Array = []; + do { + const toolConfig = this.createToolConfiguration(); + const converseCommandInput: ConverseStreamCommandInput = { + modelId, + messages: [...messages], + system: [{ text: systemPrompt }], + inferenceConfig: inferenceConfiguration, + toolConfig, + }; + this.logger.info('Sending Bedrock Converse Stream request'); + this.logger.debug( + 'Bedrock Converse Stream request:', + converseCommandInput + ); + bedrockResponse = await this.bedrockClient.send( + new ConverseStreamCommand(converseCommandInput) + ); + this.logger.info( + `Received Bedrock Converse Stream response, requestId=${bedrockResponse.$metadata.requestId}` + ); + if (!bedrockResponse.stream) { + throw new Error('Bedrock response is missing stream'); + } + let toolUseBlock: ContentBlock.ToolUseMember | undefined; + let clientToolsRequested = false; + let text: string = ''; + let toolUseInput: string = ''; + let blockDeltaIndex = 0; + let lastBlockDeltaIndex = 0; + // Accumulate current message for the tool use loop purpose. + const accumulatedAssistantMessage: Message = { + role: undefined, + content: [], + }; + + for await (const chunk of bedrockResponse.stream) { + this.logger.debug('Bedrock Converse Stream response chunk:', chunk); + if (chunk.messageStart) { + accumulatedAssistantMessage.role = chunk.messageStart.role; + } else if (chunk.contentBlockStart) { + blockDeltaIndex = 0; + lastBlockDeltaIndex = 0; + if (chunk.contentBlockStart.start?.toolUse) { + toolUseBlock = { + toolUse: { + ...chunk.contentBlockStart.start?.toolUse, + input: undefined, + }, + }; + } + } else if (chunk.contentBlockDelta) { + if (chunk.contentBlockDelta.delta?.toolUse) { + if (!chunk.contentBlockDelta.delta.toolUse.input) { + toolUseInput = ''; + } + toolUseInput += chunk.contentBlockDelta.delta.toolUse.input; + } else if (chunk.contentBlockDelta.delta?.text) { + text += chunk.contentBlockDelta.delta.text; + yield { + accumulatedTurnContent: [...accumulatedTurnContent, { text }], + conversationId: this.event.conversationId, + associatedUserMessageId: this.event.currentMessageId, + contentBlockText: chunk.contentBlockDelta.delta.text, + contentBlockIndex: blockIndex, + contentBlockDeltaIndex: blockDeltaIndex, + }; + lastBlockDeltaIndex = blockDeltaIndex; + blockDeltaIndex++; + } + } else if (chunk.contentBlockStop) { + if (toolUseBlock) { + toolUseBlock.toolUse.input = JSON.parse(toolUseInput); + accumulatedAssistantMessage.content?.push(toolUseBlock); + if ( + toolUseBlock.toolUse.name && + this.clientToolByName.has(toolUseBlock.toolUse.name) + ) { + clientToolsRequested = true; + accumulatedTurnContent.push(toolUseBlock); + yield { + accumulatedTurnContent: [...accumulatedTurnContent], + conversationId: this.event.conversationId, + associatedUserMessageId: this.event.currentMessageId, + contentBlockIndex: blockIndex, + contentBlockToolUse: JSON.stringify(toolUseBlock), + }; + lastBlockIndex = blockIndex; + blockIndex++; + } + toolUseBlock = undefined; + toolUseInput = ''; + } else { + accumulatedAssistantMessage.content?.push({ + text, + }); + accumulatedTurnContent.push({ text }); + yield { + accumulatedTurnContent: [...accumulatedTurnContent], + conversationId: this.event.conversationId, + associatedUserMessageId: this.event.currentMessageId, + contentBlockIndex: blockIndex, + contentBlockDoneAtIndex: lastBlockDeltaIndex, + }; + text = ''; + lastBlockIndex = blockIndex; + blockIndex++; + } + } else if (chunk.messageStop) { + stopReason = chunk.messageStop.stopReason ?? 
''; + } + } + this.logger.debug( + 'Accumulated Bedrock Converse Stream response:', + accumulatedAssistantMessage + ); + if (clientToolsRequested) { + // For now if any of client tools is used we ignore executable tools + // and propagate result back to client. + yield { + accumulatedTurnContent: [...accumulatedTurnContent], + conversationId: this.event.conversationId, + associatedUserMessageId: this.event.currentMessageId, + contentBlockIndex: lastBlockIndex, + stopReason: stopReason, + }; + return; + } + messages.push(accumulatedAssistantMessage); + if (stopReason === 'tool_use') { + const responseContentBlocks = accumulatedAssistantMessage.content ?? []; + const toolUseBlocks = responseContentBlocks.filter( + (block) => 'toolUse' in block + ) as Array; + const toolResponseContentBlocks: Array = []; + for (const responseContentBlock of toolUseBlocks) { + const toolUseBlock = + responseContentBlock as ContentBlock.ToolUseMember; + const toolResultContentBlock = await this.executeTool(toolUseBlock); + toolResponseContentBlocks.push(toolResultContentBlock); + } + messages.push({ + role: 'user', + content: toolResponseContentBlocks, + }); + } + } while (stopReason === 'tool_use'); + + yield { + accumulatedTurnContent: [...accumulatedTurnContent], + conversationId: this.event.conversationId, + associatedUserMessageId: this.event.currentMessageId, + contentBlockIndex: lastBlockIndex, + stopReason: stopReason, + }; + } + /** * Maps event messages to Bedrock types. * 1. Makes a copy so that we don't mutate event. diff --git a/packages/ai-constructs/src/conversation/runtime/conversation_message_history_retriever.test.ts b/packages/ai-constructs/src/conversation/runtime/conversation_message_history_retriever.test.ts index 0c349a34e2..9215fc80da 100644 --- a/packages/ai-constructs/src/conversation/runtime/conversation_message_history_retriever.test.ts +++ b/packages/ai-constructs/src/conversation/runtime/conversation_message_history_retriever.test.ts @@ -12,6 +12,7 @@ import { GetQueryOutput, ListQueryOutput, } from './conversation_message_history_retriever'; +import { UserAgentProvider } from './user_agent_provider'; type TestCase = { name: string; @@ -394,11 +395,325 @@ void describe('Conversation message history retriever', () => { }, ], }, + { + name: 'Parses client tools json elements', + mockListResponseMessages: [ + { + id: event.currentMessageId, + conversationId: event.conversationId, + role: 'user', + content: [ + { + toolUse: { + name: 'testToolUse', + toolUseId: 'testToolUseId', + input: '{ "testKey": "testValue" }', + }, + }, + { + toolResult: { + status: 'success', + toolUseId: 'testToolUseId', + content: [ + { + json: '{ "testKey": "testValue" }', + }, + ], + }, + }, + ], + }, + ], + expectedMessages: [ + { + role: 'user', + content: [ + { + toolUse: { + name: 'testToolUse', + toolUseId: 'testToolUseId', + input: { testKey: 'testValue' }, + }, + }, + { + toolResult: { + status: 'success', + toolUseId: 'testToolUseId', + content: [ + { + json: { testKey: 'testValue' }, + }, + ], + }, + }, + ], + }, + ], + }, + { + name: 'Removes tool usage from non-current turns', + mockListResponseMessages: [ + { + id: 'someNonCurrentMessageId1', + conversationId: event.conversationId, + role: 'user', + content: [ + { + text: 'nonCurrentMessage1', + }, + ], + }, + { + id: 'someNonCurrentMessageId2', + associatedUserMessageId: 'someNonCurrentMessageId1', + conversationId: event.conversationId, + role: 'assistant', + content: [ + { + text: 'nonCurrentMessage2', + }, + { + toolUse: { + name: 
'testToolUse1', + toolUseId: 'testToolUseId1', + input: undefined, + }, + }, + ], + }, + { + id: 'someNonCurrentMessageId3', + conversationId: event.conversationId, + role: 'user', + content: [ + { + toolResult: { + status: 'success', + toolUseId: 'testToolUseId1', + content: undefined, + }, + }, + ], + }, + { + id: 'someNonCurrentMessageId4', + associatedUserMessageId: 'someNonCurrentMessageId3', + conversationId: event.conversationId, + role: 'assistant', + content: [ + { + text: 'nonCurrentMessage3', + }, + { + toolUse: { + name: 'testToolUse2', + toolUseId: 'testToolUseId2', + input: undefined, + }, + }, + ], + }, + { + id: 'someNonCurrentMessageId5', + conversationId: event.conversationId, + role: 'user', + content: [ + { + toolResult: { + status: 'success', + toolUseId: 'testToolUseId2', + content: undefined, + }, + }, + ], + }, + { + id: 'someNonCurrentMessageId5', + associatedUserMessageId: 'someNonCurrentMessageId5', + conversationId: event.conversationId, + role: 'assistant', + content: [ + { + text: 'nonCurrentMessage4', + }, + ], + }, + // Current turn with multiple tool use. + { + id: 'someCurrentMessageId1', + conversationId: event.conversationId, + role: 'user', + content: [ + { + text: 'currentMessage1', + }, + ], + }, + { + id: 'someCurrentMessageId2', + associatedUserMessageId: 'someCurrentMessageId1', + conversationId: event.conversationId, + role: 'assistant', + content: [ + { + text: 'currentMessage2', + }, + { + toolUse: { + name: 'testToolUse3', + toolUseId: 'testToolUseId3', + input: undefined, + }, + }, + ], + }, + { + id: 'someCurrentMessageId3', + conversationId: event.conversationId, + role: 'user', + content: [ + { + toolResult: { + status: 'success', + toolUseId: 'testToolUseId3', + content: undefined, + }, + }, + ], + }, + { + id: 'someCurrentMessageId4', + associatedUserMessageId: 'someCurrentMessageId3', + conversationId: event.conversationId, + role: 'assistant', + content: [ + { + text: 'currentMessage3', + }, + { + toolUse: { + name: 'testToolUse4', + toolUseId: 'testToolUseId4', + input: undefined, + }, + }, + ], + }, + { + id: event.currentMessageId, + conversationId: event.conversationId, + role: 'user', + content: [ + { + toolResult: { + status: 'success', + toolUseId: 'testToolUseId2', + content: undefined, + }, + }, + ], + }, + ], + expectedMessages: [ + { + role: 'user', + content: [ + { + text: 'nonCurrentMessage1', + }, + ], + }, + { + role: 'assistant', + content: [ + { + text: 'nonCurrentMessage2', + }, + { + text: 'nonCurrentMessage3', + }, + { + text: 'nonCurrentMessage4', + }, + ], + }, + { + role: 'user', + content: [ + { + text: 'currentMessage1', + }, + ], + }, + { + role: 'assistant', + content: [ + { + text: 'currentMessage2', + }, + { + toolUse: { + name: 'testToolUse3', + toolUseId: 'testToolUseId3', + input: undefined, + }, + }, + ], + }, + { + role: 'user', + content: [ + { + toolResult: { + status: 'success', + toolUseId: 'testToolUseId3', + content: undefined, + }, + }, + ], + }, + { + role: 'assistant', + content: [ + { + text: 'currentMessage3', + }, + { + toolUse: { + name: 'testToolUse4', + toolUseId: 'testToolUseId4', + input: undefined, + }, + }, + ], + }, + { + role: 'user', + content: [ + { + toolResult: { + status: 'success', + toolUseId: 'testToolUseId2', + content: undefined, + }, + }, + ], + }, + ], + }, ]; for (const testCase of testCases) { void it(testCase.name, async () => { - const graphqlRequestExecutor = new GraphqlRequestExecutor('', '', ''); + const userAgentProvider = new UserAgentProvider( + {} as unknown 
as ConversationTurnEvent + ); + mock.method(userAgentProvider, 'getUserAgent', () => ''); + const graphqlRequestExecutor = new GraphqlRequestExecutor( + '', + '', + userAgentProvider + ); const executeGraphqlMock = mock.method( graphqlRequestExecutor, 'executeGraphql', diff --git a/packages/ai-constructs/src/conversation/runtime/conversation_message_history_retriever.ts b/packages/ai-constructs/src/conversation/runtime/conversation_message_history_retriever.ts index 54a11f1eee..c98889522a 100644 --- a/packages/ai-constructs/src/conversation/runtime/conversation_message_history_retriever.ts +++ b/packages/ai-constructs/src/conversation/runtime/conversation_message_history_retriever.ts @@ -1,5 +1,10 @@ -import { ConversationMessage, ConversationTurnEvent } from './types'; +import { + ConversationMessage, + ConversationMessageContentBlock, + ConversationTurnEvent, +} from './types'; import { GraphqlRequestExecutor } from './graphql_request_executor'; +import { UserAgentProvider } from './user_agent_provider'; export type ConversationHistoryMessageItem = ConversationMessage & { id: string; @@ -103,15 +108,11 @@ export class ConversationMessageHistoryRetriever { private readonly graphqlRequestExecutor = new GraphqlRequestExecutor( event.graphqlApiEndpoint, event.request.headers.authorization, - event.request.headers['x-amz-user-agent'] + new UserAgentProvider(event) ) ) {} getMessageHistory = async (): Promise> => { - if (this.event.messages?.length) { - // This is for backwards compatibility and should be removed with messages property. - return this.event.messages; - } const messages = await this.listMessages(); let currentMessage = messages.find( @@ -141,7 +142,7 @@ export class ConversationMessageHistoryRetriever { }); // Reconcile history and inject aiContext - return messages.reduce((acc, current) => { + const orderedMessages = messages.reduce((acc, current) => { // Bedrock expects that message history is user->assistant->user->assistant->... and so on. // The chronological order doesn't assure this ordering if there were any concurrent messages sent. // Therefore, conversation is ordered by user's messages only and corresponding assistant messages are inserted @@ -180,6 +181,81 @@ export class ConversationMessageHistoryRetriever { } return acc; }, [] as Array); + + // Remove tool usage from non-current turn and squash messages. + return this.squashNonCurrentTurns(orderedMessages); + }; + + /** + * This function removes tool usage from non-current turns. + * The tool usage and result blocks don't matter after a turn is completed, + * but do cost extra tokens to process. + * The algorithm is as follows: + * 1. Find where current turn begins. I.e. last user message that isn't tool block. + * 2. Remove toolUse and toolResult blocks before current turn. + * 3. Squash continuous sequences of messages that belong to same 'message.role'. + */ + private squashNonCurrentTurns = (messages: Array) => { + const isNonToolBlockPredicate = ( + contentBlock: ConversationMessageContentBlock + ) => !contentBlock.toolUse && !contentBlock.toolResult; + + // find where current turn begins. I.e. last user message that is not related to tools + const lastNonToolUseUserMessageIndex = messages.findLastIndex((message) => { + return ( + message.role === 'user' && message.content.find(isNonToolBlockPredicate) + ); + }); + + // No non-current turns, don't transform. + if (lastNonToolUseUserMessageIndex <= 0) { + return messages; + } + + const squashedMessages: Array = []; + + // Define a "buffer". I.e. 
a message we keep around and squash content on. + let currentSquashedMessage: ConversationMessage | undefined = undefined; + // Process messages before current turn begins + // Remove tool usage blocks. + // Combine content for consecutive message that have same role. + for (let i = 0; i < lastNonToolUseUserMessageIndex; i++) { + const currentMessage = messages[i]; + const currentMessageRole = currentMessage.role; + const currentMessageNonToolContent = currentMessage.content.filter( + isNonToolBlockPredicate + ); + if (currentMessageNonToolContent.length === 0) { + // Tool only message. Nothing to squash, skip; + continue; + } + + if (!currentSquashedMessage) { + // Nothing squashed yet, initialize the buffer. + currentSquashedMessage = { + role: currentMessageRole, + content: currentMessageNonToolContent, + }; + } else if (currentSquashedMessage.role === currentMessageRole) { + // if role is same append content. + currentSquashedMessage.content.push(...currentMessageNonToolContent); + } else { + // if role flips push current squashed message and re-initialize the buffer. + squashedMessages.push(currentSquashedMessage); + currentSquashedMessage = { + role: currentMessageRole, + content: currentMessageNonToolContent, + }; + } + } + // flush the last buffer. + if (currentSquashedMessage) { + squashedMessages.push(currentSquashedMessage); + } + + // Append current turn as is. + squashedMessages.push(...messages.slice(lastNonToolUseUserMessageIndex)); + return squashedMessages; }; private getCurrentMessage = @@ -250,6 +326,23 @@ export class ConversationMessageHistoryRetriever { contentBlock[property] = undefined; } } + + if (typeof contentBlock.toolUse?.input === 'string') { + // toolUse.input may come as serialized JSON for Client Tools. + // Parse it in that case. + contentBlock.toolUse.input = JSON.parse(contentBlock.toolUse.input); + } + if (contentBlock.toolResult?.content) { + contentBlock.toolResult.content.forEach((toolResultContentBlock) => { + if (typeof toolResultContentBlock.json === 'string') { + // toolResult.content[].json may come as serialized JSON for Client Tools. + // Parse it in that case. 
+ toolResultContentBlock.json = JSON.parse( + toolResultContentBlock.json + ); + } + }); + } }); }); diff --git a/packages/ai-constructs/src/conversation/runtime/conversation_turn_executor.test.ts b/packages/ai-constructs/src/conversation/runtime/conversation_turn_executor.test.ts index d30be4d7df..8c42431b63 100644 --- a/packages/ai-constructs/src/conversation/runtime/conversation_turn_executor.test.ts +++ b/packages/ai-constructs/src/conversation/runtime/conversation_turn_executor.test.ts @@ -1,17 +1,17 @@ import { describe, it, mock } from 'node:test'; import assert from 'node:assert'; import { ConversationTurnExecutor } from './conversation_turn_executor'; -import { ConversationTurnEvent } from './types'; +import { ConversationTurnEvent, StreamingResponseChunk } from './types'; import { BedrockConverseAdapter } from './bedrock_converse_adapter'; import { ContentBlock } from '@aws-sdk/client-bedrock-runtime'; import { ConversationTurnResponseSender } from './conversation_turn_response_sender'; +import { Lazy } from './lazy'; void describe('Conversation turn executor', () => { const event: ConversationTurnEvent = { conversationId: 'testConversationId', currentMessageId: 'testCurrentMessageId', graphqlApiEndpoint: '', - messages: [], messageHistoryQuery: { getQueryName: '', getQueryInputTypeName: '', @@ -45,6 +45,12 @@ void describe('Conversation turn executor', () => { () => Promise.resolve() ); + const streamResponseSenderSendResponseMock = mock.method( + responseSender, + 'sendResponseChunk', + () => Promise.resolve() + ); + const consoleErrorMock = mock.fn(); const consoleLogMock = mock.fn(); const consoleDebugMock = mock.fn(); @@ -57,8 +63,8 @@ void describe('Conversation turn executor', () => { await new ConversationTurnExecutor( event, [], - bedrockConverseAdapter, - responseSender, + new Lazy(() => responseSender), + new Lazy(() => bedrockConverseAdapter), consoleMock ).execute(); @@ -66,6 +72,10 @@ void describe('Conversation turn executor', () => { bedrockConverseAdapterAskBedrockMock.mock.calls.length, 1 ); + assert.strictEqual( + streamResponseSenderSendResponseMock.mock.calls.length, + 0 + ); assert.strictEqual(responseSenderSendResponseMock.mock.calls.length, 1); assert.deepStrictEqual( responseSenderSendResponseMock.mock.calls[0].arguments[0], @@ -85,6 +95,105 @@ void describe('Conversation turn executor', () => { assert.strictEqual(consoleErrorMock.mock.calls.length, 0); }); + void it('executes turn successfully with streaming response', async () => { + const streamingEvent: ConversationTurnEvent = { + ...event, + streamResponse: true, + }; + const bedrockConverseAdapter = new BedrockConverseAdapter( + streamingEvent, + [] + ); + const chunks: Array = [ + { + contentBlockText: 'chunk1', + contentBlockIndex: 0, + contentBlockDeltaIndex: 1, + conversationId: 'testConversationId', + associatedUserMessageId: 'testCurrentMessageId', + accumulatedTurnContent: [{ text: 'chunk1' }], + }, + { + contentBlockText: 'chunk2', + contentBlockIndex: 0, + contentBlockDeltaIndex: 1, + conversationId: 'testConversationId', + associatedUserMessageId: 'testCurrentMessageId', + accumulatedTurnContent: [{ text: 'chunk1chunk2' }], + }, + ]; + const bedrockConverseAdapterAskBedrockMock = mock.method( + bedrockConverseAdapter, + 'askBedrockStreaming', + () => + (async function* (): AsyncGenerator { + for (const chunk of chunks) { + yield chunk; + } + })() + ); + const responseSender = new ConversationTurnResponseSender(streamingEvent); + const responseSenderSendResponseMock = mock.method( + 
responseSender, + 'sendResponse', + () => Promise.resolve() + ); + + const streamResponseSenderSendResponseMock = mock.method( + responseSender, + 'sendResponseChunk', + () => Promise.resolve() + ); + + const consoleErrorMock = mock.fn(); + const consoleLogMock = mock.fn(); + const consoleDebugMock = mock.fn(); + const consoleMock = { + error: consoleErrorMock, + log: consoleLogMock, + debug: consoleDebugMock, + } as unknown as Console; + + await new ConversationTurnExecutor( + streamingEvent, + [], + new Lazy(() => responseSender), + new Lazy(() => bedrockConverseAdapter), + consoleMock + ).execute(); + + assert.strictEqual( + bedrockConverseAdapterAskBedrockMock.mock.calls.length, + 1 + ); + assert.strictEqual( + streamResponseSenderSendResponseMock.mock.calls.length, + 2 + ); + assert.deepStrictEqual( + streamResponseSenderSendResponseMock.mock.calls[0].arguments[0], + chunks[0] + ); + assert.deepStrictEqual( + streamResponseSenderSendResponseMock.mock.calls[1].arguments[0], + chunks[1] + ); + + assert.strictEqual(responseSenderSendResponseMock.mock.calls.length, 0); + + assert.strictEqual(consoleLogMock.mock.calls.length, 2); + assert.strictEqual( + consoleLogMock.mock.calls[0].arguments[0], + 'Handling conversation turn event, currentMessageId=testCurrentMessageId, conversationId=testConversationId' + ); + assert.strictEqual( + consoleLogMock.mock.calls[1].arguments[0], + 'Conversation turn event handled successfully, currentMessageId=testCurrentMessageId, conversationId=testConversationId' + ); + + assert.strictEqual(consoleErrorMock.mock.calls.length, 0); + }); + void it('logs and propagates error if bedrock adapter throws', async () => { const bedrockConverseAdapter = new BedrockConverseAdapter(event, []); const bedrockError = new Error('Bedrock failed'); @@ -100,13 +209,27 @@ void describe('Conversation turn executor', () => { () => Promise.resolve() ); + const streamResponseSenderSendResponseMock = mock.method( + responseSender, + 'sendResponseChunk', + () => Promise.resolve() + ); + + const responseSenderSendErrorsMock = mock.method( + responseSender, + 'sendErrors', + () => Promise.resolve() + ); + const consoleErrorMock = mock.fn(); const consoleLogMock = mock.fn(); const consoleDebugMock = mock.fn(); + const consoleWarnMock = mock.fn(); const consoleMock = { error: consoleErrorMock, log: consoleLogMock, debug: consoleDebugMock, + warn: consoleWarnMock, } as unknown as Console; await assert.rejects( @@ -114,8 +237,8 @@ void describe('Conversation turn executor', () => { new ConversationTurnExecutor( event, [], - bedrockConverseAdapter, - responseSender, + new Lazy(() => responseSender), + new Lazy(() => bedrockConverseAdapter), consoleMock ).execute(), (error: Error) => { @@ -128,6 +251,10 @@ void describe('Conversation turn executor', () => { bedrockConverseAdapterAskBedrockMock.mock.calls.length, 1 ); + assert.strictEqual( + streamResponseSenderSendResponseMock.mock.calls.length, + 0 + ); assert.strictEqual(responseSenderSendResponseMock.mock.calls.length, 0); assert.strictEqual(consoleLogMock.mock.calls.length, 1); @@ -145,6 +272,16 @@ void describe('Conversation turn executor', () => { consoleErrorMock.mock.calls[0].arguments[1], bedrockError ); + assert.strictEqual(responseSenderSendErrorsMock.mock.calls.length, 1); + assert.deepStrictEqual( + responseSenderSendErrorsMock.mock.calls[0].arguments[0], + [ + { + errorType: 'Error', + message: 'Bedrock failed', + }, + ] + ); }); void it('logs and propagates error if response sender throws', async () => { @@ -166,13 +303,27 @@ 
void describe('Conversation turn executor', () => { () => Promise.reject(responseSenderError) ); + const streamResponseSenderSendResponseMock = mock.method( + responseSender, + 'sendResponseChunk', + () => Promise.resolve() + ); + + const responseSenderSendErrorsMock = mock.method( + responseSender, + 'sendErrors', + () => Promise.resolve() + ); + const consoleErrorMock = mock.fn(); const consoleLogMock = mock.fn(); const consoleDebugMock = mock.fn(); + const consoleWarnMock = mock.fn(); const consoleMock = { error: consoleErrorMock, log: consoleLogMock, debug: consoleDebugMock, + warn: consoleWarnMock, } as unknown as Console; await assert.rejects( @@ -180,8 +331,8 @@ void describe('Conversation turn executor', () => { new ConversationTurnExecutor( event, [], - bedrockConverseAdapter, - responseSender, + new Lazy(() => responseSender), + new Lazy(() => bedrockConverseAdapter), consoleMock ).execute(), (error: Error) => { @@ -194,6 +345,10 @@ void describe('Conversation turn executor', () => { bedrockConverseAdapterAskBedrockMock.mock.calls.length, 1 ); + assert.strictEqual( + streamResponseSenderSendResponseMock.mock.calls.length, + 0 + ); assert.strictEqual(responseSenderSendResponseMock.mock.calls.length, 1); assert.strictEqual(consoleLogMock.mock.calls.length, 1); @@ -211,5 +366,180 @@ void describe('Conversation turn executor', () => { consoleErrorMock.mock.calls[0].arguments[1], responseSenderError ); + assert.strictEqual(responseSenderSendErrorsMock.mock.calls.length, 1); + assert.deepStrictEqual( + responseSenderSendErrorsMock.mock.calls[0].arguments[0], + [ + { + errorType: 'Error', + message: 'Failed to send response', + }, + ] + ); + }); + + void it('throws original exception if error sender fails', async () => { + const bedrockConverseAdapter = new BedrockConverseAdapter(event, []); + const originalError = new Error('original error'); + mock.method(bedrockConverseAdapter, 'askBedrock', () => + Promise.reject(originalError) + ); + const responseSender = new ConversationTurnResponseSender(event); + mock.method(responseSender, 'sendResponse', () => Promise.resolve()); + + mock.method(responseSender, 'sendResponseChunk', () => Promise.resolve()); + + const responseSenderSendErrorsMock = mock.method( + responseSender, + 'sendErrors', + () => Promise.reject(new Error('sender error')) + ); + + const consoleErrorMock = mock.fn(); + const consoleLogMock = mock.fn(); + const consoleDebugMock = mock.fn(); + const consoleWarnMock = mock.fn(); + const consoleMock = { + error: consoleErrorMock, + log: consoleLogMock, + debug: consoleDebugMock, + warn: consoleWarnMock, + } as unknown as Console; + + await assert.rejects( + () => + new ConversationTurnExecutor( + event, + [], + new Lazy(() => responseSender), + new Lazy(() => bedrockConverseAdapter), + consoleMock + ).execute(), + (error: Error) => { + assert.strictEqual(error, originalError); + return true; + } + ); + + assert.strictEqual(responseSenderSendErrorsMock.mock.calls.length, 1); + assert.deepStrictEqual( + responseSenderSendErrorsMock.mock.calls[0].arguments[0], + [ + { + errorType: 'Error', + message: 'original error', + }, + ] + ); + }); + + void it('serializes unknown errors', async () => { + const bedrockConverseAdapter = new BedrockConverseAdapter(event, []); + const unknownError = { some: 'shape' }; + mock.method(bedrockConverseAdapter, 'askBedrock', () => + Promise.reject(unknownError) + ); + const responseSender = new ConversationTurnResponseSender(event); + mock.method(responseSender, 'sendResponse', () => 
Promise.resolve()); + + mock.method(responseSender, 'sendResponseChunk', () => Promise.resolve()); + + const responseSenderSendErrorsMock = mock.method( + responseSender, + 'sendErrors', + () => Promise.resolve() + ); + + const consoleErrorMock = mock.fn(); + const consoleLogMock = mock.fn(); + const consoleDebugMock = mock.fn(); + const consoleWarnMock = mock.fn(); + const consoleMock = { + error: consoleErrorMock, + log: consoleLogMock, + debug: consoleDebugMock, + warn: consoleWarnMock, + } as unknown as Console; + + await assert.rejects( + () => + new ConversationTurnExecutor( + event, + [], + new Lazy(() => responseSender), + new Lazy(() => bedrockConverseAdapter), + consoleMock + ).execute(), + (error: Error) => { + assert.strictEqual(error, unknownError); + return true; + } + ); + + assert.strictEqual(responseSenderSendErrorsMock.mock.calls.length, 1); + assert.deepStrictEqual( + responseSenderSendErrorsMock.mock.calls[0].arguments[0], + [ + { + errorType: 'UnknownError', + message: '{"some":"shape"}', + }, + ] + ); + }); + + void it('reports initialization errors', async () => { + const bedrockConverseAdapter = new BedrockConverseAdapter(event, []); + mock.method(bedrockConverseAdapter, 'askBedrock', () => Promise.resolve()); + const responseSender = new ConversationTurnResponseSender(event); + mock.method(responseSender, 'sendResponse', () => Promise.resolve()); + + mock.method(responseSender, 'sendResponseChunk', () => Promise.resolve()); + + const responseSenderSendErrorsMock = mock.method( + responseSender, + 'sendErrors', + () => Promise.resolve() + ); + + const consoleErrorMock = mock.fn(); + const consoleLogMock = mock.fn(); + const consoleDebugMock = mock.fn(); + const consoleWarnMock = mock.fn(); + const consoleMock = { + error: consoleErrorMock, + log: consoleLogMock, + debug: consoleDebugMock, + warn: consoleWarnMock, + } as unknown as Console; + + const initializationError = new Error('initialization error'); + await assert.rejects( + () => + new ConversationTurnExecutor( + event, + [], + new Lazy(() => responseSender), + new Lazy(() => { + throw initializationError; + }), + consoleMock + ).execute(), + (error: Error) => { + assert.strictEqual(error, initializationError); + return true; + } + ); + + assert.strictEqual(responseSenderSendErrorsMock.mock.calls.length, 1); + assert.deepStrictEqual( + responseSenderSendErrorsMock.mock.calls[0].arguments[0], + [ + { + errorType: 'Error', + message: 'initialization error', + }, + ] + ); }); }); diff --git a/packages/ai-constructs/src/conversation/runtime/conversation_turn_executor.ts b/packages/ai-constructs/src/conversation/runtime/conversation_turn_executor.ts index 99e66c5f74..9c5389f610 100644 --- a/packages/ai-constructs/src/conversation/runtime/conversation_turn_executor.ts +++ b/packages/ai-constructs/src/conversation/runtime/conversation_turn_executor.ts @@ -1,6 +1,7 @@ import { ConversationTurnResponseSender } from './conversation_turn_response_sender.js'; import { ConversationTurnEvent, ExecutableTool, JSONSchema } from './types.js'; import { BedrockConverseAdapter } from './bedrock_converse_adapter.js'; +import { Lazy } from './lazy'; /** * This class is responsible for orchestrating conversation turn execution. 
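The tests above pin down the behavior introduced by the hunks below: the executor's collaborators are wrapped in Lazy, so a factory that throws no longer fails during construction but on first use inside execute(), where the error can still be forwarded to AppSync via sendErrors before being rethrown. A minimal sketch of that pattern, reusing the Lazy shape from this diff (the consumer below is illustrative only, not the real executor):

class Lazy<T> {
  #value?: T;
  constructor(private readonly valueFactory: () => T) {}
  get value(): T {
    // Construction is deferred until the first read of `.value`.
    return (this.#value ??= this.valueFactory());
  }
}

// Stand-in for `new Lazy(() => new BedrockConverseAdapter(event, tools))` when validation fails.
const adapter = new Lazy<string>(() => {
  throw new Error('initialization error');
});

try {
  adapter.value; // throws here, inside the executor's try/catch, not at construction time
} catch (e) {
  // the executor forwards `e` as { errorType, message } via sendErrors, then rethrows it
}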
@@ -16,11 +17,13 @@ export class ConversationTurnExecutor {
   constructor(
     private readonly event: ConversationTurnEvent,
     additionalTools: Array<ExecutableTool>,
-    private readonly bedrockConverseAdapter = new BedrockConverseAdapter(
-      event,
-      additionalTools
+    // We're deferring dependency initialization here so that we can capture all validation errors.
+    private readonly responseSender = new Lazy(
+      () => new ConversationTurnResponseSender(event)
+    ),
+    private readonly bedrockConverseAdapter = new Lazy(
+      () => new BedrockConverseAdapter(event, additionalTools)
     ),
-    private readonly responseSender = new ConversationTurnResponseSender(event),
     private readonly logger = console
   ) {}
 
@@ -31,9 +34,16 @@ export class ConversationTurnExecutor {
       );
       this.logger.debug('Event received:', this.event);
 
-      const assistantResponse = await this.bedrockConverseAdapter.askBedrock();
-
-      await this.responseSender.sendResponse(assistantResponse);
+      if (this.event.streamResponse) {
+        const chunks = this.bedrockConverseAdapter.value.askBedrockStreaming();
+        for await (const chunk of chunks) {
+          await this.responseSender.value.sendResponseChunk(chunk);
+        }
+      } else {
+        const assistantResponse =
+          await this.bedrockConverseAdapter.value.askBedrock();
+        await this.responseSender.value.sendResponse(assistantResponse);
+      }
 
       this.logger.log(
         `Conversation turn event handled successfully, currentMessageId=${this.event.currentMessageId}, conversationId=${this.event.conversationId}`
@@ -43,10 +53,28 @@ export class ConversationTurnExecutor {
         `Failed to handle conversation turn event, currentMessageId=${this.event.currentMessageId}, conversationId=${this.event.conversationId}`,
         e
       );
+      await this.tryForwardError(e);
       // Propagate error to mark lambda execution as failed in metrics.
       throw e;
     }
   };
+
+  private tryForwardError = async (e: unknown) => {
+    try {
+      let errorType = 'UnknownError';
+      let message: string;
+      if (e instanceof Error) {
+        errorType = e.name;
+        message = e.message;
+      } else {
+        message = JSON.stringify(e);
+      }
+      await this.responseSender.value.sendErrors([{ errorType, message }]);
+    } catch (e) {
+      // Best effort; if the error mutation cannot be sent back to AppSync, just log it.
+ this.logger.warn('Failed to send error mutation', e); + } + }; } /** diff --git a/packages/ai-constructs/src/conversation/runtime/conversation_turn_response_sender.test.ts b/packages/ai-constructs/src/conversation/runtime/conversation_turn_response_sender.test.ts index da49e7eb89..32c579b237 100644 --- a/packages/ai-constructs/src/conversation/runtime/conversation_turn_response_sender.test.ts +++ b/packages/ai-constructs/src/conversation/runtime/conversation_turn_response_sender.test.ts @@ -3,20 +3,25 @@ import assert from 'node:assert'; import { ConversationTurnResponseSender, MutationResponseInput, + MutationStreamingResponseInput, } from './conversation_turn_response_sender'; -import { ConversationTurnEvent } from './types'; +import { + ConversationTurnError, + ConversationTurnEvent, + StreamingResponseChunk, +} from './types'; import { ContentBlock } from '@aws-sdk/client-bedrock-runtime'; import { GraphqlRequest, GraphqlRequestExecutor, } from './graphql_request_executor'; +import { UserAgentProvider } from './user_agent_provider'; void describe('Conversation turn response sender', () => { const event: ConversationTurnEvent = { conversationId: 'testConversationId', currentMessageId: 'testCurrentMessageId', graphqlApiEndpoint: 'http://fake.endpoint/', - messages: [], messageHistoryQuery: { getQueryName: '', getQueryInputTypeName: '', @@ -33,7 +38,19 @@ void describe('Conversation turn response sender', () => { }; void it('sends response back to appsync', async () => { - const graphqlRequestExecutor = new GraphqlRequestExecutor('', '', ''); + const userAgentProvider = new UserAgentProvider( + {} as unknown as ConversationTurnEvent + ); + const userAgentProviderMock = mock.method( + userAgentProvider, + 'getUserAgent', + () => 'testUserAgent' + ); + const graphqlRequestExecutor = new GraphqlRequestExecutor( + '', + '', + userAgentProvider + ); const executeGraphqlMock = mock.method( graphqlRequestExecutor, 'executeGraphql', @@ -43,6 +60,7 @@ void describe('Conversation turn response sender', () => { ); const sender = new ConversationTurnResponseSender( event, + userAgentProvider, graphqlRequestExecutor ); const response: Array = [ @@ -53,7 +71,14 @@ void describe('Conversation turn response sender', () => { ]; await sender.sendResponse(response); + assert.strictEqual(userAgentProviderMock.mock.calls.length, 1); + assert.deepStrictEqual(userAgentProviderMock.mock.calls[0].arguments[0], { + 'turn-response-type': 'single', + }); assert.strictEqual(executeGraphqlMock.mock.calls.length, 1); + assert.deepStrictEqual(executeGraphqlMock.mock.calls[0].arguments[1], { + userAgent: 'testUserAgent', + }); const request = executeGraphqlMock.mock.calls[0] .arguments[0] as GraphqlRequest; assert.deepStrictEqual(request, { @@ -81,7 +106,15 @@ void describe('Conversation turn response sender', () => { }); void it('serializes tool use input to JSON', async () => { - const graphqlRequestExecutor = new GraphqlRequestExecutor('', '', ''); + const userAgentProvider = new UserAgentProvider( + {} as unknown as ConversationTurnEvent + ); + mock.method(userAgentProvider, 'getUserAgent', () => ''); + const graphqlRequestExecutor = new GraphqlRequestExecutor( + '', + '', + userAgentProvider + ); const executeGraphqlMock = mock.method( graphqlRequestExecutor, 'executeGraphql', @@ -91,6 +124,7 @@ void describe('Conversation turn response sender', () => { ); const sender = new ConversationTurnResponseSender( event, + userAgentProvider, graphqlRequestExecutor ); const toolUseBlock: ContentBlock.ToolUseMember = { @@ 
-134,4 +168,212 @@ void describe('Conversation turn response sender', () => { }, }); }); + + void it('sends streaming response chunk back to appsync', async () => { + const userAgentProvider = new UserAgentProvider( + {} as unknown as ConversationTurnEvent + ); + const userAgentProviderMock = mock.method( + userAgentProvider, + 'getUserAgent', + () => 'testUserAgent' + ); + const graphqlRequestExecutor = new GraphqlRequestExecutor( + '', + '', + userAgentProvider + ); + const executeGraphqlMock = mock.method( + graphqlRequestExecutor, + 'executeGraphql', + () => + // Mock successful Appsync response + Promise.resolve() + ); + const sender = new ConversationTurnResponseSender( + event, + userAgentProvider, + graphqlRequestExecutor + ); + const chunk: StreamingResponseChunk = { + accumulatedTurnContent: [{ text: 'testAccumulatedMessageContent' }], + associatedUserMessageId: 'testAssociatedUserMessageId', + contentBlockIndex: 1, + contentBlockDeltaIndex: 2, + conversationId: 'testConversationId', + contentBlockText: 'testBlockText', + }; + await sender.sendResponseChunk(chunk); + + assert.strictEqual(userAgentProviderMock.mock.calls.length, 1); + assert.deepStrictEqual(userAgentProviderMock.mock.calls[0].arguments[0], { + 'turn-response-type': 'streaming', + }); + assert.strictEqual(executeGraphqlMock.mock.calls.length, 1); + assert.deepStrictEqual(executeGraphqlMock.mock.calls[0].arguments[1], { + userAgent: 'testUserAgent', + }); + const request = executeGraphqlMock.mock.calls[0] + .arguments[0] as GraphqlRequest; + assert.deepStrictEqual(request, { + query: + '\n' + + ' mutation PublishModelResponse($input: testResponseMutationInputTypeName!) {\n' + + ' testResponseMutationName(input: $input) {\n' + + ' testSelectionSet\n' + + ' }\n' + + ' }\n' + + ' ', + variables: { + input: chunk, + }, + }); + }); + + void it('serializes tool use input to JSON when streaming', async () => { + const userAgentProvider = new UserAgentProvider( + {} as unknown as ConversationTurnEvent + ); + mock.method(userAgentProvider, 'getUserAgent', () => ''); + const graphqlRequestExecutor = new GraphqlRequestExecutor( + '', + '', + userAgentProvider + ); + const executeGraphqlMock = mock.method( + graphqlRequestExecutor, + 'executeGraphql', + () => + // Mock successful Appsync response + Promise.resolve() + ); + const sender = new ConversationTurnResponseSender( + event, + userAgentProvider, + graphqlRequestExecutor + ); + const toolUseBlock: ContentBlock.ToolUseMember = { + toolUse: { + name: 'testTool', + toolUseId: 'testToolUseId', + input: { + testPropertyKey: 'testPropertyValue', + }, + }, + }; + const chunk: StreamingResponseChunk = { + accumulatedTurnContent: [toolUseBlock], + associatedUserMessageId: 'testAssociatedUserMessageId', + contentBlockIndex: 1, + contentBlockDeltaIndex: 2, + conversationId: 'testConversationId', + contentBlockText: 'testBlockText', + }; + await sender.sendResponseChunk(chunk); + + assert.strictEqual(executeGraphqlMock.mock.calls.length, 1); + const request = executeGraphqlMock.mock.calls[0] + .arguments[0] as GraphqlRequest; + assert.deepStrictEqual(request, { + query: + '\n' + + ' mutation PublishModelResponse($input: testResponseMutationInputTypeName!) 
{\n' + + ' testResponseMutationName(input: $input) {\n' + + ' testSelectionSet\n' + + ' }\n' + + ' }\n' + + ' ', + variables: { + input: { + ...chunk, + accumulatedTurnContent: [ + { + toolUse: { + input: JSON.stringify(toolUseBlock.toolUse.input), + name: toolUseBlock.toolUse.name, + toolUseId: toolUseBlock.toolUse.toolUseId, + }, + }, + ], + }, + }, + }); + }); + + void it('sends errors response back to appsync', async () => { + const userAgentProvider = new UserAgentProvider( + {} as unknown as ConversationTurnEvent + ); + const userAgentProviderMock = mock.method( + userAgentProvider, + 'getUserAgent', + () => 'testUserAgent' + ); + const graphqlRequestExecutor = new GraphqlRequestExecutor( + '', + '', + userAgentProvider + ); + const executeGraphqlMock = mock.method( + graphqlRequestExecutor, + 'executeGraphql', + () => + // Mock successful Appsync response + Promise.resolve() + ); + const sender = new ConversationTurnResponseSender( + event, + userAgentProvider, + graphqlRequestExecutor + ); + const errors: Array = [ + { + errorType: 'errorType1', + message: 'errorMessage1', + }, + { + errorType: 'errorType2', + message: 'errorMessage2', + }, + ]; + await sender.sendErrors(errors); + + assert.strictEqual(userAgentProviderMock.mock.calls.length, 1); + assert.deepStrictEqual(userAgentProviderMock.mock.calls[0].arguments[0], { + 'turn-response-type': 'error', + }); + assert.strictEqual(executeGraphqlMock.mock.calls.length, 1); + assert.deepStrictEqual(executeGraphqlMock.mock.calls[0].arguments[1], { + userAgent: 'testUserAgent', + }); + assert.strictEqual(executeGraphqlMock.mock.calls.length, 1); + const request = executeGraphqlMock.mock.calls[0] + .arguments[0] as GraphqlRequest; + assert.deepStrictEqual(request, { + query: + '\n' + + ' mutation PublishModelResponse($input: testResponseMutationInputTypeName!) {\n' + + ' testResponseMutationName(input: $input) {\n' + + ' testSelectionSet\n' + + ' }\n' + + ' }\n' + + ' ', + variables: { + input: { + conversationId: event.conversationId, + errors: [ + { + errorType: 'errorType1', + message: 'errorMessage1', + }, + { + errorType: 'errorType2', + message: 'errorMessage2', + }, + ], + associatedUserMessageId: event.currentMessageId, + }, + }, + }); + }); }); diff --git a/packages/ai-constructs/src/conversation/runtime/conversation_turn_response_sender.ts b/packages/ai-constructs/src/conversation/runtime/conversation_turn_response_sender.ts index 9ca441fd6b..5892b6747c 100644 --- a/packages/ai-constructs/src/conversation/runtime/conversation_turn_response_sender.ts +++ b/packages/ai-constructs/src/conversation/runtime/conversation_turn_response_sender.ts @@ -1,6 +1,11 @@ -import { ConversationTurnEvent } from './types.js'; +import { + ConversationTurnError, + ConversationTurnEvent, + StreamingResponseChunk, +} from './types.js'; import type { ContentBlock } from '@aws-sdk/client-bedrock-runtime'; import { GraphqlRequestExecutor } from './graphql_request_executor'; +import { UserAgentProvider } from './user_agent_provider'; export type MutationResponseInput = { input: { @@ -10,6 +15,18 @@ export type MutationResponseInput = { }; }; +export type MutationStreamingResponseInput = { + input: StreamingResponseChunk; +}; + +export type MutationErrorsResponseInput = { + input: { + conversationId: string; + errors: ConversationTurnError[]; + associatedUserMessageId: string; + }; +}; + /** * This class is responsible for sending a response produced by Bedrock back to AppSync * in a form of mutation. 
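For orientation, the two new mutation input shapes that the sender below builds, taken directly from the fixtures asserted in the tests above (a sketch, not additional test code):

// Streaming: one mutation per content-block delta, carrying the turn content accumulated so far.
const chunkInput = {
  conversationId: 'testConversationId',
  associatedUserMessageId: 'testAssociatedUserMessageId',
  contentBlockIndex: 1,
  contentBlockDeltaIndex: 2,
  contentBlockText: 'testBlockText',
  accumulatedTurnContent: [{ text: 'testAccumulatedMessageContent' }],
};

// Errors: published when the turn fails, keyed to the originating user message.
const errorsInput = {
  conversationId: 'testConversationId',
  errors: [
    { errorType: 'errorType1', message: 'errorMessage1' },
    { errorType: 'errorType2', message: 'errorMessage2' },
  ],
  associatedUserMessageId: 'testCurrentMessageId',
};

Both land under variables.input of the PublishModelResponse mutation, with the request's x-amz-user-agent tagged as turn-response-type 'streaming' or 'error' respectively.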
@@ -20,10 +37,11 @@ export class ConversationTurnResponseSender { */ constructor( private readonly event: ConversationTurnEvent, + private readonly userAgentProvider = new UserAgentProvider(event), private readonly graphqlRequestExecutor = new GraphqlRequestExecutor( event.graphqlApiEndpoint, event.request.headers.authorization, - event.request.headers['x-amz-user-agent'] + userAgentProvider ), private readonly logger = console ) {} @@ -34,7 +52,58 @@ export class ConversationTurnResponseSender { await this.graphqlRequestExecutor.executeGraphql< MutationResponseInput, void - >(responseMutationRequest); + >(responseMutationRequest, { + userAgent: this.userAgentProvider.getUserAgent({ + 'turn-response-type': 'single', + }), + }); + }; + + sendResponseChunk = async (chunk: StreamingResponseChunk) => { + const responseMutationRequest = this.createStreamingMutationRequest(chunk); + this.logger.debug('Sending response mutation:', responseMutationRequest); + await this.graphqlRequestExecutor.executeGraphql< + MutationStreamingResponseInput, + void + >(responseMutationRequest, { + userAgent: this.userAgentProvider.getUserAgent({ + 'turn-response-type': 'streaming', + }), + }); + }; + + sendErrors = async (errors: ConversationTurnError[]) => { + const responseMutationRequest = this.createMutationErrorsRequest(errors); + this.logger.debug( + 'Sending errors response mutation:', + responseMutationRequest + ); + await this.graphqlRequestExecutor.executeGraphql< + MutationErrorsResponseInput, + void + >(responseMutationRequest, { + userAgent: this.userAgentProvider.getUserAgent({ + 'turn-response-type': 'error', + }), + }); + }; + + private createMutationErrorsRequest = (errors: ConversationTurnError[]) => { + const query = ` + mutation PublishModelResponse($input: ${this.event.responseMutation.inputTypeName}!) { + ${this.event.responseMutation.name}(input: $input) { + ${this.event.responseMutation.selectionSet} + } + } + `; + const variables: MutationErrorsResponseInput = { + input: { + conversationId: this.event.conversationId, + errors, + associatedUserMessageId: this.event.currentMessageId, + }, + }; + return { query, variables }; }; private createMutationRequest = (content: ContentBlock[]) => { @@ -45,7 +114,39 @@ export class ConversationTurnResponseSender { } } `; - content = content.map((block) => { + content = this.serializeContent(content); + const variables: MutationResponseInput = { + input: { + conversationId: this.event.conversationId, + content, + associatedUserMessageId: this.event.currentMessageId, + }, + }; + return { query, variables }; + }; + + private createStreamingMutationRequest = (chunk: StreamingResponseChunk) => { + const query = ` + mutation PublishModelResponse($input: ${this.event.responseMutation.inputTypeName}!) { + ${this.event.responseMutation.name}(input: $input) { + ${this.event.responseMutation.selectionSet} + } + } + `; + chunk = { + ...chunk, + accumulatedTurnContent: this.serializeContent( + chunk.accumulatedTurnContent + ), + }; + const variables: MutationStreamingResponseInput = { + input: chunk, + }; + return { query, variables }; + }; + + private serializeContent = (content: ContentBlock[]) => { + return content.map((block) => { if (block.toolUse) { // The `input` field is typed as `AWS JSON` in the GraphQL API because it can represent // arbitrary JSON values. 
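The serializeContent helper factored out above is shared by the single-response and streaming paths. A sketch of its effective behavior, as pinned down by the tool-use tests earlier in this diff (illustrative; the stringify itself is unchanged code that falls in the gap between hunks):

import type { ContentBlock } from '@aws-sdk/client-bedrock-runtime';

const serializeContent = (content: ContentBlock[]): ContentBlock[] =>
  content.map((block) => {
    if (block.toolUse) {
      // AWSJSON fields travel as serialized strings, so an input object such as
      // { testPropertyKey: 'testPropertyValue' } becomes '{"testPropertyKey":"testPropertyValue"}'.
      return {
        toolUse: { ...block.toolUse, input: JSON.stringify(block.toolUse.input) },
      };
    }
    return block;
  });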
@@ -55,13 +156,5 @@ export class ConversationTurnResponseSender { } return block; }); - const variables: MutationResponseInput = { - input: { - conversationId: this.event.conversationId, - content, - associatedUserMessageId: this.event.currentMessageId, - }, - }; - return { query, variables }; }; } diff --git a/packages/ai-constructs/src/conversation/runtime/errors.ts b/packages/ai-constructs/src/conversation/runtime/errors.ts new file mode 100644 index 0000000000..1d3063dd49 --- /dev/null +++ b/packages/ai-constructs/src/conversation/runtime/errors.ts @@ -0,0 +1,12 @@ +/** + * Represents validation errors. + */ +export class ValidationError extends Error { + /** + * Creates validation error instance. + */ + constructor(message: string) { + super(message); + this.name = 'ValidationError'; + } +} diff --git a/packages/ai-constructs/src/conversation/runtime/event-tools-provider/event_tools_provider.test.ts b/packages/ai-constructs/src/conversation/runtime/event-tools-provider/event_tools_provider.test.ts index c35fa7adcc..df63abd33d 100644 --- a/packages/ai-constructs/src/conversation/runtime/event-tools-provider/event_tools_provider.test.ts +++ b/packages/ai-constructs/src/conversation/runtime/event-tools-provider/event_tools_provider.test.ts @@ -11,7 +11,6 @@ void describe('events tool provider', () => { conversationId: '', currentMessageId: '', graphqlApiEndpoint: '', - messages: [], messageHistoryQuery: { getQueryName: '', getQueryInputTypeName: '', @@ -73,7 +72,6 @@ void describe('events tool provider', () => { conversationId: '', currentMessageId: '', graphqlApiEndpoint: '', - messages: [], messageHistoryQuery: { getQueryName: '', getQueryInputTypeName: '', diff --git a/packages/ai-constructs/src/conversation/runtime/event-tools-provider/event_tools_provider.ts b/packages/ai-constructs/src/conversation/runtime/event-tools-provider/event_tools_provider.ts index b895c757be..19ff56dc56 100644 --- a/packages/ai-constructs/src/conversation/runtime/event-tools-provider/event_tools_provider.ts +++ b/packages/ai-constructs/src/conversation/runtime/event-tools-provider/event_tools_provider.ts @@ -1,6 +1,7 @@ import { ConversationTurnEvent, ExecutableTool } from '../types'; import { GraphQlTool } from './graphql_tool'; import { GraphQlQueryFactory } from './graphql_query_factory'; +import { UserAgentProvider } from '../user_agent_provider'; /** * Creates executable tools from definitions in conversation turn event. @@ -29,7 +30,7 @@ export class ConversationTurnEventToolsProvider { graphqlApiEndpoint, query, this.event.request.headers.authorization, - this.event.request.headers['x-amz-user-agent'] + new UserAgentProvider(this.event) ); }); return tools ?? 
[]; diff --git a/packages/ai-constructs/src/conversation/runtime/event-tools-provider/graphql_tool.test.ts b/packages/ai-constructs/src/conversation/runtime/event-tools-provider/graphql_tool.test.ts index 68308ebb7b..d764dca508 100644 --- a/packages/ai-constructs/src/conversation/runtime/event-tools-provider/graphql_tool.test.ts +++ b/packages/ai-constructs/src/conversation/runtime/event-tools-provider/graphql_tool.test.ts @@ -6,11 +6,17 @@ import { GraphqlRequestExecutor, } from '../graphql_request_executor'; import { DocumentType } from '@smithy/types'; +import { UserAgentProvider } from '../user_agent_provider'; +import { ConversationTurnEvent } from '../types'; void describe('GraphQl tool', () => { const graphQlEndpoint = 'http://test.endpoint/'; const query = 'testQuery'; const accessToken = 'testAccessToken'; + const userAgentProvider = new UserAgentProvider( + {} as unknown as ConversationTurnEvent + ); + mock.method(userAgentProvider, 'getUserAgent', () => ''); const createGraphQlTool = ( graphqlRequestExecutor: GraphqlRequestExecutor @@ -22,7 +28,7 @@ void describe('GraphQl tool', () => { graphQlEndpoint, query, accessToken, - '', + userAgentProvider, graphqlRequestExecutor ); }; @@ -31,7 +37,11 @@ void describe('GraphQl tool', () => { const testResponse = { test: 'response', }; - const graphqlRequestExecutor = new GraphqlRequestExecutor('', '', ''); + const graphqlRequestExecutor = new GraphqlRequestExecutor( + '', + '', + userAgentProvider + ); const executeGraphqlMock = mock.method( graphqlRequestExecutor, 'executeGraphql', diff --git a/packages/ai-constructs/src/conversation/runtime/event-tools-provider/graphql_tool.ts b/packages/ai-constructs/src/conversation/runtime/event-tools-provider/graphql_tool.ts index 8890b5f78b..dcd37368a3 100644 --- a/packages/ai-constructs/src/conversation/runtime/event-tools-provider/graphql_tool.ts +++ b/packages/ai-constructs/src/conversation/runtime/event-tools-provider/graphql_tool.ts @@ -2,6 +2,7 @@ import { ExecutableTool, JSONSchema, ToolInputSchema } from '../types'; import type { ToolResultContentBlock } from '@aws-sdk/client-bedrock-runtime'; import { DocumentType } from '@smithy/types'; import { GraphqlRequestExecutor } from '../graphql_request_executor'; +import { UserAgentProvider } from '../user_agent_provider'; /** * A tool that use GraphQl queries. 
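The thread running through these files is a single signature change: wherever a raw 'x-amz-user-agent' string used to be injected, a UserAgentProvider is injected instead, so the header can be computed per request (with a per-call override, as the executor tests below exercise). A wiring sketch using values from the conversation turn event; `event` is assumed to be supplied by the Lambda handler, and the relative paths assume the runtime directory:

import { GraphqlRequestExecutor } from './graphql_request_executor';
import { UserAgentProvider } from './user_agent_provider';
import { ConversationTurnEvent } from './types';

declare const event: ConversationTurnEvent; // provided by the Lambda handler at runtime

const userAgentProvider = new UserAgentProvider(event);
const executor = new GraphqlRequestExecutor(
  event.graphqlApiEndpoint,
  event.request.headers.authorization,
  userAgentProvider // previously: event.request.headers['x-amz-user-agent']
);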
@@ -17,11 +18,11 @@ export class GraphQlTool implements ExecutableTool { readonly graphQlEndpoint: string, private readonly query: string, readonly accessToken: string, - readonly userAgent: string, + readonly userAgentProvider: UserAgentProvider, private readonly graphqlRequestExecutor = new GraphqlRequestExecutor( graphQlEndpoint, accessToken, - userAgent + userAgentProvider ) ) {} diff --git a/packages/ai-constructs/src/conversation/runtime/graphql_request_executor.test.ts b/packages/ai-constructs/src/conversation/runtime/graphql_request_executor.test.ts index 674bad3ea3..fe605b2171 100644 --- a/packages/ai-constructs/src/conversation/runtime/graphql_request_executor.test.ts +++ b/packages/ai-constructs/src/conversation/runtime/graphql_request_executor.test.ts @@ -2,11 +2,17 @@ import { describe, it, mock } from 'node:test'; import assert from 'node:assert'; import { text } from 'node:stream/consumers'; import { GraphqlRequestExecutor } from './graphql_request_executor'; +import { UserAgentProvider } from './user_agent_provider'; +import { ConversationTurnEvent } from './types'; void describe('Graphql executor test', () => { const graphqlEndpoint = 'http://fake.endpoint/'; const accessToken = 'testToken'; const userAgent = 'testUserAgent'; + const userAgentProvider = new UserAgentProvider( + {} as unknown as ConversationTurnEvent + ); + mock.method(userAgentProvider, 'getUserAgent', () => userAgent); void it('sends request to appsync', async () => { const fetchMock = mock.fn( @@ -18,7 +24,7 @@ void describe('Graphql executor test', () => { const executor = new GraphqlRequestExecutor( graphqlEndpoint, accessToken, - userAgent, + userAgentProvider, fetchMock ); const query = 'testQuery'; @@ -47,6 +53,41 @@ void describe('Graphql executor test', () => { }); }); + void it('method provided user agent takes precedence', async () => { + const fetchMock = mock.fn( + fetch, + (): Promise => + // Mock successful Appsync response + Promise.resolve(new Response('{}', { status: 200 })) + ); + const executor = new GraphqlRequestExecutor( + graphqlEndpoint, + accessToken, + userAgentProvider, + fetchMock + ); + const query = 'testQuery'; + const variables = { + testVariableKey: 'testVariableValue', + }; + await executor.executeGraphql( + { + query, + variables, + }, + { + userAgent: 'methodScopedUserAgent', + } + ); + + assert.strictEqual(fetchMock.mock.calls.length, 1); + const request: Request = fetchMock.mock.calls[0].arguments[0] as Request; + assert.strictEqual( + request.headers.get('x-amz-user-agent'), + 'methodScopedUserAgent' + ); + }); + void it('throws if response is not 2xx', async () => { const fetchMock = mock.fn( fetch, @@ -62,7 +103,7 @@ void describe('Graphql executor test', () => { const executor = new GraphqlRequestExecutor( graphqlEndpoint, accessToken, - userAgent, + userAgentProvider, fetchMock ); const query = 'testQuery'; @@ -102,7 +143,7 @@ void describe('Graphql executor test', () => { const executor = new GraphqlRequestExecutor( graphqlEndpoint, accessToken, - userAgent, + userAgentProvider, fetchMock ); const query = 'testQuery'; diff --git a/packages/ai-constructs/src/conversation/runtime/graphql_request_executor.ts b/packages/ai-constructs/src/conversation/runtime/graphql_request_executor.ts index a025b75cff..60f1af44bb 100644 --- a/packages/ai-constructs/src/conversation/runtime/graphql_request_executor.ts +++ b/packages/ai-constructs/src/conversation/runtime/graphql_request_executor.ts @@ -1,3 +1,5 @@ +import { UserAgentProvider } from './user_agent_provider'; + export 
type GraphqlRequest = { query: string; variables: TVariables; @@ -15,19 +17,23 @@ export class GraphqlRequestExecutor { constructor( private readonly graphQlEndpoint: string, private readonly accessToken: string, - private readonly userAgent: string, + private readonly userAgentProvider: UserAgentProvider, private readonly _fetch = fetch ) {} executeGraphql = async ( - request: GraphqlRequest + request: GraphqlRequest, + options?: { + userAgent?: string; + } ): Promise => { const httpRequest = new Request(this.graphQlEndpoint, { method: 'POST', headers: { 'Content-Type': 'application/graphql', Authorization: this.accessToken, - 'x-amz-user-agent': this.userAgent, + 'x-amz-user-agent': + options?.userAgent ?? this.userAgentProvider.getUserAgent(), }, body: JSON.stringify({ query: request.query, diff --git a/packages/ai-constructs/src/conversation/runtime/index.ts b/packages/ai-constructs/src/conversation/runtime/index.ts index 632d100cbf..187d962b03 100644 --- a/packages/ai-constructs/src/conversation/runtime/index.ts +++ b/packages/ai-constructs/src/conversation/runtime/index.ts @@ -1,6 +1,4 @@ import { - ConversationMessage, - ConversationMessageContentBlock, ConversationTurnEvent, ExecutableTool, FromJSONSchema, @@ -14,8 +12,6 @@ import { handleConversationTurnEvent } from './conversation_turn_executor.js'; import { createExecutableTool } from './executable_tool_factory.js'; export { - ConversationMessage, - ConversationMessageContentBlock, ConversationTurnEvent, createExecutableTool, ExecutableTool, diff --git a/packages/ai-constructs/src/conversation/runtime/lazy.ts b/packages/ai-constructs/src/conversation/runtime/lazy.ts new file mode 100644 index 0000000000..7f5b2032ca --- /dev/null +++ b/packages/ai-constructs/src/conversation/runtime/lazy.ts @@ -0,0 +1,17 @@ +/** + * A class that initializes lazily upon usage. + */ +export class Lazy { + #value?: T; + + /** + * Creates lazy instance. + */ + constructor(private readonly valueFactory: () => T) {} + /** + * Gets a value. Value is create at first access. + */ + public get value(): T { + return (this.#value ??= this.valueFactory()); + } +} diff --git a/packages/ai-constructs/src/conversation/runtime/types.ts b/packages/ai-constructs/src/conversation/runtime/types.ts index 23a54bf146..3d95030cab 100644 --- a/packages/ai-constructs/src/conversation/runtime/types.ts +++ b/packages/ai-constructs/src/conversation/runtime/types.ts @@ -47,6 +47,7 @@ export type ToolDefinition = { export type ConversationTurnEvent = { conversationId: string; currentMessageId: string; + streamResponse?: boolean; responseMutation: { name: string; inputTypeName: string; @@ -66,10 +67,6 @@ export type ConversationTurnEvent = { request: { headers: Record; }; - /** - * @deprecated This field is going to be removed in upcoming releases. - */ - messages?: Array; messageHistoryQuery: { getQueryName: string; getQueryInputTypeName: string; @@ -97,3 +94,49 @@ export type ExecutableTool< > = ToolDefinition & { execute: (input: TToolInput) => Promise; }; + +export type ConversationTurnError = { + errorType: string; + message: string; +}; + +export type StreamingResponseChunk = { + // always required + conversationId: string; + associatedUserMessageId: string; + contentBlockIndex: number; + accumulatedTurnContent: Array; +} & ( + | { + // text chunk + contentBlockText: string; + contentBlockDeltaIndex: number; + contentBlockDoneAtIndex?: never; + contentBlockToolUse?: never; + stopReason?: never; + } + | { + // end of block. 
applicable to text blocks + contentBlockDoneAtIndex: number; + contentBlockText?: never; + contentBlockDeltaIndex?: never; + contentBlockToolUse?: never; + stopReason?: never; + } + | { + // tool use + contentBlockToolUse: string; // serialized json with full tool use block + contentBlockDoneAtIndex?: never; + contentBlockText?: never; + contentBlockDeltaIndex?: never; + stopReason?: never; + } + | { + // turn complete + stopReason: string; + contentBlockDoneAtIndex?: never; + contentBlockText?: never; + contentBlockDeltaIndex?: never; + contentBlockToolUse?: never; + } +); diff --git a/packages/ai-constructs/src/conversation/runtime/user_agent_provider.test.ts b/packages/ai-constructs/src/conversation/runtime/user_agent_provider.test.ts new file mode 100644 index 0000000000..6309e8de95 --- /dev/null +++ b/packages/ai-constructs/src/conversation/runtime/user_agent_provider.test.ts @@ -0,0 +1,65 @@ +import { describe, it } from 'node:test'; +import assert from 'node:assert'; +import * as fs from 'node:fs'; +import path from 'path'; +import { UserAgentProvider } from './user_agent_provider'; +import { ConversationTurnEvent } from './types'; + +void describe('User Agent provider', () => { + // Read package json from disk (i.e., in a different way than actual implementation does). + const packageVersion = JSON.parse( + fs.readFileSync( + path.resolve(__dirname, '..', '..', '..', 'package.json'), + 'utf-8' + ) + ).version; + + void it('adds package information as metadata when user agent is present in the event', () => { + const userAgentProvider = new UserAgentProvider({ + request: { + headers: { + 'x-amz-user-agent': 'lib/foo#1.2.3', + }, + }, + } as unknown as ConversationTurnEvent); + + const userAgent = userAgentProvider.getUserAgent(); + + assert.strictEqual( + userAgent, + `lib/foo#1.2.3 md/amplify-ai-constructs#${packageVersion}` + ); + }); + + void it('adds package information as lib when user agent is not present in the event', () => { + const userAgentProvider = new UserAgentProvider({ + request: { + headers: {}, + }, + } as unknown as ConversationTurnEvent); + + const userAgent = userAgentProvider.getUserAgent(); + + assert.strictEqual( + userAgent, + `lib/amplify-ai-constructs#${packageVersion}` + ); + }); + + void it('adds additional metadata', () => { + const userAgentProvider = new UserAgentProvider({ + request: { + headers: {}, + }, + } as unknown as ConversationTurnEvent); + + const userAgent = userAgentProvider.getUserAgent({ + 'turn-response-type': 'streaming', + }); + + assert.strictEqual( + userAgent, + `lib/amplify-ai-constructs#${packageVersion} md/turn-response-type#streaming` + ); + }); +}); diff --git a/packages/ai-constructs/src/conversation/runtime/user_agent_provider.ts b/packages/ai-constructs/src/conversation/runtime/user_agent_provider.ts new file mode 100644 index 0000000000..a958b4eb94 --- /dev/null +++ b/packages/ai-constructs/src/conversation/runtime/user_agent_provider.ts @@ -0,0 +1,53 @@ +import { ConversationTurnEvent } from './types'; + +// This is intentional. There's no other way to read package version. +// 1. The 'imports' field in package.json won't work because this is CommonJS package. +// 2. We can't use `fs.readFile`. This file is bundled by ESBuild. ESBuild needs to know to bundle package.json +// That is achievable by either require or import statements. +// 3. 
The package.json is outside the rootDir defined in tsconfig.json
+// Imports require tsconfig to be broken down (as explained here https://stackoverflow.com/questions/55753163/package-json-is-not-under-rootdir).
+// This, however, would not work with our scripts that check tsconfig files for correctness.
+// 4. Hardcoding the version in the code, as opposed to reading the package.json file, isn't a great option either.
+//
+// Therefore, we use require as the least problematic solution here.
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+const packageVersion = require('../../../package.json').version;
+// Compliant with https://www.rfc-editor.org/rfc/rfc5234.
+const packageName = 'amplify-ai-constructs';
+
+export type UserAgentAdditionalMetadata = {
+  // These keys are intentionally user-agent friendly.
+  // eslint-disable-next-line @typescript-eslint/naming-convention
+  'turn-response-type'?: 'single' | 'streaming' | 'error';
+};
+
+/**
+ * Provides user agent.
+ */
+export class UserAgentProvider {
+  /**
+   * Creates user agent provider instance.
+   */
+  constructor(private readonly event: ConversationTurnEvent) {}
+
+  getUserAgent = (additionalMetadata?: UserAgentAdditionalMetadata): string => {
+    let userAgent = this.event.request.headers['x-amz-user-agent'];
+
+    // append library version
+    if (userAgent) {
+      // if user agent was forwarded from AppSync then append our package information as metadata.
+      userAgent = `${userAgent} md/${packageName}#${packageVersion}`;
+    } else {
+      // if user agent was not forwarded use our package information as library.
+      userAgent = `lib/${packageName}#${packageVersion}`;
+    }
+
+    if (additionalMetadata) {
+      Object.entries(additionalMetadata).forEach(([key, value]) => {
+        userAgent = `${userAgent} md/${key}#${value}`;
+      });
+    }
+
+    return userAgent;
+  };
+}
diff --git a/packages/auth-construct/API.md b/packages/auth-construct/API.md
index 6afb8647cb..f3c895c6ce 100644
--- a/packages/auth-construct/API.md
+++ b/packages/auth-construct/API.md
@@ -9,6 +9,7 @@ import { AuthResources } from '@aws-amplify/plugin-types';
 import { aws_cognito } from 'aws-cdk-lib';
 import { BackendOutputStorageStrategy } from '@aws-amplify/plugin-types';
 import { Construct } from 'constructs';
+import { IFunction } from 'aws-cdk-lib/aws-lambda';
 import { NumberAttributeConstraints } from 'aws-cdk-lib/aws-cognito';
 import { ResourceProvider } from '@aws-amplify/plugin-types';
 import { SecretValue } from 'aws-cdk-lib';
@@ -47,7 +48,7 @@ export type AuthProps = {
         externalProviders?: ExternalProviderOptions;
     };
     senders?: {
-        email: Pick;
+        email: Pick | CustomEmailSender;
     };
     userAttributes?: UserAttributes;
     multifactor?: MFA;
@@ -84,6 +85,12 @@ export type CustomAttributeString = CustomAttributeBase & StringAttributeConstra
     dataType: 'String';
 };
 
+// @public
+export type CustomEmailSender = {
+    handler: IFunction;
+    kmsKeyArn?: string;
+};
+
 // @public
 export type EmailLogin = true | EmailLoginSettings;
diff --git a/packages/auth-construct/CHANGELOG.md b/packages/auth-construct/CHANGELOG.md
index 3daa63b097..50e25ead8d 100644
--- a/packages/auth-construct/CHANGELOG.md
+++ b/packages/auth-construct/CHANGELOG.md
@@ -1,5 +1,26 @@
 # @aws-amplify/auth-construct
 
+## 1.4.0
+
+### Minor Changes
+
+- 11d62fe: Add support for custom Lambda function email senders in Auth construct
+
+### Patch Changes
+
+- b56d344: update aws-cdk lib to ^2.158.0
+- Updated dependencies [b56d344]
+  - @aws-amplify/backend-output-storage@1.1.3
+  - @aws-amplify/plugin-types@1.3.1
+
+## 1.3.2
+
+### Patch
Changes + +- 5f46d8d: add user groups to outputs +- Updated dependencies [5f46d8d] + - @aws-amplify/backend-output-schemas@1.4.0 + ## 1.3.1 ### Patch Changes diff --git a/packages/auth-construct/package.json b/packages/auth-construct/package.json index f69909104b..d24130f5ab 100644 --- a/packages/auth-construct/package.json +++ b/packages/auth-construct/package.json @@ -1,6 +1,6 @@ { "name": "@aws-amplify/auth-construct", - "version": "1.3.1", + "version": "1.4.0", "type": "commonjs", "publishConfig": { "access": "public" @@ -19,13 +19,13 @@ }, "license": "Apache-2.0", "dependencies": { - "@aws-amplify/backend-output-schemas": "^1.1.0", - "@aws-amplify/backend-output-storage": "^1.1.2", - "@aws-amplify/plugin-types": "^1.2.2", + "@aws-amplify/backend-output-schemas": "^1.4.0", + "@aws-amplify/backend-output-storage": "^1.1.3", + "@aws-amplify/plugin-types": "^1.3.1", "@aws-sdk/util-arn-parser": "^3.568.0" }, "peerDependencies": { - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0" } } diff --git a/packages/auth-construct/src/construct.test.ts b/packages/auth-construct/src/construct.test.ts index c2a268f7d2..d597c0222e 100644 --- a/packages/auth-construct/src/construct.test.ts +++ b/packages/auth-construct/src/construct.test.ts @@ -1098,6 +1098,7 @@ void describe('Auth construct', () => { 'oauthRedirectSignOut', 'oauthResponseType', 'oauthClientId', + 'groups', ], }, }, @@ -1480,6 +1481,34 @@ void describe('Auth construct', () => { const outputs = template.findOutputs('*'); assert.equal(outputs['socialProviders']['Value'], `["GOOGLE"]`); }); + void it('can override group precedence and correctly updates stored output', () => { + const app = new App(); + const stack = new Stack(app); + const auth = new AmplifyAuth(stack, 'test', { + loginWith: { email: true }, + groups: ['admins', 'managers'], + }); + auth.resources.groups['admins'].cfnUserGroup.precedence = 2; + const expectedGroups = [ + { + admins: { + precedence: 2, + }, + }, + { + managers: { + precedence: 1, + }, + }, + ]; + const template = Template.fromStack(stack); + template.hasResourceProperties('AWS::Cognito::UserPoolGroup', { + GroupName: 'admins', + Precedence: 2, + }); + const outputs = template.findOutputs('*'); + assert.equal(outputs['groups']['Value'], JSON.stringify(expectedGroups)); + }); }); void describe('Auth external login', () => { diff --git a/packages/auth-construct/src/construct.ts b/packages/auth-construct/src/construct.ts index 6c033aaf5b..84f9455d7b 100644 --- a/packages/auth-construct/src/construct.ts +++ b/packages/auth-construct/src/construct.ts @@ -34,6 +34,7 @@ import { UserPoolIdentityProviderOidc, UserPoolIdentityProviderSaml, UserPoolIdentityProviderSamlMetadataType, + UserPoolOperation, UserPoolProps, } from 'aws-cdk-lib/aws-cognito'; import { FederatedPrincipal, Role } from 'aws-cdk-lib/aws-iam'; @@ -51,6 +52,7 @@ import { StackMetadataBackendOutputStorageStrategy, } from '@aws-amplify/backend-output-storage'; import * as path from 'path'; +import { IKey, Key } from 'aws-cdk-lib/aws-kms'; type DefaultRoles = { auth: Role; unAuth: Role }; type IdentityProviderSetupResult = { @@ -130,6 +132,11 @@ export class AmplifyAuth role: Role; }; } = {}; + /** + * The KMS key used for encrypting custom email sender data. + * This is only set when using a custom email sender. + */ + private customEmailSenderKMSkey: IKey | undefined; /** * Create a new Auth construct with AuthProps. 
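Before the constructor hunks below, a usage sketch of the new senders.email variant: passing a Lambda handler instead of SES options wires Cognito's customEmailSender trigger and a KMS key for it. Resource names and the inline function body are placeholders, not part of this change:

import { App, Stack } from 'aws-cdk-lib';
import { Code, Function as LambdaFunction, Runtime } from 'aws-cdk-lib/aws-lambda';
import { AmplifyAuth } from '@aws-amplify/auth-construct';

const app = new App();
const stack = new Stack(app, 'AuthStack');

// Placeholder sender function; a real one implements the custom email sender contract.
const emailSender = new LambdaFunction(stack, 'EmailSenderFn', {
  runtime: Runtime.NODEJS_18_X,
  handler: 'index.handler',
  code: Code.fromInline('exports.handler = async () => {};'),
});

new AmplifyAuth(stack, 'Auth', {
  loginWith: { email: true },
  senders: {
    // Omitting kmsKeyArn makes the construct create a key with rotation enabled;
    // supplying one imports the existing key via Key.fromKeyArn instead.
    email: { handler: emailSender },
  },
});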
@@ -141,24 +148,39 @@ export class AmplifyAuth props: AuthProps = DEFAULTS.IF_NO_PROPS_PROVIDED ) { super(scope, id); - this.name = props.name ?? ''; this.domainPrefix = props.loginWith.externalProviders?.domainPrefix; - // UserPool this.computedUserPoolProps = this.getUserPoolProps(props); + this.userPool = new cognito.UserPool( this, `${this.name}UserPool`, this.computedUserPoolProps ); + /** + * Configure custom email sender for Cognito User Pool + * Grant necessary permissions for Lambda function to decrypt emails + * and allow Cognito to invoke the Lambda function + */ + if ( + props.senders?.email && + 'handler' in props.senders.email && + this.customEmailSenderKMSkey + ) { + this.customEmailSenderKMSkey.grantDecrypt(props.senders.email.handler); + this.customEmailSenderKMSkey.grantEncrypt(props.senders.email.handler); + this.userPool.addTrigger( + UserPoolOperation.of('customEmailSender'), + props.senders.email.handler + ); + } // UserPool - External Providers (Oauth, SAML, OIDC) and User Pool Domain this.providerSetupResult = this.setupExternalProviders( this.userPool, props.loginWith ); - // UserPool Client const userPoolClient = new cognito.UserPoolClient( this, @@ -478,7 +500,30 @@ export class AmplifyAuth }, { standardAttributes: {}, customAttributes: {} } ); - + /** + * Handle KMS key for custom email sender + * If a custom email sender is provided, we either use the provided KMS key ARN + * or create a new KMS key if one is not provided. + */ + if (props.senders?.email && 'handler' in props.senders.email) { + if (props.senders.email.kmsKeyArn) { + // Use the provided KMS key ARN + this.customEmailSenderKMSkey = Key.fromKeyArn( + this, + `${this.name}CustomSenderKey`, + props.senders.email.kmsKeyArn + ); + } else { + // Create a new KMS key if not provided + this.customEmailSenderKMSkey = new Key( + props.senders.email.handler.stack, + `${this.name}CustomSenderKey`, + { + enableKeyRotation: true, + } + ); + } + } const userPoolProps: UserPoolProps = { signInCaseSensitive: DEFAULTS.SIGN_IN_CASE_SENSITIVE, signInAliases: { @@ -503,15 +548,15 @@ export class AmplifyAuth customAttributes: { ...customAttributes, }, - email: props.senders - ? cognito.UserPoolEmail.withSES({ - fromEmail: props.senders.email.fromEmail, - fromName: props.senders.email.fromName, - replyTo: props.senders.email.replyTo, - sesRegion: Stack.of(this).region, - }) - : undefined, - + email: + props.senders && 'fromEmail' in props.senders.email + ? 
cognito.UserPoolEmail.withSES({ + fromEmail: props.senders.email.fromEmail, + fromName: props.senders.email.fromName, + replyTo: props.senders.email.replyTo, + sesRegion: Stack.of(this).region, + }) + : undefined, selfSignUpEnabled: DEFAULTS.ALLOW_SELF_SIGN_UP, mfa: mfaMode, mfaMessage: this.getMFAMessage(props.multifactor), @@ -528,6 +573,7 @@ export class AmplifyAuth props.loginWith.email?.userInvitation ) : undefined, + customSenderKmsKey: this.customEmailSenderKMSkey, }; return userPoolProps; }; @@ -1194,6 +1240,28 @@ export class AmplifyAuth }, }); + // user group precedence can be overwritten, so they are exposed via cdk LAZY + output.groups = Lazy.string({ + produce: () => { + const groupsArray: { + [key: string]: { + precedence?: number; + }; + }[] = []; + Object.keys(this.resources.groups).forEach((groupName) => { + const precedence = + this.resources.groups[groupName].cfnUserGroup.precedence; + groupsArray.push({ + [groupName]: { + precedence, + }, + }); + }, {} as Record); + + return JSON.stringify(groupsArray); + }, + }); + outputStorageStrategy.addBackendOutputEntry(authOutputKey, { version: '1', payload: output, diff --git a/packages/auth-construct/src/index.ts b/packages/auth-construct/src/index.ts index 13af450f20..85e3aa6c6c 100644 --- a/packages/auth-construct/src/index.ts +++ b/packages/auth-construct/src/index.ts @@ -26,6 +26,7 @@ export { CustomAttributeBoolean, CustomAttributeDateTime, CustomAttributeBase, + CustomEmailSender, } from './types.js'; export { AmplifyAuth } from './construct.js'; export { triggerEvents } from './trigger_events.js'; diff --git a/packages/auth-construct/src/types.ts b/packages/auth-construct/src/types.ts index 5083ffb73c..c3d4ddbbea 100644 --- a/packages/auth-construct/src/types.ts +++ b/packages/auth-construct/src/types.ts @@ -9,6 +9,7 @@ import { UserPoolIdentityProviderSamlMetadata, UserPoolSESOptions, } from 'aws-cdk-lib/aws-cognito'; +import { IFunction } from 'aws-cdk-lib/aws-lambda'; export type VerificationEmailWithLink = { /** * The type of verification. Must be one of "CODE" or "LINK". @@ -380,6 +381,14 @@ export type CustomAttribute = export type UserAttributes = StandardAttributes & Record<`custom:${string}`, CustomAttribute>; +/** + * CustomEmailSender type for configuring a custom Lambda function for email sending + */ +export type CustomEmailSender = { + handler: IFunction; + kmsKeyArn?: string; +}; + /** * Input props for the AmplifyAuth construct */ @@ -417,11 +426,15 @@ export type AuthProps = { */ senders?: { /** - * Configure Cognito to send emails from SES + * Configure Cognito to send emails from SES or a custom message trigger * SES configurations enable the use of customized email sender addresses and names + * Custom message triggers enable the use of third-party email providers when sending email notifications to users * @see https://docs.amplify.aws/react/build-a-backend/auth/moving-to-production/#email + * @see https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-custom-email-sender.html */ - email: Pick; + email: + | Pick + | CustomEmailSender; }; /** * The set of attributes that are required for every user in the user pool. 
Read more on attributes here - https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-settings-attributes.html diff --git a/packages/backend-ai/API.md b/packages/backend-ai/API.md index b9e044d820..2b75a01c57 100644 --- a/packages/backend-ai/API.md +++ b/packages/backend-ai/API.md @@ -4,6 +4,7 @@ ```ts +import { AiModel } from '@aws-amplify/data-schema-types'; import { ConstructFactory } from '@aws-amplify/plugin-types'; import { ConversationTurnEventVersion } from '@aws-amplify/ai-constructs/conversation'; import { FunctionResources } from '@aws-amplify/plugin-types'; @@ -49,11 +50,10 @@ type DefineConversationHandlerFunctionProps = { name: string; entry?: string; models: Array<{ - modelId: string | { - resourcePath: string; - }; + modelId: string | AiModel; region?: string; }>; + memoryMB?: number; }; // @public (undocumented) diff --git a/packages/backend-ai/CHANGELOG.md b/packages/backend-ai/CHANGELOG.md index 7b0d715433..9f8a59c759 100644 --- a/packages/backend-ai/CHANGELOG.md +++ b/packages/backend-ai/CHANGELOG.md @@ -1,5 +1,50 @@ # @aws-amplify/backend-ai +## 0.3.5 + +### Patch Changes + +- b56d344: update aws-cdk lib to ^2.158.0 +- Updated dependencies [37dd87c] +- Updated dependencies [613bca9] +- Updated dependencies [b56d344] + - @aws-amplify/ai-constructs@0.8.0 + - @aws-amplify/backend-output-storage@1.1.3 + - @aws-amplify/plugin-types@1.3.1 + +## 0.3.4 + +### Patch Changes + +- Updated dependencies [63fb254] + - @aws-amplify/ai-constructs@0.7.0 + +## 0.3.3 + +### Patch Changes + +- bd4ff4d: Add memory setting to conversation handler +- 0d6489d: Use AiModel from data-schema-types as possible input +- Updated dependencies [5f46d8d] +- Updated dependencies [bd4ff4d] + - @aws-amplify/backend-output-schemas@1.4.0 + - @aws-amplify/ai-constructs@0.6.2 + +## 0.3.2 + +### Patch Changes + +- Updated dependencies [b6761b0] + - @aws-amplify/ai-constructs@0.6.0 + +## 0.3.1 + +### Patch Changes + +- Updated dependencies [46a0e85] +- Updated dependencies [faacd1b] + - @aws-amplify/ai-constructs@0.5.0 + ## 0.3.0 ### Minor Changes diff --git a/packages/backend-ai/package.json b/packages/backend-ai/package.json index 7711ba387f..2b4f5de691 100644 --- a/packages/backend-ai/package.json +++ b/packages/backend-ai/package.json @@ -1,6 +1,6 @@ { "name": "@aws-amplify/backend-ai", - "version": "0.3.0", + "version": "0.3.5", "type": "module", "publishConfig": { "access": "public" @@ -22,14 +22,15 @@ }, "license": "Apache-2.0", "dependencies": { - "@aws-amplify/ai-constructs": "^0.4.0", - "@aws-amplify/backend-output-schemas": "^1.3.0", - "@aws-amplify/backend-output-storage": "^1.0.2", + "@aws-amplify/ai-constructs": "^0.8.0", + "@aws-amplify/backend-output-schemas": "^1.4.0", + "@aws-amplify/backend-output-storage": "^1.1.3", + "@aws-amplify/data-schema-types": "^1.2.0", "@aws-amplify/platform-core": "^1.1.0", - "@aws-amplify/plugin-types": "^1.0.1" + "@aws-amplify/plugin-types": "^1.3.1" }, "peerDependencies": { - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0" } } diff --git a/packages/backend-ai/src/conversation/factory.test.ts b/packages/backend-ai/src/conversation/factory.test.ts index 26ba1759be..9802e4944b 100644 --- a/packages/backend-ai/src/conversation/factory.test.ts +++ b/packages/backend-ai/src/conversation/factory.test.ts @@ -188,4 +188,19 @@ void describe('ConversationHandlerFactory', () => { }); }); }); + + void it('passes memory setting to construct', () => { + const factory = defineConversationHandlerFunction({ + entry: 
'./test-assets/with-default-entry/handler.ts', + name: 'testHandlerName', + models: [], + memoryMB: 271, + }); + const lambda = factory.getInstance(getInstanceProps); + const template = Template.fromStack(Stack.of(lambda.resources.lambda)); + template.resourceCountIs('AWS::Lambda::Function', 1); + template.hasResourceProperties('AWS::Lambda::Function', { + MemorySize: 271, + }); + }); }); diff --git a/packages/backend-ai/src/conversation/factory.ts b/packages/backend-ai/src/conversation/factory.ts index b75b6d49c4..6e04bbbcf5 100644 --- a/packages/backend-ai/src/conversation/factory.ts +++ b/packages/backend-ai/src/conversation/factory.ts @@ -15,6 +15,7 @@ import { } from '@aws-amplify/ai-constructs/conversation'; import path from 'path'; import { CallerDirectoryExtractor } from '@aws-amplify/platform-core'; +import { AiModel } from '@aws-amplify/data-schema-types'; class ConversationHandlerFunctionGenerator implements ConstructContainerEntryGenerator @@ -42,6 +43,7 @@ class ConversationHandlerFunctionGenerator }; }), outputStorageStrategy: this.outputStorageStrategy, + memoryMB: this.props.memoryMB, }; const conversationHandlerFunction = new ConversationHandlerFunction( scope, @@ -115,14 +117,15 @@ export type DefineConversationHandlerFunctionProps = { name: string; entry?: string; models: Array<{ - modelId: - | string - | { - // This is to match return of 'a.ai.model.anthropic.claude3Haiku()' - resourcePath: string; - }; + modelId: string | AiModel; region?: string; }>; + /** + * An amount of memory (RAM) to allocate to the function between 128 and 10240 MB. + * Must be a whole number. + * Default is 512MB. + */ + memoryMB?: number; }; /** diff --git a/packages/backend-auth/API.md b/packages/backend-auth/API.md index f9b6247ced..a1c07703d7 100644 --- a/packages/backend-auth/API.md +++ b/packages/backend-auth/API.md @@ -5,6 +5,7 @@ ```ts import { AmazonProviderProps } from '@aws-amplify/auth-construct'; +import { AmplifyFunction } from '@aws-amplify/plugin-types'; import { AppleProviderProps } from '@aws-amplify/auth-construct'; import { AuthProps } from '@aws-amplify/auth-construct'; import { AuthResources } from '@aws-amplify/plugin-types'; @@ -16,12 +17,14 @@ import { ExternalProviderOptions } from '@aws-amplify/auth-construct'; import { FacebookProviderProps } from '@aws-amplify/auth-construct'; import { FunctionResources } from '@aws-amplify/plugin-types'; import { GoogleProviderProps } from '@aws-amplify/auth-construct'; +import { IFunction } from 'aws-cdk-lib/aws-lambda'; import { OidcProviderProps } from '@aws-amplify/auth-construct'; import { ResourceAccessAcceptor } from '@aws-amplify/plugin-types'; import { ResourceAccessAcceptorFactory } from '@aws-amplify/plugin-types'; import { ResourceProvider } from '@aws-amplify/plugin-types'; import { StackProvider } from '@aws-amplify/plugin-types'; import { TriggerEvent } from '@aws-amplify/auth-construct'; +import { UserPoolSESOptions } from 'aws-cdk-lib/aws-cognito'; // @public export type ActionIam = 'addUserToGroup' | 'createGroup' | 'createUser' | 'deleteGroup' | 'deleteUser' | 'deleteUserAttributes' | 'disableUser' | 'enableUser' | 'forgetDevice' | 'getDevice' | 'getGroup' | 'getUser' | 'listUsers' | 'listUsersInGroup' | 'listGroups' | 'listDevices' | 'listGroupsForUser' | 'removeUserFromGroup' | 'resetUserPassword' | 'setUserMfaPreference' | 'setUserPassword' | 'setUserSettings' | 'updateDeviceStatus' | 'updateGroup' | 'updateUserAttributes'; @@ -36,10 +39,13 @@ export type AmazonProviderFactoryProps = Omit & { +export type 
AmplifyAuthProps = Expand & { loginWith: Expand; triggers?: Partial>>>; access?: AuthAccessGenerator; + senders?: { + email: Pick | CustomEmailSender; + }; }>; // @public @@ -80,6 +86,12 @@ export type AuthLoginWithFactoryProps = Omit & ResourceAccessAcceptorFactory & StackProvider; +// @public +export type CustomEmailSender = { + handler: ConstructFactory | IFunction; + kmsKeyArn?: string; +}; + // @public export const defineAuth: (props: AmplifyAuthProps) => ConstructFactory; diff --git a/packages/backend-auth/CHANGELOG.md b/packages/backend-auth/CHANGELOG.md index 8aa5b2b4a3..9359e6b29e 100644 --- a/packages/backend-auth/CHANGELOG.md +++ b/packages/backend-auth/CHANGELOG.md @@ -1,5 +1,20 @@ # @aws-amplify/backend-auth +## 1.3.0 + +### Minor Changes + +- 11d62fe: Add support for custom Lambda function email senders in Auth construct + +### Patch Changes + +- b56d344: update aws-cdk lib to ^2.158.0 +- Updated dependencies [11d62fe] +- Updated dependencies [b56d344] + - @aws-amplify/auth-construct@1.4.0 + - @aws-amplify/backend-output-storage@1.1.3 + - @aws-amplify/plugin-types@1.3.1 + ## 1.2.0 ### Minor Changes diff --git a/packages/backend-auth/package.json b/packages/backend-auth/package.json index c9bddfe7c9..dd5dab4e33 100644 --- a/packages/backend-auth/package.json +++ b/packages/backend-auth/package.json @@ -1,6 +1,6 @@ { "name": "@aws-amplify/backend-auth", - "version": "1.2.0", + "version": "1.3.0", "type": "module", "publishConfig": { "access": "public" @@ -19,16 +19,16 @@ }, "license": "Apache-2.0", "dependencies": { - "@aws-amplify/auth-construct": "^1.3.1", - "@aws-amplify/backend-output-storage": "^1.1.2", - "@aws-amplify/plugin-types": "^1.3.0" + "@aws-amplify/auth-construct": "^1.4.0", + "@aws-amplify/backend-output-storage": "^1.1.3", + "@aws-amplify/plugin-types": "^1.3.1" }, "devDependencies": { - "@aws-amplify/backend-platform-test-stubs": "^0.3.5", + "@aws-amplify/backend-platform-test-stubs": "^0.3.6", "@aws-amplify/platform-core": "^1.0.6" }, "peerDependencies": { - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0" } } diff --git a/packages/backend-auth/src/factory.test.ts b/packages/backend-auth/src/factory.test.ts index bbf285b7cd..a01c5f5935 100644 --- a/packages/backend-auth/src/factory.test.ts +++ b/packages/backend-auth/src/factory.test.ts @@ -26,6 +26,8 @@ import { import { Policy, PolicyStatement } from 'aws-cdk-lib/aws-iam'; import { AmplifyUserError } from '@aws-amplify/platform-core'; import { CfnFunction } from 'aws-cdk-lib/aws-lambda'; +import { Key } from 'aws-cdk-lib/aws-kms'; +import { CustomEmailSender } from './types.js'; const createStackAndSetContext = (): Stack => { const app = new App(); @@ -355,6 +357,144 @@ void describe('AmplifyAuthFactory', () => { }); }); }); + + void it('sets customEmailSender when function is provided as email sender', () => { + const testFunc = new aws_lambda.Function(stack, 'testFunc', { + code: aws_lambda.Code.fromInline('test placeholder'), + runtime: aws_lambda.Runtime.NODEJS_18_X, + handler: 'index.handler', + }); + const funcStub: ConstructFactory> = { + getInstance: () => { + return { + resources: { + lambda: testFunc, + cfnResources: { + cfnFunction: testFunc.node.findChild('Resource') as CfnFunction, + }, + }, + }; + }, + }; + const customEmailSender: CustomEmailSender = { + handler: funcStub, + }; + resetFactoryCount(); + + const authWithTriggerFactory = defineAuth({ + loginWith: { email: true }, + senders: { email: customEmailSender }, + }); + + const backendAuth = 
authWithTriggerFactory.getInstance(getInstanceProps); + + const template = Template.fromStack(backendAuth.stack); + + template.hasResourceProperties('AWS::Cognito::UserPool', { + LambdaConfig: { + CustomEmailSender: { + LambdaArn: { + Ref: Match.stringLikeRegexp('testFunc'), + }, + }, + KMSKeyID: { + Ref: Match.stringLikeRegexp('CustomSenderKey'), + }, + }, + }); + }); + void it('ensures empty lambdaTriggers do not remove triggers added elsewhere', () => { + const testFunc = new aws_lambda.Function(stack, 'testFunc', { + code: aws_lambda.Code.fromInline('test placeholder'), + runtime: aws_lambda.Runtime.NODEJS_18_X, + handler: 'index.handler', + }); + const funcStub: ConstructFactory> = { + getInstance: () => { + return { + resources: { + lambda: testFunc, + cfnResources: { + cfnFunction: testFunc.node.findChild('Resource') as CfnFunction, + }, + }, + }; + }, + }; + const customEmailSender: CustomEmailSender = { + handler: funcStub, + }; + resetFactoryCount(); + + const authWithTriggerFactory = defineAuth({ + loginWith: { email: true }, + senders: { email: customEmailSender }, + triggers: { preSignUp: funcStub }, + }); + + const backendAuth = authWithTriggerFactory.getInstance(getInstanceProps); + + const template = Template.fromStack(backendAuth.stack); + template.hasResourceProperties('AWS::Cognito::UserPool', { + LambdaConfig: { + PreSignUp: { + Ref: Match.stringLikeRegexp('testFunc'), + }, + CustomEmailSender: { + LambdaArn: { + Ref: Match.stringLikeRegexp('testFunc'), + }, + }, + KMSKeyID: { + Ref: Match.stringLikeRegexp('CustomSenderKey'), + }, + }, + }); + }); + void it('uses provided KMS key ARN and sets up custom email sender', () => { + const customKmsKeyArn = new Key(stack, `CustomSenderKey`, { + enableKeyRotation: true, + }); + const testFunc = new aws_lambda.Function(stack, 'testFunc', { + code: aws_lambda.Code.fromInline('test placeholder'), + runtime: aws_lambda.Runtime.NODEJS_18_X, + handler: 'index.handler', + }); + const funcStub: ConstructFactory> = { + getInstance: () => ({ + resources: { + lambda: testFunc, + cfnResources: { + cfnFunction: testFunc.node.findChild('Resource') as CfnFunction, + }, + }, + }), + }; + const customEmailSender: CustomEmailSender = { + handler: funcStub, + kmsKeyArn: customKmsKeyArn.keyArn, + }; + resetFactoryCount(); + + const authWithTriggerFactory = defineAuth({ + loginWith: { email: true }, + senders: { + email: customEmailSender, + }, + triggers: { preSignUp: funcStub }, + }); + + const backendAuth = authWithTriggerFactory.getInstance(getInstanceProps); + const template = Template.fromStack(backendAuth.stack); + + template.hasResourceProperties('AWS::Cognito::UserPool', { + LambdaConfig: { + KMSKeyID: { + Ref: Match.stringLikeRegexp('CustomSenderKey'), + }, + }, + }); + }); }); const upperCaseFirstChar = (str: string) => { diff --git a/packages/backend-auth/src/factory.ts b/packages/backend-auth/src/factory.ts index 02edd4695c..48d411f07d 100644 --- a/packages/backend-auth/src/factory.ts +++ b/packages/backend-auth/src/factory.ts @@ -1,6 +1,10 @@ import * as path from 'path'; import { Policy } from 'aws-cdk-lib/aws-iam'; -import { UserPool, UserPoolOperation } from 'aws-cdk-lib/aws-cognito'; +import { + UserPool, + UserPoolOperation, + UserPoolSESOptions, +} from 'aws-cdk-lib/aws-cognito'; import { AmplifyUserError, TagName } from '@aws-amplify/platform-core'; import { AmplifyAuth, @@ -20,12 +24,16 @@ import { ResourceProvider, StackProvider, } from '@aws-amplify/plugin-types'; -import { translateToAuthConstructLoginWith } from 
'./translate_auth_props.js'; +import { + translateToAuthConstructLoginWith, + translateToAuthConstructSenders, +} from './translate_auth_props.js'; import { authAccessBuilder as _authAccessBuilder } from './access_builder.js'; import { AuthAccessPolicyArbiterFactory } from './auth_access_policy_arbiter.js'; import { AuthAccessGenerator, AuthLoginWithFactoryProps, + CustomEmailSender, Expand, } from './types.js'; import { UserPoolAccessPolicyFactory } from './userpool_access_policy_factory.js'; @@ -36,7 +44,7 @@ export type BackendAuth = ResourceProvider & StackProvider; export type AmplifyAuthProps = Expand< - Omit & { + Omit & { /** * Specify how you would like users to log in. You can choose from email, phone, and even external providers such as LoginWithAmazon. */ @@ -60,6 +68,14 @@ export type AmplifyAuthProps = Expand< * access: (allow) => [allow.resource(groupManager).to(["manageGroups"])] */ access?: AuthAccessGenerator; + /** + * Configure email sender options + */ + senders?: { + email: + | Pick + | CustomEmailSender; + }; } >; @@ -142,6 +158,10 @@ class AmplifyAuthGenerator implements ConstructContainerEntryGenerator { this.props.loginWith, backendSecretResolver ), + senders: translateToAuthConstructSenders( + this.props.senders, + this.getInstanceProps + ), outputStorageStrategy: this.getInstanceProps.outputStorageStrategy, }; if (authProps.loginWith.externalProviders) { diff --git a/packages/backend-auth/src/translate_auth_props.ts b/packages/backend-auth/src/translate_auth_props.ts index f4b20fff37..fad144ef6b 100644 --- a/packages/backend-auth/src/translate_auth_props.ts +++ b/packages/backend-auth/src/translate_auth_props.ts @@ -6,7 +6,10 @@ import { GoogleProviderProps, OidcProviderProps, } from '@aws-amplify/auth-construct'; -import { BackendSecretResolver } from '@aws-amplify/plugin-types'; +import { + BackendSecretResolver, + ConstructFactoryGetInstanceProps, +} from '@aws-amplify/plugin-types'; import { AmazonProviderFactoryProps, AppleProviderFactoryProps, @@ -16,6 +19,8 @@ import { GoogleProviderFactoryProps, OidcProviderFactoryProps, } from './types.js'; +import { IFunction } from 'aws-cdk-lib/aws-lambda'; +import { AmplifyAuthProps } from './factory.js'; /** * Translate an Auth factory's loginWith to its Auth construct counterpart. Backend secret fields will be resolved @@ -79,6 +84,52 @@ export const translateToAuthConstructLoginWith = ( return result; }; +/** + * Translates the senders property from AmplifyAuthProps to AuthProps format. + * @param senders - The senders object from AmplifyAuthProps. + * @param getInstanceProps - Properties used to get an instance of the sender. + * @returns The translated senders object in AuthProps format, or undefined if no valid sender is provided. + * @description + * This function handles the translation of the 'senders' property, specifically for email senders. + * If no senders are provided or if there's no email sender, it returns undefined. + * If the email sender has a 'getInstance' method, it retrieves the Lambda function and returns it. + * Otherwise, it returns the email sender as is. + */ +export const translateToAuthConstructSenders = ( + senders: AmplifyAuthProps['senders'] | undefined, + getInstanceProps: ConstructFactoryGetInstanceProps +): AuthProps['senders'] | undefined => { + if (!senders || !senders.email) { + return undefined; + } + + // Handle CustomEmailSender type + if ('handler' in senders.email) { + const lambda: IFunction = + 'getInstance' in senders.email.handler + ? 
senders.email.handler.getInstance(getInstanceProps).resources.lambda + : senders.email.handler; + + return { + email: { + handler: lambda, + kmsKeyArn: senders.email.kmsKeyArn, + }, + }; + } + + // Handle SES configuration + if ('fromEmail' in senders.email) { + return { + email: senders.email, + }; + } + + // If none of the above, return the email configuration as-is + return { + email: senders.email, + }; +}; const translateAmazonProps = ( backendSecretResolver: BackendSecretResolver, diff --git a/packages/backend-auth/src/types.ts b/packages/backend-auth/src/types.ts index 8b7c018feb..46c199f1a1 100644 --- a/packages/backend-auth/src/types.ts +++ b/packages/backend-auth/src/types.ts @@ -8,6 +8,7 @@ import { OidcProviderProps, } from '@aws-amplify/auth-construct'; import { + AmplifyFunction, BackendSecret, ConstructFactory, ConstructFactoryGetInstanceProps, @@ -15,6 +16,7 @@ import { ResourceAccessAcceptorFactory, ResourceProvider, } from '@aws-amplify/plugin-types'; +import { IFunction } from 'aws-cdk-lib/aws-lambda'; /** * This utility allows us to expand nested types in auto complete prompts. @@ -252,3 +254,11 @@ export type ActionIam = | 'updateDeviceStatus' | 'updateGroup' | 'updateUserAttributes'; + +/** + * CustomEmailSender type for configuring a custom Lambda function for email sending + */ +export type CustomEmailSender = { + handler: ConstructFactory | IFunction; + kmsKeyArn?: string; +}; diff --git a/packages/backend-data/CHANGELOG.md b/packages/backend-data/CHANGELOG.md index 8434975536..99884312f3 100644 --- a/packages/backend-data/CHANGELOG.md +++ b/packages/backend-data/CHANGELOG.md @@ -1,5 +1,28 @@ # @aws-amplify/backend-data +## 1.1.7 + +### Patch Changes + +- 583a3f2: Fix detection of AmplifyErrors + +## 1.1.6 + +### Patch Changes + +- b56d344: update aws-cdk lib to ^2.158.0 +- Updated dependencies [b56d344] + - @aws-amplify/backend-output-storage@1.1.3 + - @aws-amplify/plugin-types@1.3.1 + +## 1.1.5 + +### Patch Changes + +- 0d6489d: Update data-schema-types +- Updated dependencies [5f46d8d] + - @aws-amplify/backend-output-schemas@1.4.0 + ## 1.1.4 ### Patch Changes diff --git a/packages/backend-data/package.json b/packages/backend-data/package.json index 3f31e6e507..69d5f24597 100644 --- a/packages/backend-data/package.json +++ b/packages/backend-data/package.json @@ -1,6 +1,6 @@ { "name": "@aws-amplify/backend-data", - "version": "1.1.4", + "version": "1.1.7", "type": "module", "publishConfig": { "access": "public" @@ -20,18 +20,18 @@ "license": "Apache-2.0", "devDependencies": { "@aws-amplify/data-schema": "^1.0.0", - "@aws-amplify/backend-platform-test-stubs": "^0.3.5", - "@aws-amplify/platform-core": "^1.0.7" + "@aws-amplify/backend-platform-test-stubs": "^0.3.6", + "@aws-amplify/platform-core": "^1.2.0" }, "peerDependencies": { - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0" }, "dependencies": { - "@aws-amplify/backend-output-storage": "^1.1.2", - "@aws-amplify/backend-output-schemas": "^1.1.0", + "@aws-amplify/backend-output-storage": "^1.1.3", + "@aws-amplify/backend-output-schemas": "^1.4.0", "@aws-amplify/data-construct": "^1.10.1", - "@aws-amplify/plugin-types": "^1.2.2", - "@aws-amplify/data-schema-types": "^1.1.1" + "@aws-amplify/plugin-types": "^1.3.1", + "@aws-amplify/data-schema-types": "^1.2.0" } } diff --git a/packages/backend-data/src/factory.ts b/packages/backend-data/src/factory.ts index c40ad8db70..ebe3d1b083 100644 --- a/packages/backend-data/src/factory.ts +++ b/packages/backend-data/src/factory.ts @@ -184,7 
+184,7 @@ class DataGenerator implements ConstructContainerEntryGenerator { this.props.authorizationModes ); } catch (error) { - if (error instanceof AmplifyError) { + if (AmplifyError.isAmplifyError(error)) { throw error; } throw new AmplifyUserError( diff --git a/packages/backend-deployer/CHANGELOG.md b/packages/backend-deployer/CHANGELOG.md index 944794ffd4..63b1f4ec38 100644 --- a/packages/backend-deployer/CHANGELOG.md +++ b/packages/backend-deployer/CHANGELOG.md @@ -1,5 +1,37 @@ # @aws-amplify/backend-deployer +## 1.1.8 + +### Patch Changes + +- 583a3f2: Fix detection of AmplifyErrors +- Updated dependencies [583a3f2] + - @aws-amplify/platform-core@1.2.0 + +## 1.1.7 + +### Patch Changes + +- 7bf0c64: reclassify as error, UnknownFault, Error: The security token included in the request is expired +- 889bdb7: Handle case where synthesis renders empty cdk assembly +- a191fe5: add stack is in a state and can not be updated to error mapper + +## 1.1.6 + +### Patch Changes + +- b56d344: update aws-cdk lib to ^2.158.0 +- Updated dependencies [b56d344] + - @aws-amplify/plugin-types@1.3.1 + +## 1.1.5 + +### Patch Changes + +- 93d419f: detect more generic CFN deployment failure errors +- 777c80d: detect transform errors with multiple errors +- b35f01d: detect generic CFN stack creation errors + ## 1.1.4 ### Patch Changes diff --git a/packages/backend-deployer/package.json b/packages/backend-deployer/package.json index 8e2be97e38..555ebe641a 100644 --- a/packages/backend-deployer/package.json +++ b/packages/backend-deployer/package.json @@ -1,6 +1,6 @@ { "name": "@aws-amplify/backend-deployer", - "version": "1.1.4", + "version": "1.1.8", "type": "module", "publishConfig": { "access": "public" @@ -19,13 +19,13 @@ }, "license": "Apache-2.0", "dependencies": { - "@aws-amplify/platform-core": "^1.0.6", - "@aws-amplify/plugin-types": "^1.2.2", + "@aws-amplify/platform-core": "^1.2.0", + "@aws-amplify/plugin-types": "^1.3.1", "execa": "^8.0.1", "tsx": "^4.6.1" }, "peerDependencies": { - "aws-cdk": "^2.152.0", + "aws-cdk": "^2.158.0", "typescript": "^5.0.0" } } diff --git a/packages/backend-deployer/src/cdk_deployer.ts b/packages/backend-deployer/src/cdk_deployer.ts index 586f68d746..0771673117 100644 --- a/packages/backend-deployer/src/cdk_deployer.ts +++ b/packages/backend-deployer/src/cdk_deployer.ts @@ -86,7 +86,7 @@ export class CDKDeployer implements BackendDeployer { } catch (typeError: unknown) { if ( synthError && - typeError instanceof AmplifyError && + AmplifyError.isAmplifyError(typeError) && typeError.cause?.message.match( /Cannot find module '\$amplify\/env\/.*' or its corresponding type declarations/ ) diff --git a/packages/backend-deployer/src/cdk_error_mapper.test.ts b/packages/backend-deployer/src/cdk_error_mapper.test.ts index 45f36f61e2..20f01df933 100644 --- a/packages/backend-deployer/src/cdk_error_mapper.test.ts +++ b/packages/backend-deployer/src/cdk_error_mapper.test.ts @@ -22,6 +22,15 @@ const testErrorMappings = [ errorName: 'ExpiredTokenError', expectedDownstreamErrorMessage: 'ExpiredToken', }, + { + errorMessage: + 'Error: The security token included in the request is expired', + expectedTopLevelErrorMessage: + 'The security token included in the request is invalid.', + errorName: 'ExpiredTokenError', + expectedDownstreamErrorMessage: + 'Error: The security token included in the request is expired', + }, { errorMessage: 'Access Denied', expectedTopLevelErrorMessage: @@ -87,6 +96,26 @@ const testErrorMappings = [ errorName: 'BootstrapNotDetectedError', 
expectedDownstreamErrorMessage: 'Is this account bootstrapped', }, + { + errorMessage: + // eslint-disable-next-line spellcheck/spell-checker + "This CDK deployment requires bootstrap stack version '6', but during the confirmation via SSM parameter /cdk-bootstrap/hnb659fds/version the following error occurred: AccessDeniedException", + expectedTopLevelErrorMessage: + 'Unable to detect CDK bootstrap stack due to permission issues.', + errorName: 'BootstrapDetectionError', + expectedDownstreamErrorMessage: + // eslint-disable-next-line spellcheck/spell-checker + "This CDK deployment requires bootstrap stack version '6', but during the confirmation via SSM parameter /cdk-bootstrap/hnb659fds/version the following error occurred: AccessDeniedException", + }, + { + errorMessage: + "This CDK deployment requires bootstrap stack version '6', found '5'. Please run 'cdk bootstrap'.", + expectedTopLevelErrorMessage: + 'This AWS account and region has outdated CDK bootstrap stack.', + errorName: 'BootstrapOutdatedError', + expectedDownstreamErrorMessage: + "This CDK deployment requires bootstrap stack version '6', found '5'. Please run 'cdk bootstrap'.", + }, { errorMessage: 'Amplify Backend not found in amplify/backend.ts', expectedTopLevelErrorMessage: @@ -139,6 +168,12 @@ const testErrorMappings = [ errorName: 'CloudFormationDeploymentError', expectedDownstreamErrorMessage: `The stack named some-stack failed to deploy: UPDATE_ROLLBACK_COMPLETE: Resource handler returned message: The code contains one or more errors. (Service: AppSync, Status Code: 400, Request ID: 12345) (RequestToken: 123, HandlerErrorCode: GeneralServiceException), Embedded stack was not successfully updated. Currently in UPDATE_ROLLBACK_IN_PROGRESS with reason: The following resource(s) failed to create: [resource1, resource2]. [39m`, }, + { + errorMessage: `[31m some-stack failed: The stack named some-stack failed creation, it may need to be manually deleted from the AWS console: ROLLBACK_COMPLETE`, + expectedTopLevelErrorMessage: 'The CloudFormation deployment has failed.', + errorName: 'CloudFormationDeploymentError', + expectedDownstreamErrorMessage: `The stack named some-stack failed creation, it may need to be manually deleted from the AWS console: ROLLBACK_COMPLETE`, + }, { errorMessage: 'CFN error happened: Updates are not allowed for property: some property', @@ -221,6 +256,22 @@ const testErrorMappings = [ errorName: 'ESBuildError', expectedDownstreamErrorMessage: undefined, }, + { + errorMessage: + `Error [TransformError]: Transform failed with 2 errors:` + + EOL + + `/Users/user/work-space/amplify-app/amplify/auth/resource.ts:48:4: ERROR: Multiple exports with the same name auth` + + EOL + + `/Users/user/work-space/amplify-app/amplify/auth/resource.ts:48:4: ERROR: The symbol auth has already been declared` + + EOL + + ` at failureErrorWithLog (/Users/user/work-space/amplify-app/node_modules/tsx/node_modules/esbuild/lib/main.js:1472:15)`, + expectedTopLevelErrorMessage: + `/Users/user/work-space/amplify-app/amplify/auth/resource.ts:48:4: ERROR: Multiple exports with the same name auth` + + EOL + + `/Users/user/work-space/amplify-app/amplify/auth/resource.ts:48:4: ERROR: The symbol auth has already been declared`, + errorName: 'ESBuildError', + expectedDownstreamErrorMessage: undefined, + }, { errorMessage: `some rubbish before` + @@ -308,6 +359,27 @@ const testErrorMappings = [ expectedDownstreamErrorMessage: `This CDK CLI is not compatible with the CDK library used by your application. 
Please upgrade the CLI to the latest version. (Cloud assembly schema version mismatch: Maximum schema version supported is 36.0.0, but found 36.1.1)`, }, + { + errorMessage: `[31m amplify-some-stack failed: ValidationError: Stack:stack-arn is in UPDATE_ROLLBACK_FAILED state and can not be updated.`, + expectedTopLevelErrorMessage: + 'The CloudFormation deployment failed due to amplify-some-stack being in UPDATE_ROLLBACK_FAILED state.', + errorName: 'CloudFormationDeploymentError', + expectedDownstreamErrorMessage: undefined, + }, + { + errorMessage: `ENOENT: no such file or directory, open '.amplify/artifacts/cdk.out/manifest.json'`, + expectedTopLevelErrorMessage: + 'The Amplify backend definition is missing `defineBackend` call.', + errorName: 'MissingDefineBackendError', + expectedDownstreamErrorMessage: undefined, + }, + { + errorMessage: `ENOENT: no such file or directory, open '.amplify\\artifacts\\cdk.out\\manifest.json'`, + expectedTopLevelErrorMessage: + 'The Amplify backend definition is missing `defineBackend` call.', + errorName: 'MissingDefineBackendError', + expectedDownstreamErrorMessage: undefined, + }, ]; void describe('invokeCDKCommand', { concurrency: 1 }, () => { diff --git a/packages/backend-deployer/src/cdk_error_mapper.ts b/packages/backend-deployer/src/cdk_error_mapper.ts index fe5246ff3a..994791f4e3 100644 --- a/packages/backend-deployer/src/cdk_error_mapper.ts +++ b/packages/backend-deployer/src/cdk_error_mapper.ts @@ -50,6 +50,12 @@ export class CdkErrorMapper { underlyingError = undefined; } } + // remove any trailing EOL + matchingError.humanReadableErrorMessage = + matchingError.humanReadableErrorMessage.replace( + new RegExp(`${this.multiLineEolRegex}$`), + '' + ); } else { underlyingError.message = matchGroups[0]; } @@ -84,10 +90,12 @@ export class CdkErrorMapper { classification: AmplifyErrorClassification; }> => [ { - errorRegex: /ExpiredToken/, + errorRegex: + /ExpiredToken|Error: The security token included in the request is expired/, humanReadableErrorMessage: 'The security token included in the request is invalid.', - resolutionMessage: 'Ensure your local AWS credentials are valid.', + resolutionMessage: + "Please update your AWS credentials. You can do this by running `aws configure` or by updating your AWS credentials file. If you're using temporary credentials, you may need to obtain new ones.", errorName: 'ExpiredTokenError', classification: 'ERROR', }, @@ -110,6 +118,26 @@ export class CdkErrorMapper { errorName: 'BootstrapNotDetectedError', classification: 'ERROR', }, + { + errorRegex: + /This CDK deployment requires bootstrap stack version \S+, found \S+\. 
Please run 'cdk bootstrap'\./, + humanReadableErrorMessage: + 'This AWS account and region has outdated CDK bootstrap stack.', + resolutionMessage: + 'Run `cdk bootstrap aws://{YOUR_ACCOUNT_ID}/{YOUR_REGION}` locally to re-bootstrap.', + errorName: 'BootstrapOutdatedError', + classification: 'ERROR', + }, + { + errorRegex: + /This CDK deployment requires bootstrap stack version \S+, but during the confirmation via SSM parameter \S+ the following error occurred: AccessDeniedException/, + humanReadableErrorMessage: + 'Unable to detect CDK bootstrap stack due to permission issues.', + resolutionMessage: + "Ensure that AWS credentials have an IAM policy that grants read access to 'arn:aws:ssm:*:*:parameter/cdk-bootstrap/*' SSM parameters.", + errorName: 'BootstrapDetectionError', + classification: 'ERROR', + }, { errorRegex: /This CDK CLI is not compatible with the CDK library used by your application\. Please upgrade the CLI to the latest version\./, @@ -193,8 +221,9 @@ export class CdkErrorMapper { classification: 'ERROR', }, { + // If there are multiple errors, capture all lines containing the errors errorRegex: new RegExp( - `\\[TransformError\\]: Transform failed with .* error:${this.multiLineEolRegex}(?.*)` + `\\[TransformError\\]: Transform failed with .* error(s?):${this.multiLineEolRegex}(?(.*ERROR:.*${this.multiLineEolRegex})+)` ), humanReadableErrorMessage: '{esBuildErrorMessage}', resolutionMessage: @@ -254,6 +283,20 @@ export class CdkErrorMapper { errorName: 'BackendSynthError', classification: 'ERROR', }, + { + // This happens when 'defineBackend' call is missing in customer's app. + // 'defineBackend' creates CDK app in memory. If it's missing then no cdk.App exists in memory and nothing is rendered. + // During 'cdk synth' CDK CLI attempts to read CDK assembly after calling customer's app. + // But no files are rendered causing it to fail. + errorRegex: + /ENOENT: no such file or directory, open '\.amplify.artifacts.cdk\.out.manifest\.json'/, + humanReadableErrorMessage: + 'The Amplify backend definition is missing `defineBackend` call.', + resolutionMessage: + 'Check your backend definition in the `amplify` folder. 
Ensure that `amplify/backend.ts` contains `defineBackend` call.', + errorName: 'MissingDefineBackendError', + classification: 'ERROR', + }, { // "Catch all": the backend entry point file is referenced in the stack indicating a problem in customer code errorRegex: /amplify\/backend/, @@ -274,10 +317,20 @@ export class CdkErrorMapper { errorName: 'SecretNotSetError', classification: 'ERROR', }, + { + errorRegex: + /(?amplify-[a-z0-9-]+)(.*) failed: ValidationError: Stack:(.*) is in (?.*) state and can not be updated/, + humanReadableErrorMessage: + 'The CloudFormation deployment failed due to {stackName} being in {state} state.', + resolutionMessage: + 'Find more information in the CloudFormation AWS Console for this stack.', + errorName: 'CloudFormationDeploymentError', + classification: 'ERROR', + }, { // Note that the order matters, this should be the last as it captures generic CFN error errorRegex: new RegExp( - `Deployment failed: (.*)${this.multiLineEolRegex}|The stack named (.*) failed to deploy: (.*)` + `Deployment failed: (.*)${this.multiLineEolRegex}|The stack named (.*) failed (to deploy:|creation,) (.*)` ), humanReadableErrorMessage: 'The CloudFormation deployment has failed.', resolutionMessage: @@ -293,16 +346,18 @@ export type CDKDeploymentError = | 'BackendBuildError' | 'BackendSynthError' | 'BootstrapNotDetectedError' + | 'BootstrapDetectionError' + | 'BootstrapOutdatedError' | 'CDKResolveAWSAccountError' | 'CDKVersionMismatchError' | 'CFNUpdateNotSupportedError' | 'CloudFormationDeploymentError' | 'FilePermissionsError' + | 'MissingDefineBackendError' | 'MultipleSandboxInstancesError' | 'ESBuildError' | 'ExpiredTokenError' | 'FileConventionError' - | 'FileConventionError' | 'ModuleNotFoundError' | 'SecretNotSetError' | 'SyntaxError'; diff --git a/packages/backend-function/API.md b/packages/backend-function/API.md index c036670638..7348dba85e 100644 --- a/packages/backend-function/API.md +++ b/packages/backend-function/API.md @@ -22,6 +22,11 @@ export type CronSchedule = `${string} ${string} ${string} ${string} ${string}` | // @public export const defineFunction: (props?: FunctionProps) => ConstructFactory & ResourceAccessAcceptorFactory & AddEnvironmentFactory & StackProvider>; +// @public (undocumented) +export type FunctionBundlingOptions = { + minify?: boolean; +}; + // @public (undocumented) export type FunctionProps = { name?: string; @@ -32,6 +37,7 @@ export type FunctionProps = { runtime?: NodeVersion; schedule?: FunctionSchedule | FunctionSchedule[]; layers?: Record; + bundling?: FunctionBundlingOptions; }; // @public (undocumented) diff --git a/packages/backend-function/CHANGELOG.md b/packages/backend-function/CHANGELOG.md index 4cb76fefa5..26030ae2eb 100644 --- a/packages/backend-function/CHANGELOG.md +++ b/packages/backend-function/CHANGELOG.md @@ -1,5 +1,40 @@ # @aws-amplify/backend-function +## 1.7.4 + +### Patch Changes + +- 4e97389: add validation if layer arn region does not match function region + +## 1.7.3 + +### Patch Changes + +- b56d344: update aws-cdk lib to ^2.158.0 +- Updated dependencies [b56d344] + - @aws-amplify/backend-output-storage@1.1.3 + - @aws-amplify/plugin-types@1.3.1 + +## 1.7.2 + +### Patch Changes + +- 601a2c1: dedupe environment variables in amplify env type generator + +## 1.7.1 + +### Patch Changes + +- bd4ff4d: Fix jsdocs that incorrectly state default memory settings +- Updated dependencies [5f46d8d] + - @aws-amplify/backend-output-schemas@1.4.0 + +## 1.7.0 + +### Minor Changes + +- 4720412: Add minify option to defineFunction + 
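The `minify` option noted in the 1.7.0 entry above is surfaced through the new `bundling` property on `defineFunction`. A minimal sketch of opting out of minification follows; the function name and entry path are placeholders, not taken from this changeset:

```ts
// Hypothetical amplify/functions/say-hello/resource.ts
import { defineFunction } from '@aws-amplify/backend';

export const sayHello = defineFunction({
  name: 'say-hello',
  entry: './handler.ts',
  bundling: {
    // Per FunctionBundlingOptions above, minify defaults to true, so this is
    // only needed when un-minified output is desired (e.g. for debugging).
    minify: false,
  },
});
```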
## 1.6.0 ### Minor Changes diff --git a/packages/backend-function/package.json b/packages/backend-function/package.json index d4a4c3b60f..2f51b13678 100644 --- a/packages/backend-function/package.json +++ b/packages/backend-function/package.json @@ -1,6 +1,6 @@ { "name": "@aws-amplify/backend-function", - "version": "1.6.0", + "version": "1.7.4", "type": "module", "publishConfig": { "access": "public" @@ -19,20 +19,20 @@ }, "license": "Apache-2.0", "dependencies": { - "@aws-amplify/backend-output-schemas": "^1.1.0", - "@aws-amplify/backend-output-storage": "^1.1.2", - "@aws-amplify/plugin-types": "^1.3.0", + "@aws-amplify/backend-output-schemas": "^1.4.0", + "@aws-amplify/backend-output-storage": "^1.1.3", + "@aws-amplify/plugin-types": "^1.3.1", "execa": "^8.0.1" }, "devDependencies": { - "@aws-amplify/backend-platform-test-stubs": "^0.3.5", + "@aws-amplify/backend-platform-test-stubs": "^0.3.6", "@aws-amplify/platform-core": "^1.1.0", "@aws-sdk/client-ssm": "^3.624.0", "aws-sdk": "^2.1550.0", "uuid": "^9.0.1" }, "peerDependencies": { - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0" } } diff --git a/packages/backend-function/src/factory.test.ts b/packages/backend-function/src/factory.test.ts index 477ce69aec..872e91b5bf 100644 --- a/packages/backend-function/src/factory.test.ts +++ b/packages/backend-function/src/factory.test.ts @@ -17,6 +17,7 @@ import { NodeVersion, defineFunction } from './factory.js'; import { lambdaWithDependencies } from './test-assets/lambda-with-dependencies/resource.js'; import { Runtime } from 'aws-cdk-lib/aws-lambda'; import { Policy, PolicyStatement } from 'aws-cdk-lib/aws-iam'; +import { AmplifyUserError } from '@aws-amplify/platform-core'; const createStackAndSetContext = (): Stack => { const app = new App(); @@ -411,6 +412,57 @@ void describe('AmplifyFunctionFactory', () => { }); }); + void describe('layers property', () => { + void it('defaults to no layers', () => { + const lambda = defineFunction({ + entry: './test-assets/default-lambda/handler.ts', + }).getInstance(getInstanceProps); + const template = Template.fromStack(lambda.stack); + + template.resourceCountIs('AWS::Lambda::LayerVersion', 0); + }); + + void it('throws if layer arn region is not the same as function region', () => { + assert.throws( + () => + defineFunction({ + entry: './test-assets/default-lambda/handler.ts', + layers: { + layer1: + 'arn:aws:lambda:some-region:123456789012:layer:my-layer-1:1', + }, + }).getInstance(getInstanceProps), + (error: AmplifyUserError) => { + assert.strictEqual( + error.message, + 'Region in ARN does not match function region for layer: layer1' + ); + assert.ok(error.resolution); + return true; + } + ); + }); + }); + + void describe('minify property', () => { + void it('sets minify to false', () => { + const lambda = defineFunction({ + entry: './test-assets/default-lambda/handler.ts', + bundling: { + minify: false, + }, + }).getInstance(getInstanceProps); + const template = Template.fromStack(lambda.stack); + // There isn't a way to check the contents of the bundled lambda using the CDK Template utility + // So we just check that the lambda was created properly in the CFN template. 
+ // There is an e2e test that validates proper lambda bundling + template.resourceCountIs('AWS::Lambda::Function', 1); + template.hasResourceProperties('AWS::Lambda::Function', { + Handler: 'index.handler', + }); + }); + }); + void describe('resourceAccessAcceptor', () => { void it('attaches policy to execution role and configures ssm environment context', () => { const functionFactory = defineFunction({ diff --git a/packages/backend-function/src/factory.ts b/packages/backend-function/src/factory.ts index a0f6feb06b..511bf320e6 100644 --- a/packages/backend-function/src/factory.ts +++ b/packages/backend-function/src/factory.ts @@ -23,16 +23,11 @@ import { SsmEnvironmentEntry, StackProvider, } from '@aws-amplify/plugin-types'; -import { Duration, Stack, Tags } from 'aws-cdk-lib'; +import { Duration, Lazy, Stack, Tags, Token } from 'aws-cdk-lib'; import { Rule } from 'aws-cdk-lib/aws-events'; import * as targets from 'aws-cdk-lib/aws-events-targets'; import { Policy } from 'aws-cdk-lib/aws-iam'; -import { - CfnFunction, - ILayerVersion, - LayerVersion, - Runtime, -} from 'aws-cdk-lib/aws-lambda'; +import { CfnFunction, LayerVersion, Runtime } from 'aws-cdk-lib/aws-lambda'; import { NodejsFunction, OutputFormat } from 'aws-cdk-lib/aws-lambda-nodejs'; import { Construct } from 'constructs'; import { readFileSync } from 'fs'; @@ -104,7 +99,7 @@ export type FunctionProps = { /** * An amount of memory (RAM) to allocate to the function between 128 and 10240 MB. * Must be a whole number. - * Default is 128MB. + * Default is 512MB. */ memoryMB?: number; @@ -145,6 +140,20 @@ export type FunctionProps = { * @see [AWS documentation for Lambda layers](https://docs.aws.amazon.com/lambda/latest/dg/chapter-layers.html) */ layers?: Record; + + /* + * Options for bundling the function code. + */ + bundling?: FunctionBundlingOptions; +}; + +export type FunctionBundlingOptions = { + /** + * Whether to minify the function code. + * + * Defaults to true. + */ + minify?: boolean; }; /** @@ -192,6 +201,7 @@ class FunctionFactory implements ConstructFactory { environment: this.props.environment ?? {}, runtime: this.resolveRuntime(), schedule: this.resolveSchedule(), + bundling: this.resolveBundling(), layers, }; }; @@ -298,6 +308,27 @@ class FunctionFactory implements ConstructFactory { return this.props.schedule; }; + + private resolveBundling = () => { + const bundlingDefault = { + format: OutputFormat.ESM, + bundleAwsSDK: true, + loader: { + '.node': 'file', + }, + minify: true, + sourceMap: true, + }; + + return { + ...bundlingDefault, + minify: this.resolveMinify(this.props.bundling), + }; + }; + + private resolveMinify = (bundling?: FunctionBundlingOptions) => { + return bundling?.minify === undefined ? true : bundling.minify; + }; } type HydratedFunctionProps = Required; @@ -314,19 +345,10 @@ class FunctionGenerator implements ConstructContainerEntryGenerator { scope, backendSecretResolver, }: GenerateContainerEntryProps) => { - // resolve layers to LayerVersion objects for the NodejsFunction constructor using the scope. 
- const resolvedLayers = Object.entries(this.props.layers).map(([key, arn]) => - LayerVersion.fromLayerVersionArn( - scope, - `${this.props.name}-${key}-layer`, - arn - ) - ); - return new AmplifyFunction( scope, this.props.name, - { ...this.props, resolvedLayers }, + this.props, backendSecretResolver, this.outputStorageStrategy ); @@ -346,7 +368,7 @@ class AmplifyFunction constructor( scope: Construct, id: string, - props: HydratedFunctionProps & { resolvedLayers: ILayerVersion[] }, + props: HydratedFunctionProps, backendSecretResolver: BackendSecretResolver, outputStorageStrategy: BackendOutputStorageStrategy ) { @@ -354,6 +376,31 @@ class AmplifyFunction this.stack = Stack.of(scope); + // resolve layers to LayerVersion objects for the NodejsFunction constructor using the scope. + const resolvedLayers = Object.entries(props.layers).map(([key, arn]) => { + const layerRegion = arn.split(':')[3]; + // If region is an unresolved token, use lazy to get region + const region = Token.isUnresolved(this.stack.region) + ? Lazy.string({ + produce: () => this.stack.region, + }) + : this.stack.region; + + if (layerRegion !== region) { + throw new AmplifyUserError('InvalidLayerArnRegionError', { + message: `Region in ARN does not match function region for layer: ${key}`, + resolution: + 'Update the layer ARN with the same region as the function', + }); + } + + return LayerVersion.fromLayerVersionArn( + scope, + `${props.name}-${key}-layer`, + arn + ); + }); + const runtime = nodeVersionMap[props.runtime]; const require = createRequire(import.meta.url); @@ -396,17 +443,11 @@ class AmplifyFunction timeout: Duration.seconds(props.timeoutSeconds), memorySize: props.memoryMB, runtime: nodeVersionMap[props.runtime], - layers: props.resolvedLayers, + layers: resolvedLayers, bundling: { - format: OutputFormat.ESM, + ...props.bundling, banner: bannerCode, - bundleAwsSDK: true, inject: shims, - loader: { - '.node': 'file', - }, - minify: true, - sourceMap: true, externalModules: Object.keys(props.layers), }, }); diff --git a/packages/backend-function/src/function_env_type_generator.test.ts b/packages/backend-function/src/function_env_type_generator.test.ts index 0f950e2e31..a5d85955a3 100644 --- a/packages/backend-function/src/function_env_type_generator.test.ts +++ b/packages/backend-function/src/function_env_type_generator.test.ts @@ -73,6 +73,38 @@ void describe('FunctionEnvironmentTypeGenerator', () => { await fsp.rm(targetDirectory, { recursive: true, force: true }); }); + + void it('does not generate duplicate environment variables', () => { + const fsOpenSyncMock = mock.method(fs, 'openSync'); + const fsWriteFileSyncMock = mock.method(fs, 'writeFileSync', () => null); + fsOpenSyncMock.mock.mockImplementation(() => 0); + const functionEnvironmentTypeGenerator = + new FunctionEnvironmentTypeGenerator('testFunction'); + + functionEnvironmentTypeGenerator.generateTypedProcessEnvShim([ + 'TEST_ENV', + 'TEST_ENV', + 'ANOTHER_ENV', + ]); + + const generatedContent = + fsWriteFileSyncMock.mock.calls[0].arguments[1]?.toString() ?? 
''; + + // Check TEST_ENV appears only once + assert.equal( + (generatedContent.match(/TEST_ENV: string;/g) || []).length, + 1, + 'TEST_ENV should appear only once' + ); + + // Check ANOTHER_ENV also appears + assert.ok( + generatedContent.includes('ANOTHER_ENV: string;'), + 'ANOTHER_ENV should be included' + ); + + mock.restoreAll(); + }); void it('clears the generated env directory even if there are multiple calls', async () => { const fsExistsSyncMock = mock.method(fs, 'existsSync', () => true); const fsRmSyncMock = mock.method(fs, 'rmSync', () => {}); diff --git a/packages/backend-function/src/function_env_type_generator.ts b/packages/backend-function/src/function_env_type_generator.ts index 7302bc75ba..162c5ecd37 100644 --- a/packages/backend-function/src/function_env_type_generator.ts +++ b/packages/backend-function/src/function_env_type_generator.ts @@ -63,7 +63,11 @@ export class FunctionEnvironmentTypeGenerator { `/** Amplify backend environment variables available at runtime, this includes environment variables defined in \`defineFunction\` and by cross resource mechanisms */` ); declarations.push(`type ${amplifyBackendEnvVarTypeName} = {`); - amplifyBackendEnvVars.forEach((envName) => { + + // Use a Set to remove duplicates + const uniqueEnvVars = new Set(amplifyBackendEnvVars); + + uniqueEnvVars.forEach((envName) => { const declaration = `${this.indentation}${envName}: string;`; declarations.push(declaration); diff --git a/packages/backend-function/src/layer_parser.test.ts b/packages/backend-function/src/layer_parser.test.ts index 4d1e8ea5cc..a00fb30624 100644 --- a/packages/backend-function/src/layer_parser.test.ts +++ b/packages/backend-function/src/layer_parser.test.ts @@ -20,7 +20,7 @@ const createStackAndSetContext = (): Stack => { app.node.setContext('amplify-backend-name', 'testEnvName'); app.node.setContext('amplify-backend-namespace', 'testBackendId'); app.node.setContext('amplify-backend-type', 'branch'); - const stack = new Stack(app); + const stack = new Stack(app, 'Stack', { env: { region: 'us-east-1' } }); return stack; }; diff --git a/packages/backend-output-schemas/API.md b/packages/backend-output-schemas/API.md index 6fd3f07d27..23c5a41d89 100644 --- a/packages/backend-output-schemas/API.md +++ b/packages/backend-output-schemas/API.md @@ -133,6 +133,7 @@ export const unifiedBackendOutputSchema: z.ZodObject<{ oauthRedirectSignOut: z.ZodOptional; oauthClientId: z.ZodOptional; oauthResponseType: z.ZodOptional; + groups: z.ZodOptional; }, "strip", z.ZodTypeAny, { authRegion: string; userPoolId: string; @@ -153,6 +154,7 @@ export const unifiedBackendOutputSchema: z.ZodObject<{ oauthRedirectSignOut?: string | undefined; oauthClientId?: string | undefined; oauthResponseType?: string | undefined; + groups?: string | undefined; }, { authRegion: string; userPoolId: string; @@ -173,6 +175,7 @@ export const unifiedBackendOutputSchema: z.ZodObject<{ oauthRedirectSignOut?: string | undefined; oauthClientId?: string | undefined; oauthResponseType?: string | undefined; + groups?: string | undefined; }>; }, "strip", z.ZodTypeAny, { version: "1"; @@ -196,6 +199,7 @@ export const unifiedBackendOutputSchema: z.ZodObject<{ oauthRedirectSignOut?: string | undefined; oauthClientId?: string | undefined; oauthResponseType?: string | undefined; + groups?: string | undefined; }; }, { version: "1"; @@ -219,6 +223,7 @@ export const unifiedBackendOutputSchema: z.ZodObject<{ oauthRedirectSignOut?: string | undefined; oauthClientId?: string | undefined; oauthResponseType?: string | undefined; 
+ groups?: string | undefined; }; }>]>>; "AWS::Amplify::GraphQL": z.ZodOptional; oauthClientId: z.ZodOptional; oauthResponseType: z.ZodOptional; + groups: z.ZodOptional; }, "strip", z.ZodTypeAny, { authRegion: string; userPoolId: string; @@ -575,6 +583,7 @@ export const versionedAuthOutputSchema: z.ZodDiscriminatedUnion<"version", [z.Zo oauthRedirectSignOut?: string | undefined; oauthClientId?: string | undefined; oauthResponseType?: string | undefined; + groups?: string | undefined; }, { authRegion: string; userPoolId: string; @@ -595,6 +604,7 @@ export const versionedAuthOutputSchema: z.ZodDiscriminatedUnion<"version", [z.Zo oauthRedirectSignOut?: string | undefined; oauthClientId?: string | undefined; oauthResponseType?: string | undefined; + groups?: string | undefined; }>; }, "strip", z.ZodTypeAny, { version: "1"; @@ -618,6 +628,7 @@ export const versionedAuthOutputSchema: z.ZodDiscriminatedUnion<"version", [z.Zo oauthRedirectSignOut?: string | undefined; oauthClientId?: string | undefined; oauthResponseType?: string | undefined; + groups?: string | undefined; }; }, { version: "1"; @@ -641,6 +652,7 @@ export const versionedAuthOutputSchema: z.ZodDiscriminatedUnion<"version", [z.Zo oauthRedirectSignOut?: string | undefined; oauthClientId?: string | undefined; oauthResponseType?: string | undefined; + groups?: string | undefined; }; }>]>; diff --git a/packages/backend-output-schemas/CHANGELOG.md b/packages/backend-output-schemas/CHANGELOG.md index a50379c41e..0212c02ffd 100644 --- a/packages/backend-output-schemas/CHANGELOG.md +++ b/packages/backend-output-schemas/CHANGELOG.md @@ -1,5 +1,11 @@ # @aws-amplify/backend-output-schemas +## 1.4.0 + +### Minor Changes + +- 5f46d8d: add user groups to outputs + ## 1.3.0 ### Minor Changes diff --git a/packages/backend-output-schemas/package.json b/packages/backend-output-schemas/package.json index 1f1a70be4b..93e0106dcb 100644 --- a/packages/backend-output-schemas/package.json +++ b/packages/backend-output-schemas/package.json @@ -1,6 +1,6 @@ { "name": "@aws-amplify/backend-output-schemas", - "version": "1.3.0", + "version": "1.4.0", "type": "commonjs", "publishConfig": { "access": "public" diff --git a/packages/backend-output-schemas/src/auth/v1.ts b/packages/backend-output-schemas/src/auth/v1.ts index af4d65cc8c..a17c2cd237 100644 --- a/packages/backend-output-schemas/src/auth/v1.ts +++ b/packages/backend-output-schemas/src/auth/v1.ts @@ -27,5 +27,6 @@ export const authOutputSchema = z.object({ oauthRedirectSignOut: z.string().optional(), oauthClientId: z.string().optional(), oauthResponseType: z.string().optional(), + groups: z.string().optional(), // JSON array as string }), }); diff --git a/packages/backend-output-storage/CHANGELOG.md b/packages/backend-output-storage/CHANGELOG.md index e8b0e9f9a5..89eac4993f 100644 --- a/packages/backend-output-storage/CHANGELOG.md +++ b/packages/backend-output-storage/CHANGELOG.md @@ -1,5 +1,13 @@ # @aws-amplify/backend-output-storage +## 1.1.3 + +### Patch Changes + +- b56d344: update aws-cdk lib to ^2.158.0 +- Updated dependencies [b56d344] + - @aws-amplify/plugin-types@1.3.1 + ## 1.1.2 ### Patch Changes diff --git a/packages/backend-output-storage/package.json b/packages/backend-output-storage/package.json index 228a0f85b1..839a1351da 100644 --- a/packages/backend-output-storage/package.json +++ b/packages/backend-output-storage/package.json @@ -1,6 +1,6 @@ { "name": "@aws-amplify/backend-output-storage", - "version": "1.1.2", + "version": "1.1.3", "type": "commonjs", "publishConfig": { "access": 
"public" @@ -21,9 +21,9 @@ "dependencies": { "@aws-amplify/backend-output-schemas": "^1.2.0", "@aws-amplify/platform-core": "^1.0.6", - "@aws-amplify/plugin-types": "^1.2.2" + "@aws-amplify/plugin-types": "^1.3.1" }, "peerDependencies": { - "aws-cdk-lib": "^2.152.0" + "aws-cdk-lib": "^2.158.0" } } diff --git a/packages/backend-platform-test-stubs/CHANGELOG.md b/packages/backend-platform-test-stubs/CHANGELOG.md index 0b73fa2b05..436f8411ba 100644 --- a/packages/backend-platform-test-stubs/CHANGELOG.md +++ b/packages/backend-platform-test-stubs/CHANGELOG.md @@ -1,5 +1,13 @@ # @aws-amplify/backend-platform-test-stubs +## 0.3.6 + +### Patch Changes + +- b56d344: update aws-cdk lib to ^2.158.0 +- Updated dependencies [b56d344] + - @aws-amplify/plugin-types@1.3.1 + ## 0.3.5 ### Patch Changes diff --git a/packages/backend-platform-test-stubs/package.json b/packages/backend-platform-test-stubs/package.json index 953c053355..4d8ab06e8f 100644 --- a/packages/backend-platform-test-stubs/package.json +++ b/packages/backend-platform-test-stubs/package.json @@ -1,6 +1,6 @@ { "name": "@aws-amplify/backend-platform-test-stubs", - "version": "0.3.5", + "version": "0.3.6", "type": "module", "private": true, "exports": { @@ -16,8 +16,8 @@ }, "license": "Apache-2.0", "dependencies": { - "@aws-amplify/plugin-types": "^1.2.2", - "aws-cdk-lib": "^2.152.0", + "@aws-amplify/plugin-types": "^1.3.1", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0" } } diff --git a/packages/backend-secret/CHANGELOG.md b/packages/backend-secret/CHANGELOG.md index 495d2b9e2c..3ff634508c 100644 --- a/packages/backend-secret/CHANGELOG.md +++ b/packages/backend-secret/CHANGELOG.md @@ -1,5 +1,17 @@ # @aws-amplify/backend-secret +## 1.1.5 + +### Patch Changes + +- 255ca18: Handle parameter not found error while deleting secret + +## 1.1.4 + +### Patch Changes + +- f87cc87: fix: internally paginate list secret calls + ## 1.1.3 ### Patch Changes diff --git a/packages/backend-secret/package.json b/packages/backend-secret/package.json index e5e6735090..8d3f372b9c 100644 --- a/packages/backend-secret/package.json +++ b/packages/backend-secret/package.json @@ -1,6 +1,6 @@ { "name": "@aws-amplify/backend-secret", - "version": "1.1.3", + "version": "1.1.5", "type": "module", "publishConfig": { "access": "public" diff --git a/packages/backend-secret/src/ssm_secret.test.ts b/packages/backend-secret/src/ssm_secret.test.ts index c33da91cc1..fbde647a4d 100644 --- a/packages/backend-secret/src/ssm_secret.test.ts +++ b/packages/backend-secret/src/ssm_secret.test.ts @@ -1,6 +1,7 @@ import { beforeEach, describe, it, mock } from 'node:test'; import { GetParameterCommandOutput, + GetParametersByPathCommandInput, GetParametersByPathCommandOutput, InternalServerError, ParameterNotFound, @@ -306,6 +307,7 @@ void describe('SSMSecret', () => { assert.deepStrictEqual( mockGetParametersByPath.mock.calls[0].arguments[0], { + NextToken: undefined, Path: testBranchPath, WithDecryption: true, } @@ -337,6 +339,7 @@ void describe('SSMSecret', () => { assert.deepStrictEqual( mockGetParametersByPath.mock.calls[0].arguments[0], { + NextToken: undefined, Path: testSharedPath, WithDecryption: true, } @@ -344,6 +347,68 @@ void describe('SSMSecret', () => { assert.deepEqual(secrets, [testSecretListItem]); }); + void it('lists all secrets by internally paginating calls', async () => { + const mockGetParametersByPath = mock.method( + ssmClient, + 'getParametersByPath', + (input: GetParametersByPathCommandInput) => { + let nextToken: string | undefined = undefined; + if 
(!input.NextToken) { + nextToken = '1'; + } else if (input.NextToken === '1') { + nextToken = '2'; + } else if (input.NextToken === '2') { + nextToken = undefined; + } + return Promise.resolve({ + NextToken: nextToken, + Parameters: [ + { + Name: testSharedSecretFullNamePath.concat( + input.NextToken ?? '' + ), + Value: testSecretValue, + Version: testSecretVersion, + LastModifiedDate: testSecretLastUpdated, + }, + ], + } as GetParametersByPathCommandOutput); + } + ); + + const secrets = await ssmSecretClient.listSecrets(testBackendId); + assert.deepStrictEqual(mockGetParametersByPath.mock.calls.length, 3); + assert.deepStrictEqual( + mockGetParametersByPath.mock.calls[0].arguments[0], + { + NextToken: undefined, + Path: testSharedPath, + WithDecryption: true, + } + ); + assert.deepStrictEqual( + mockGetParametersByPath.mock.calls[1].arguments[0], + { + NextToken: '1', + Path: testSharedPath, + WithDecryption: true, + } + ); + assert.deepStrictEqual( + mockGetParametersByPath.mock.calls[2].arguments[0], + { + NextToken: '2', + Path: testSharedPath, + WithDecryption: true, + } + ); + assert.deepEqual(secrets, [ + { ...testSecretListItem, name: testSecretName }, + { ...testSecretListItem, name: testSecretName + '1' }, + { ...testSecretListItem, name: testSecretName + '2' }, + ]); + }); + void it('lists an empty list', async () => { const mockGetParametersByPath = mock.method( ssmClient, @@ -359,6 +424,7 @@ void describe('SSMSecret', () => { assert.deepStrictEqual( mockGetParametersByPath.mock.calls[0].arguments[0], { + NextToken: undefined, Path: testBranchPath, WithDecryption: true, } diff --git a/packages/backend-secret/src/ssm_secret.ts b/packages/backend-secret/src/ssm_secret.ts index c8c3421975..cd5b44563f 100644 --- a/packages/backend-secret/src/ssm_secret.ts +++ b/packages/backend-secret/src/ssm_secret.ts @@ -68,24 +68,29 @@ export class SSMSecretClient implements SecretClient { const result: SecretListItem[] = []; try { - const resp = await this.ssmClient.getParametersByPath({ - Path: path, - WithDecryption: true, - }); + let nextToken: string | undefined; + do { + const resp = await this.ssmClient.getParametersByPath({ + Path: path, + WithDecryption: true, + NextToken: nextToken, + }); - resp.Parameters?.forEach((param) => { - if (!param.Name || !param.Value) { - return; - } - const secretName = param.Name.split('/').pop(); - if (secretName) { - result.push({ - name: secretName, - version: param.Version, - lastUpdated: param.LastModifiedDate, - }); - } - }); + resp.Parameters?.forEach((param) => { + if (!param.Name || !param.Value) { + return; + } + const secretName = param.Name.split('/').pop(); + if (secretName) { + result.push({ + name: secretName, + version: param.Version, + lastUpdated: param.LastModifiedDate, + }); + } + }); + nextToken = resp.NextToken; + } while (nextToken); return result; } catch (err) { throw SecretError.createInstance(err as Error); diff --git a/packages/backend-secret/src/ssm_secret_with_amplify_error_handling.test.ts b/packages/backend-secret/src/ssm_secret_with_amplify_error_handling.test.ts index f85d327d78..88c29dd598 100644 --- a/packages/backend-secret/src/ssm_secret_with_amplify_error_handling.test.ts +++ b/packages/backend-secret/src/ssm_secret_with_amplify_error_handling.test.ts @@ -89,6 +89,31 @@ void describe('getSecretClientWithAmplifyErrorHandling', () => { ); }); + void it('throws AmplifyUserError if removeSecret fails due to ParameterNotFound error', async (context) => { + const notFoundError = new Error('Parameter not found error'); + 
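+    // The name must be 'ParameterNotFound' so the client maps it to AmplifyUserError('SSMParameterNotFoundError') instead of a fault.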
notFoundError.name = 'ParameterNotFound'; + const secretsError = SecretError.createInstance(notFoundError); + context.mock.method(rawSecretClient, 'removeSecret', () => { + throw secretsError; + }); + const secretName = 'testSecretName'; + await assert.rejects( + () => + classUnderTest.removeSecret( + { + namespace: 'testSandboxId', + name: 'testSandboxName', + type: 'sandbox', + }, + secretName + ), + new AmplifyUserError('SSMParameterNotFoundError', { + message: `Failed to remove ${secretName} secret. ParameterNotFound: Parameter not found error`, + resolution: `Make sure that ${secretName} has been set. See https://docs.amplify.aws/react/deploy-and-host/fullstack-branching/secrets-and-vars/.`, + }) + ); + }); + void it('throws AmplifyFault if listSecrets fails due to a non-SSM exception other than expired credentials', async (context) => { const underlyingError = new Error('some secret error'); const secretsError = SecretError.createInstance(underlyingError); diff --git a/packages/backend-secret/src/ssm_secret_with_amplify_error_handling.ts b/packages/backend-secret/src/ssm_secret_with_amplify_error_handling.ts index be3d4801ae..7d18d1c489 100644 --- a/packages/backend-secret/src/ssm_secret_with_amplify_error_handling.ts +++ b/packages/backend-secret/src/ssm_secret_with_amplify_error_handling.ts @@ -69,7 +69,7 @@ export class SSMSecretClientWithAmplifyErrorHandling implements SecretClient { secretName ); } catch (e) { - throw this.translateToAmplifyError(e, 'Remove'); + throw this.translateToAmplifyError(e, 'Remove', { name: secretName }); } }; @@ -87,6 +87,7 @@ export class SSMSecretClientWithAmplifyErrorHandling implements SecretClient { 'ExpiredTokenException', 'ExpiredToken', 'CredentialsProviderError', + 'IncompleteSignatureException', 'InvalidSignatureException', ].includes(error.cause.name) ) { @@ -100,11 +101,13 @@ export class SSMSecretClientWithAmplifyErrorHandling implements SecretClient { } if ( error.cause.name === 'ParameterNotFound' && - apiName === 'Get' && + (apiName === 'Get' || apiName === 'Remove') && secretIdentifier ) { return new AmplifyUserError('SSMParameterNotFoundError', { - message: `Failed to get ${secretIdentifier.name} secret. ${error.cause.name}: ${error.cause?.message}`, + message: `Failed to ${apiName.toLowerCase()} ${ + secretIdentifier.name + } secret. ${error.cause.name}: ${error.cause?.message}`, resolution: `Make sure that ${secretIdentifier.name} has been set. 
See https://docs.amplify.aws/react/deploy-and-host/fullstack-branching/secrets-and-vars/.`, }); } diff --git a/packages/backend-storage/CHANGELOG.md b/packages/backend-storage/CHANGELOG.md index e5eb78f3b4..c164598d79 100644 --- a/packages/backend-storage/CHANGELOG.md +++ b/packages/backend-storage/CHANGELOG.md @@ -1,5 +1,14 @@ # @aws-amplify/backend-storage +## 1.2.2 + +### Patch Changes + +- b56d344: update aws-cdk lib to ^2.158.0 +- Updated dependencies [b56d344] + - @aws-amplify/backend-output-storage@1.1.3 + - @aws-amplify/plugin-types@1.3.1 + ## 1.2.1 ### Patch Changes diff --git a/packages/backend-storage/package.json b/packages/backend-storage/package.json index ac19d60472..e6f193aedc 100644 --- a/packages/backend-storage/package.json +++ b/packages/backend-storage/package.json @@ -1,6 +1,6 @@ { "name": "@aws-amplify/backend-storage", - "version": "1.2.1", + "version": "1.2.2", "type": "module", "publishConfig": { "access": "public" @@ -20,15 +20,15 @@ "license": "Apache-2.0", "dependencies": { "@aws-amplify/backend-output-schemas": "^1.2.1", - "@aws-amplify/backend-output-storage": "^1.1.2", - "@aws-amplify/plugin-types": "^1.3.0" + "@aws-amplify/backend-output-storage": "^1.1.3", + "@aws-amplify/plugin-types": "^1.3.1" }, "devDependencies": { - "@aws-amplify/backend-platform-test-stubs": "^0.3.5", + "@aws-amplify/backend-platform-test-stubs": "^0.3.6", "@aws-amplify/platform-core": "^1.0.6" }, "peerDependencies": { - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0" } } diff --git a/packages/backend/CHANGELOG.md b/packages/backend/CHANGELOG.md index 1a319a379f..5e1f459d4d 100644 --- a/packages/backend/CHANGELOG.md +++ b/packages/backend/CHANGELOG.md @@ -1,5 +1,77 @@ # @aws-amplify/backend +## 1.6.2 + +### Patch Changes + +- 583a3f2: Fix detection of AmplifyErrors +- Updated dependencies [583a3f2] + - @aws-amplify/platform-core@1.2.0 + - @aws-amplify/backend-data@1.1.7 + +## 1.6.1 + +### Patch Changes + +- 4e97389: add validation if layer arn region does not match function region +- Updated dependencies [d0d8d4e] +- Updated dependencies [4e97389] + - @aws-amplify/client-config@1.5.2 + - @aws-amplify/backend-function@1.7.4 + +## 1.6.0 + +### Minor Changes + +- 11d62fe: Add support for custom Lambda function email senders in Auth construct + +### Patch Changes + +- b56d344: update aws-cdk lib to ^2.158.0 +- Updated dependencies [11d62fe] +- Updated dependencies [b56d344] + - @aws-amplify/backend-auth@1.3.0 + - @aws-amplify/backend-output-storage@1.1.3 + - @aws-amplify/backend-function@1.7.3 + - @aws-amplify/backend-storage@1.2.2 + - @aws-amplify/client-config@1.5.1 + - @aws-amplify/backend-data@1.1.6 + - @aws-amplify/plugin-types@1.3.1 + +## 1.5.2 + +### Patch Changes + +- 601a2c1: dedupe environment variables in amplify env type generator +- Updated dependencies [601a2c1] + - @aws-amplify/backend-function@1.7.2 + +## 1.5.1 + +### Patch Changes + +- 5f46d8d: add user groups to outputs +- Updated dependencies [0d6489d] +- Updated dependencies [bd4ff4d] +- Updated dependencies [5f46d8d] + - @aws-amplify/backend-data@1.1.5 + - @aws-amplify/backend-function@1.7.1 + - @aws-amplify/backend-output-schemas@1.4.0 + - @aws-amplify/client-config@1.5.0 + +## 1.5.0 + +### Minor Changes + +- 4720412: Add minify option to defineFunction + +### Patch Changes + +- Updated dependencies [f87cc87] +- Updated dependencies [4720412] + - @aws-amplify/backend-secret@1.1.4 + - @aws-amplify/backend-function@1.7.0 + ## 1.4.0 ### Minor Changes diff --git 
a/packages/backend/package.json b/packages/backend/package.json index 2d477fe3ee..25a1e02315 100644 --- a/packages/backend/package.json +++ b/packages/backend/package.json @@ -1,6 +1,6 @@ { "name": "@aws-amplify/backend", - "version": "1.4.0", + "version": "1.6.2", "type": "module", "publishConfig": { "access": "public" @@ -26,21 +26,21 @@ "license": "Apache-2.0", "dependencies": { "@aws-amplify/data-schema": "^1.0.0", - "@aws-amplify/backend-auth": "^1.2.0", - "@aws-amplify/backend-function": "^1.6.0", - "@aws-amplify/backend-data": "^1.1.4", - "@aws-amplify/backend-output-schemas": "^1.3.0", - "@aws-amplify/backend-output-storage": "^1.1.2", - "@aws-amplify/backend-secret": "^1.1.2", - "@aws-amplify/backend-storage": "^1.2.1", - "@aws-amplify/client-config": "^1.4.0", - "@aws-amplify/platform-core": "^1.1.0", - "@aws-amplify/plugin-types": "^1.3.0", + "@aws-amplify/backend-auth": "^1.3.0", + "@aws-amplify/backend-function": "^1.7.4", + "@aws-amplify/backend-data": "^1.1.7", + "@aws-amplify/backend-output-schemas": "^1.4.0", + "@aws-amplify/backend-output-storage": "^1.1.3", + "@aws-amplify/backend-secret": "^1.1.4", + "@aws-amplify/backend-storage": "^1.2.2", + "@aws-amplify/client-config": "^1.5.2", + "@aws-amplify/platform-core": "^1.2.0", + "@aws-amplify/plugin-types": "^1.3.1", "@aws-sdk/client-amplify": "^3.624.0", "lodash.snakecase": "^4.1.1" }, "peerDependencies": { - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0" }, "devDependencies": { diff --git a/packages/backend/src/backend_factory.test.ts b/packages/backend/src/backend_factory.test.ts index cfc01a9e8b..17567b85a8 100644 --- a/packages/backend/src/backend_factory.test.ts +++ b/packages/backend/src/backend_factory.test.ts @@ -196,7 +196,7 @@ void describe('Backend', () => { const backend = new BackendFactory({}, rootStack); const clientConfigPartial: DeepPartialAmplifyGeneratedConfigs = { - version: '1.2', + version: '1.3', custom: { someCustomOutput: 'someCustomOutputValue', }, diff --git a/packages/backend/src/backend_factory.ts b/packages/backend/src/backend_factory.ts index d2c7662df3..3a7218b3aa 100644 --- a/packages/backend/src/backend_factory.ts +++ b/packages/backend/src/backend_factory.ts @@ -33,7 +33,7 @@ const rootStackTypeIdentifier = 'root'; // Client config version that is used by `backend.addOutput()` const DEFAULT_CLIENT_CONFIG_VERSION_FOR_BACKEND_ADD_OUTPUT = - ClientConfigVersionOption.V1_2; + ClientConfigVersionOption.V1_3; /** * Factory that collects and instantiates all the Amplify backend constructs diff --git a/packages/backend/src/engine/custom_outputs_accumulator.test.ts b/packages/backend/src/engine/custom_outputs_accumulator.test.ts index dc89639ab8..77c4e4896d 100644 --- a/packages/backend/src/engine/custom_outputs_accumulator.test.ts +++ b/packages/backend/src/engine/custom_outputs_accumulator.test.ts @@ -59,11 +59,11 @@ void describe('Custom outputs accumulator', () => { ); const configPart1: DeepPartialAmplifyGeneratedConfigs = { - version: '1.2', + version: '1.3', custom: { output1: 'val1' }, }; const configPart2: DeepPartialAmplifyGeneratedConfigs = { - version: '1.2', + version: '1.3', custom: { output2: 'val2' }, }; accumulator.addOutput(configPart1); @@ -115,7 +115,7 @@ void describe('Custom outputs accumulator', () => { assert.throws( () => - accumulator.addOutput({ version: '1.2', custom: { output1: 'val1' } }), + accumulator.addOutput({ version: '1.3', custom: { output1: 'val1' } }), (error: AmplifyUserError) => { assert.strictEqual( error.message, diff 
--git a/packages/cli-core/CHANGELOG.md b/packages/cli-core/CHANGELOG.md index 6aefdbed09..290f6fb5c1 100644 --- a/packages/cli-core/CHANGELOG.md +++ b/packages/cli-core/CHANGELOG.md @@ -1,5 +1,11 @@ # @aws-amplify/cli-core +## 1.2.0 + +### Minor Changes + +- c3c3057: update ctrl+c behavior to always print guidance to delete and exit with no prompt + ## 1.1.3 ### Patch Changes diff --git a/packages/cli-core/package.json b/packages/cli-core/package.json index 61dd48e712..0d1e6f022d 100644 --- a/packages/cli-core/package.json +++ b/packages/cli-core/package.json @@ -1,6 +1,6 @@ { "name": "@aws-amplify/cli-core", - "version": "1.1.3", + "version": "1.2.0", "type": "module", "publishConfig": { "access": "public" diff --git a/packages/cli-core/src/package-manager-controller/package_manager_controller_base.ts b/packages/cli-core/src/package-manager-controller/package_manager_controller_base.ts index d722ef84f5..4d9786b391 100644 --- a/packages/cli-core/src/package-manager-controller/package_manager_controller_base.ts +++ b/packages/cli-core/src/package-manager-controller/package_manager_controller_base.ts @@ -137,6 +137,7 @@ export abstract class PackageManagerControllerBase /** * allowsSignalPropagation - Determines if the package manager allows the process * signals such as SIGINT to be propagated to the underlying node process. + * @deprecated */ allowsSignalPropagation = () => true; diff --git a/packages/cli-core/src/package-manager-controller/pnpm_package_manager_controller.ts b/packages/cli-core/src/package-manager-controller/pnpm_package_manager_controller.ts index 321ad9cd26..5eb13ea7de 100644 --- a/packages/cli-core/src/package-manager-controller/pnpm_package_manager_controller.ts +++ b/packages/cli-core/src/package-manager-controller/pnpm_package_manager_controller.ts @@ -32,10 +32,4 @@ export class PnpmPackageManagerController extends PackageManagerControllerBase { existsSync ); } - - /** - * Pnpm doesn't handle the node process gracefully during the SIGINT life cycle. - * See: https://github.com/pnpm/pnpm/issues/7374 - */ - allowsSignalPropagation = () => false; } diff --git a/packages/cli-core/src/package-manager-controller/yarn_classic_package_manager_controller.ts b/packages/cli-core/src/package-manager-controller/yarn_classic_package_manager_controller.ts index 2f7a837e76..48a4330ed9 100644 --- a/packages/cli-core/src/package-manager-controller/yarn_classic_package_manager_controller.ts +++ b/packages/cli-core/src/package-manager-controller/yarn_classic_package_manager_controller.ts @@ -37,12 +37,6 @@ export class YarnClassicPackageManagerController extends PackageManagerControlle await this.addTypescript(targetDir); await super.initializeTsConfig(targetDir); }; - /** - * - * Yarn doesn't respect the SIGINT life cycle and exits immediately leaving - * the node process hanging. 
See: https://github.com/yarnpkg/yarn/issues/8895 - */ - allowsSignalPropagation = () => false; private addTypescript = async (targetDir: string) => { await this.executeWithDebugLogger( diff --git a/packages/cli/CHANGELOG.md b/packages/cli/CHANGELOG.md index 6a1e1bfa24..8259daaccb 100644 --- a/packages/cli/CHANGELOG.md +++ b/packages/cli/CHANGELOG.md @@ -1,5 +1,46 @@ # @aws-amplify/backend-cli +## 1.4.1 + +### Patch Changes + +- 583a3f2: Fix detection of AmplifyErrors +- Updated dependencies [583a3f2] + - @aws-amplify/platform-core@1.2.0 + - @aws-amplify/backend-deployer@1.1.8 + - @aws-amplify/sandbox@1.2.5 + +## 1.4.0 + +### Minor Changes + +- c3c3057: update ctrl+c behavior to always print guidance to delete and exit with no prompt + +### Patch Changes + +- Updated dependencies [c3c3057] +- Updated dependencies [b56d344] +- Updated dependencies [b56d344] + - @aws-amplify/cli-core@1.2.0 + - @aws-amplify/backend-deployer@1.1.6 + - @aws-amplify/schema-generator@1.2.5 + - @aws-amplify/client-config@1.5.1 + - @aws-amplify/plugin-types@1.3.1 + - @aws-amplify/sandbox@1.2.4 + +## 1.3.0 + +### Minor Changes + +- b2057f9: adds shorthand argument for version and help + +### Patch Changes + +- 5f46d8d: add user groups to outputs +- Updated dependencies [5f46d8d] + - @aws-amplify/backend-output-schemas@1.4.0 + - @aws-amplify/client-config@1.5.0 + ## 1.2.9 ### Patch Changes diff --git a/packages/cli/package.json b/packages/cli/package.json index a0559a1dc4..b765f084fa 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "@aws-amplify/backend-cli", - "version": "1.2.9", + "version": "1.4.1", "description": "Command line interface for various Amplify tools", "bin": { "ampx": "lib/ampx.js", @@ -31,18 +31,18 @@ }, "homepage": "https://github.com/aws-amplify/amplify-backend#readme", "dependencies": { - "@aws-amplify/backend-deployer": "^1.1.3", - "@aws-amplify/backend-output-schemas": "^1.2.1", + "@aws-amplify/backend-deployer": "^1.1.8", + "@aws-amplify/backend-output-schemas": "^1.4.0", "@aws-amplify/backend-secret": "^1.1.2", - "@aws-amplify/cli-core": "^1.1.3", - "@aws-amplify/client-config": "^1.4.0", + "@aws-amplify/cli-core": "^1.2.0", + "@aws-amplify/client-config": "^1.5.1", "@aws-amplify/deployed-backend-client": "^1.4.1", "@aws-amplify/form-generator": "^1.0.3", "@aws-amplify/model-generator": "^1.0.8", - "@aws-amplify/platform-core": "^1.0.5", - "@aws-amplify/plugin-types": "^1.3.0", - "@aws-amplify/sandbox": "^1.2.2", - "@aws-amplify/schema-generator": "^1.2.4", + "@aws-amplify/platform-core": "^1.2.0", + "@aws-amplify/plugin-types": "^1.3.1", + "@aws-amplify/sandbox": "^1.2.5", + "@aws-amplify/schema-generator": "^1.2.5", "@aws-sdk/client-amplify": "^3.624.0", "@aws-sdk/client-cloudformation": "^3.624.0", "@aws-sdk/client-s3": "^3.624.0", diff --git a/packages/cli/src/commands/generate/outputs/generate_outputs_command.test.ts b/packages/cli/src/commands/generate/outputs/generate_outputs_command.test.ts index b6aa2065d7..84b4a0f8e6 100644 --- a/packages/cli/src/commands/generate/outputs/generate_outputs_command.test.ts +++ b/packages/cli/src/commands/generate/outputs/generate_outputs_command.test.ts @@ -74,7 +74,7 @@ void describe('generate outputs command', () => { assert.equal(generateClientConfigMock.mock.callCount(), 1); assert.deepEqual( generateClientConfigMock.mock.calls[0].arguments[1], - '1.2' // default version + '1.3' // default version ); assert.deepEqual( generateClientConfigMock.mock.calls[0].arguments[2], @@ -97,7 +97,7 @@ void 
describe('generate outputs command', () => { assert.equal(generateClientConfigMock.mock.callCount(), 1); assert.deepEqual( generateClientConfigMock.mock.calls[0].arguments[1], - '1.2' // default version + '1.3' // default version ); assert.deepStrictEqual( generateClientConfigMock.mock.calls[0].arguments[2], @@ -118,7 +118,7 @@ void describe('generate outputs command', () => { namespace: 'app_id', type: 'branch', }, - '1.2', + '1.3', '/foo/bar', undefined, ] @@ -136,7 +136,7 @@ void describe('generate outputs command', () => { { stackName: 'stack_name', }, - '1.2', + '1.3', '/foo/bar', undefined, ] @@ -154,7 +154,7 @@ void describe('generate outputs command', () => { { stackName: 'stack_name', }, - '1.2', + '1.3', 'foo/bar', undefined, ] @@ -172,7 +172,7 @@ void describe('generate outputs command', () => { { stackName: 'stack_name', }, - '1.2', + '1.3', 'foo/bar', ClientConfigFormat.DART, ] diff --git a/packages/cli/src/commands/sandbox/sandbox-delete/sandbox_delete_command.test.ts b/packages/cli/src/commands/sandbox/sandbox-delete/sandbox_delete_command.test.ts index 62e80f946c..c919cd6141 100644 --- a/packages/cli/src/commands/sandbox/sandbox-delete/sandbox_delete_command.test.ts +++ b/packages/cli/src/commands/sandbox/sandbox-delete/sandbox_delete_command.test.ts @@ -1,10 +1,5 @@ import { beforeEach, describe, it, mock } from 'node:test'; -import { - AmplifyPrompter, - PackageManagerControllerFactory, - format, - printer, -} from '@aws-amplify/cli-core'; +import { AmplifyPrompter, format, printer } from '@aws-amplify/cli-core'; import yargs, { CommandModule } from 'yargs'; import { TestCommandRunner } from '../../../test-utils/command_runner.js'; import assert from 'node:assert'; @@ -50,8 +45,7 @@ void describe('sandbox delete command', () => { sandboxFactory, [sandboxDeleteCommand, createSandboxSecretCommand()], clientConfigGeneratorAdapterMock, - commandMiddleware, - new PackageManagerControllerFactory().getPackageManagerController() + commandMiddleware ); const parser = yargs().command(sandboxCommand as unknown as CommandModule); commandRunner = new TestCommandRunner(parser); diff --git a/packages/cli/src/commands/sandbox/sandbox_command.test.ts b/packages/cli/src/commands/sandbox/sandbox_command.test.ts index c485456549..a05bfa3168 100644 --- a/packages/cli/src/commands/sandbox/sandbox_command.test.ts +++ b/packages/cli/src/commands/sandbox/sandbox_command.test.ts @@ -8,7 +8,7 @@ import { TestCommandError, TestCommandRunner, } from '../../test-utils/command_runner.js'; -import { AmplifyPrompter, format, printer } from '@aws-amplify/cli-core'; +import { format, printer } from '@aws-amplify/cli-core'; import { EventHandler, SandboxCommand } from './sandbox_command.js'; import { createSandboxCommand } from './sandbox_command_factory.js'; import { SandboxDeleteCommand } from './sandbox-delete/sandbox_delete_command.js'; @@ -20,7 +20,6 @@ import { import { createSandboxSecretCommand } from './sandbox-secret/sandbox_secret_command_factory.js'; import { ClientConfigGeneratorAdapter } from '../../client-config/client_config_generator_adapter.js'; import { CommandMiddleware } from '../../command_middleware.js'; -import { PackageManagerController } from '@aws-amplify/plugin-types'; import { AmplifyError } from '@aws-amplify/platform-core'; mock.method(fsp, 'mkdir', () => Promise.resolve()); @@ -54,11 +53,6 @@ void describe('sandbox command', () => { ); const sandboxProfile = 'test-sandbox'; - const allowsSignalPropagationMock = mock.fn(() => true); - const packageManagerControllerMock = { - 
allowsSignalPropagation: allowsSignalPropagationMock, - } as unknown as PackageManagerController; - beforeEach(async () => { const sandboxFactory = new SandboxSingletonFactory( () => @@ -80,7 +74,6 @@ void describe('sandbox command', () => { [sandboxDeleteCommand, createSandboxSecretCommand()], clientConfigGeneratorAdapterMock, commandMiddleware, - packageManagerControllerMock, () => ({ successfulDeployment: [clientConfigGenerationMock], successfulDeletion: [clientConfigDeletionMock], @@ -128,7 +121,7 @@ void describe('sandbox command', () => { () => commandRunner.runCommand(`sandbox --identifier ${invalidIdentifier}`), // invalid identifier (err: TestCommandError) => { - assert.ok(err.error instanceof AmplifyError); + assert.ok(AmplifyError.isAmplifyError(err.error)); assert.strictEqual( err.error.message, 'Invalid --identifier provided: invalid@' @@ -189,118 +182,7 @@ void describe('sandbox command', () => { ); }); - void it('asks to delete the sandbox environment when users send ctrl-C and say yes to delete', async (contextual) => { - // Mock process and extract the sigint handler after calling the sandbox command - const processSignal = contextual.mock.method(process, 'on', () => { - /* no op */ - }); - const sandboxStartMock = contextual.mock.method( - sandbox, - 'start', - async () => Promise.resolve() - ); - - const sandboxDeleteMock = contextual.mock.method(sandbox, 'delete', () => - Promise.resolve() - ); - - // User said yes to delete - contextual.mock.method(AmplifyPrompter, 'yesOrNo', () => - Promise.resolve(true) - ); - - await commandRunner.runCommand('sandbox'); - - // Similar to the later 0ms timeout. Without this tests in github action are failing - // but working locally - await new Promise((resolve) => setTimeout(resolve, 0)); - const sigIntHandlerFn = processSignal.mock.calls[0].arguments[1]; - if (sigIntHandlerFn) sigIntHandlerFn(); - - // I can't find any open node:test or yargs issues that would explain why this is necessary - // but for some reason the mock call count does not update without this 0ms wait - await new Promise((resolve) => setTimeout(resolve, 0)); - assert.equal(sandboxStartMock.mock.callCount(), 1); - assert.equal(sandboxDeleteMock.mock.callCount(), 1); - }); - - void it('asks to delete the sandbox environment when users send ctrl-C and say yes to delete with profile', async (contextual) => { - // Mock process and extract the sigint handler after calling the sandbox command - const processSignal = contextual.mock.method(process, 'on', () => { - /* no op */ - }); - const sandboxStartMock = contextual.mock.method( - sandbox, - 'start', - async () => Promise.resolve() - ); - - const sandboxDeleteMock = contextual.mock.method(sandbox, 'delete', () => - Promise.resolve() - ); - - // User said yes to delete - contextual.mock.method(AmplifyPrompter, 'yesOrNo', () => - Promise.resolve(true) - ); - - const profile = 'test_profile'; - await commandRunner.runCommand(`sandbox --profile ${profile}`); - - // Similar to the later 0ms timeout. 
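// --- Editor's illustrative sketch (not part of the diff above) ---
// The assertion above switches from `err.error instanceof AmplifyError` to
// `AmplifyError.isAmplifyError(err.error)`. A plausible motivation (an assumption,
// not stated in this diff) is that `instanceof` breaks when more than one copy of
// the class gets loaded, e.g. from duplicated node_modules, while a marker-based
// static guard keeps working. The class and marker below are invented for this sketch.
class AmplifyErrorSketch extends Error {
  // Marker checked by the static guard instead of relying on prototype identity.
  readonly isAmplifyErrorMarker = true;

  static isAmplifyError = (error: unknown): error is AmplifyErrorSketch =>
    error instanceof Error && 'isAmplifyErrorMarker' in error;
}

const describeError = (error: unknown): string =>
  AmplifyErrorSketch.isAmplifyError(error)
    ? `amplify error: ${error.message}`
    : 'unknown error';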
Without this tests in github action are failing - // but working locally - await new Promise((resolve) => setTimeout(resolve, 0)); - const sigIntHandlerFn = processSignal.mock.calls[0].arguments[1]; - if (sigIntHandlerFn) sigIntHandlerFn(); - - // I can't find any open node:test or yargs issues that would explain why this is necessary - // but for some reason the mock call count does not update without this 0ms wait - await new Promise((resolve) => setTimeout(resolve, 0)); - assert.equal(sandboxStartMock.mock.callCount(), 1); - assert.equal(sandboxDeleteMock.mock.callCount(), 1); - assert.deepStrictEqual(sandboxDeleteMock.mock.calls[0].arguments[0], { - identifier: undefined, - profile, - }); - }); - - void it('asks to delete the sandbox environment when users send ctrl-C and say no to delete', async (contextual) => { - // Mock process and extract the sigint handler after calling the sandbox command - const processSignal = contextual.mock.method(process, 'on', () => { - /* no op */ - }); - const sandboxStartMock = contextual.mock.method( - sandbox, - 'start', - async () => Promise.resolve() - ); - - const sandboxDeleteMock = contextual.mock.method( - sandbox, - 'delete', - async () => Promise.resolve() - ); - - // User said no to delete - contextual.mock.method(AmplifyPrompter, 'yesOrNo', () => - Promise.resolve(false) - ); - - await commandRunner.runCommand('sandbox'); - - // Similar to the previous test's 0ms timeout. Without this tests in github action are failing - // but working locally - await new Promise((resolve) => setTimeout(resolve, 0)); - const sigIntHandlerFn = processSignal.mock.calls[0].arguments[1]; - if (sigIntHandlerFn) sigIntHandlerFn(); - - assert.equal(sandboxStartMock.mock.callCount(), 1); - assert.equal(sandboxDeleteMock.mock.callCount(), 0); - }); - - void it('Does not prompt for deleting the sandbox if package manager does not allow signal propagation', async (contextual) => { - allowsSignalPropagationMock.mock.mockImplementationOnce(() => false); - + void it('Prints stopping sandbox and instructions to delete sandbox when users send ctrl+c', async (contextual) => { // Mock process and extract the sigint handler after calling the sandbox command const processSignal = contextual.mock.method(process, 'on', () => { /* no op */ @@ -371,7 +253,6 @@ void describe('sandbox command', () => { [], clientConfigGeneratorAdapterMock, commandMiddleware, - packageManagerControllerMock, undefined ); const parser = yargs().command(sandboxCommand as unknown as CommandModule); @@ -427,15 +308,15 @@ void describe('sandbox command', () => { ); }); - void it('sandbox creates an empty client config file if one does not already exist for version 1.2', async (contextual) => { + void it('sandbox creates an empty client config file if one does not already exist for version 1.3', async (contextual) => { contextual.mock.method(fs, 'existsSync', () => false); const writeFileMock = contextual.mock.method(fsp, 'writeFile', () => true); - await commandRunner.runCommand('sandbox --outputs-version 1.2'); + await commandRunner.runCommand('sandbox --outputs-version 1.3'); assert.equal(sandboxStartMock.mock.callCount(), 1); assert.equal(writeFileMock.mock.callCount(), 1); assert.deepStrictEqual( writeFileMock.mock.calls[0].arguments[1], - `{\n "version": "1.2"\n}` + `{\n "version": "1.3"\n}` ); assert.deepStrictEqual( writeFileMock.mock.calls[0].arguments[0], diff --git a/packages/cli/src/commands/sandbox/sandbox_command.ts b/packages/cli/src/commands/sandbox/sandbox_command.ts index 
356d0a8eb6..37c069286b 100644 --- a/packages/cli/src/commands/sandbox/sandbox_command.ts +++ b/packages/cli/src/commands/sandbox/sandbox_command.ts @@ -1,7 +1,7 @@ import { ArgumentsCamelCase, Argv, CommandModule } from 'yargs'; import fs from 'fs'; import fsp from 'fs/promises'; -import { AmplifyPrompter, format, printer } from '@aws-amplify/cli-core'; +import { format, printer } from '@aws-amplify/cli-core'; import { SandboxFunctionStreamingOptions, SandboxSingletonFactory, @@ -20,7 +20,6 @@ import { ClientConfigGeneratorAdapter } from '../../client-config/client_config_ import { CommandMiddleware } from '../../command_middleware.js'; import { SandboxCommandGlobalOptions } from './option_types.js'; import { ArgumentsKebabCase } from '../../kebab_case.js'; -import { PackageManagerController } from '@aws-amplify/plugin-types'; import { AmplifyUserError } from '@aws-amplify/platform-core'; export type SandboxCommandOptionsKebabCase = ArgumentsKebabCase< @@ -81,7 +80,6 @@ export class SandboxCommand private readonly sandboxSubCommands: CommandModule[], private clientConfigGeneratorAdapter: ClientConfigGeneratorAdapter, private commandMiddleware: CommandMiddleware, - private readonly packageManagerController: PackageManagerController, private readonly sandboxEventHandlerCreator?: SandboxEventHandlerCreator ) { this.command = 'sandbox'; @@ -276,23 +274,11 @@ export class SandboxCommand }; sigIntHandler = async () => { - if (!this.packageManagerController.allowsSignalPropagation()) { - printer.print( - `Stopping the sandbox process. To delete the sandbox, run ${format.normalizeAmpxCommand( - 'sandbox delete' - )}` - ); - return; - } - const answer = await AmplifyPrompter.yesOrNo({ - message: - 'Would you like to delete all the resources in your sandbox environment (This cannot be undone)?', - defaultValue: false, - }); - if (answer) - await ( - await this.sandboxFactory.getInstance() - ).delete({ identifier: this.sandboxIdentifier, profile: this.profile }); + printer.print( + `Stopping the sandbox process. 
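// --- Editor's illustrative sketch (not part of the diff above) ---
// The replacement sigIntHandler no longer prompts for deletion; it only prints how
// to delete the sandbox and returns. A stripped-down version is sketched below,
// using a plain console.log as a stand-in for the printer/format utilities from
// @aws-amplify/cli-core (in the real code the command string is rendered by
// format.normalizeAmpxCommand).
const sigIntHandlerSketch = (): void => {
  console.log(
    'Stopping the sandbox process. To delete the sandbox, run npx ampx sandbox delete'
  );
};

// Registering the handler mirrors how the command wires it to ctrl+c.
process.on('SIGINT', sigIntHandlerSketch);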
To delete the sandbox, run ${format.normalizeAmpxCommand( + 'sandbox delete' + )}` + ); }; private validateDirectory = async (option: string, dir: string) => { diff --git a/packages/cli/src/commands/sandbox/sandbox_command_factory.ts b/packages/cli/src/commands/sandbox/sandbox_command_factory.ts index 07afc979d0..ef5454a931 100644 --- a/packages/cli/src/commands/sandbox/sandbox_command_factory.ts +++ b/packages/cli/src/commands/sandbox/sandbox_command_factory.ts @@ -16,11 +16,7 @@ import { } from '@aws-amplify/platform-core'; import { SandboxEventHandlerFactory } from './sandbox_event_handler_factory.js'; import { CommandMiddleware } from '../../command_middleware.js'; -import { - PackageManagerControllerFactory, - format, - printer, -} from '@aws-amplify/cli-core'; +import { format, printer } from '@aws-amplify/cli-core'; import { S3Client } from '@aws-sdk/client-s3'; import { AmplifyClient } from '@aws-sdk/client-amplify'; import { CloudFormationClient } from '@aws-sdk/client-cloudformation'; @@ -70,7 +66,6 @@ export const createSandboxCommand = (): CommandModule< [new SandboxDeleteCommand(sandboxFactory), createSandboxSecretCommand()], clientConfigGeneratorAdapter, commandMiddleWare, - new PackageManagerControllerFactory().getPackageManagerController(), eventHandlerFactory.getSandboxEventHandlers ); }; diff --git a/packages/cli/src/commands/sandbox/sandbox_event_handler_factory.test.ts b/packages/cli/src/commands/sandbox/sandbox_event_handler_factory.test.ts index 2ba2996a0e..e977b00f51 100644 --- a/packages/cli/src/commands/sandbox/sandbox_event_handler_factory.test.ts +++ b/packages/cli/src/commands/sandbox/sandbox_event_handler_factory.test.ts @@ -23,7 +23,7 @@ void describe('sandbox_event_handler_factory', () => { } as unknown as ClientConfigGeneratorAdapter; const clientConfigLifecycleHandler = new ClientConfigLifecycleHandler( clientConfigGeneratorAdapterMock, - '1.2', + '1.3', 'test-out', ClientConfigFormat.JSON ); @@ -73,7 +73,7 @@ void describe('sandbox_event_handler_factory', () => { namespace: 'test', name: 'name', }, - '1.2', + '1.3', 'test-out', 'json', ]); @@ -185,7 +185,7 @@ void describe('sandbox_event_handler_factory', () => { namespace: 'test', name: 'name', }, - '1.2', + '1.3', 'test-out', 'json', ]); diff --git a/packages/cli/src/commands/sandbox/sandbox_event_handler_factory.ts b/packages/cli/src/commands/sandbox/sandbox_event_handler_factory.ts index 858008c76e..5d03b11c2c 100644 --- a/packages/cli/src/commands/sandbox/sandbox_event_handler_factory.ts +++ b/packages/cli/src/commands/sandbox/sandbox_event_handler_factory.ts @@ -64,7 +64,7 @@ export class SandboxEventHandlerFactory { return; } const deployError = args[0]; - if (deployError && deployError instanceof AmplifyError) { + if (deployError && AmplifyError.isAmplifyError(deployError)) { await usageDataEmitter.emitFailure(deployError, { command: 'Sandbox', }); diff --git a/packages/cli/src/error_handler.ts b/packages/cli/src/error_handler.ts index 522f2cb9ec..806020e986 100644 --- a/packages/cli/src/error_handler.ts +++ b/packages/cli/src/error_handler.ts @@ -111,7 +111,7 @@ const handleError = async ({ printMessagePreamble?.(); - if (error instanceof AmplifyError) { + if (AmplifyError.isAmplifyError(error)) { printer.print(format.error(`${error.name}: ${error.message}`)); if (error.resolution) { @@ -141,7 +141,7 @@ const handleError = async ({ } await usageDataEmitter?.emitFailure( - error instanceof AmplifyError + AmplifyError.isAmplifyError(error) ? 
error : AmplifyError.fromError( error && error instanceof Error ? error : new Error(message) diff --git a/packages/cli/src/main_parser_factory.test.ts b/packages/cli/src/main_parser_factory.test.ts index 5a54ffa4a5..e957baa951 100644 --- a/packages/cli/src/main_parser_factory.test.ts +++ b/packages/cli/src/main_parser_factory.test.ts @@ -17,11 +17,22 @@ void describe('main parser', { concurrency: false }, () => { assert.match(output, /generate\s+Generates post deployment artifacts/); }); - void it('shows version', async () => { + void it('includes generate command in shorthand help output', async () => { + const output = await commandRunner.runCommand('-h'); + assert.match(output, /Commands:/); + assert.match(output, /generate\s+Generates post deployment artifacts/); + }); + + void it('shows version for long version option', async () => { const output = await commandRunner.runCommand('--version'); assert.equal(output, `${version}\n`); }); + void it('shows version for shorthand version option', async () => { + const output = await commandRunner.runCommand('-v'); + assert.equal(output, `${version}\n`); + }); + void it('prints help if command is not provided', async () => { await assert.rejects( () => commandRunner.runCommand(''), diff --git a/packages/cli/src/main_parser_factory.ts b/packages/cli/src/main_parser_factory.ts index 5189c7de3f..78fd56a550 100644 --- a/packages/cli/src/main_parser_factory.ts +++ b/packages/cli/src/main_parser_factory.ts @@ -29,6 +29,8 @@ export const createMainParser = (libraryVersion: string): Argv => { .command(createConfigureCommand()) .command(createInfoCommand()) .help() + .alias('h', 'help') + .alias('v', 'version') .demandCommand() .strictCommands() .recommendCommands() diff --git a/packages/client-config/API.md b/packages/client-config/API.md index 397419f74f..cccafacd46 100644 --- a/packages/client-config/API.md +++ b/packages/client-config/API.md @@ -19,6 +19,9 @@ type AmazonCognitoStandardAttributes_2 = 'address' | 'birthdate' | 'email' | 'fa // @public type AmazonCognitoStandardAttributes_3 = 'address' | 'birthdate' | 'email' | 'family_name' | 'gender' | 'given_name' | 'locale' | 'middle_name' | 'name' | 'nickname' | 'phone_number' | 'picture' | 'preferred_username' | 'profile' | 'sub' | 'updated_at' | 'website' | 'zoneinfo'; +// @public +type AmazonCognitoStandardAttributes_4 = 'address' | 'birthdate' | 'email' | 'family_name' | 'gender' | 'given_name' | 'locale' | 'middle_name' | 'name' | 'nickname' | 'phone_number' | 'picture' | 'preferred_username' | 'profile' | 'sub' | 'updated_at' | 'website' | 'zoneinfo'; + // @public interface AmazonLocationServiceConfig { name?: string; @@ -37,6 +40,12 @@ interface AmazonLocationServiceConfig_3 { style?: string; } +// @public +interface AmazonLocationServiceConfig_4 { + name?: string; + style?: string; +} + // @public type AmazonPinpointChannels = 'IN_APP_MESSAGING' | 'FCM' | 'APNS' | 'EMAIL' | 'SMS'; @@ -46,9 +55,15 @@ type AmazonPinpointChannels_2 = 'IN_APP_MESSAGING' | 'FCM' | 'APNS' | 'EMAIL' | // @public type AmazonPinpointChannels_3 = 'IN_APP_MESSAGING' | 'FCM' | 'APNS' | 'EMAIL' | 'SMS'; +// @public +type AmazonPinpointChannels_4 = 'IN_APP_MESSAGING' | 'FCM' | 'APNS' | 'EMAIL' | 'SMS'; + // @public (undocumented) type AmplifyStorageAccessActions = 'read' | 'get' | 'list' | 'write' | 'delete'; +// @public (undocumented) +type AmplifyStorageAccessActions_2 = 'read' | 'get' | 'list' | 'write' | 'delete'; + // @public interface AmplifyStorageAccessRule { // (undocumented) @@ -63,6 +78,20 @@ interface 
AmplifyStorageAccessRule { resource?: AmplifyStorageAccessActions[]; } +// @public +interface AmplifyStorageAccessRule_2 { + // (undocumented) + authenticated?: AmplifyStorageAccessActions_2[]; + // (undocumented) + entity?: AmplifyStorageAccessActions_2[]; + // (undocumented) + groups?: AmplifyStorageAccessActions_2[]; + // (undocumented) + guest?: AmplifyStorageAccessActions_2[]; + // (undocumented) + resource?: AmplifyStorageAccessActions_2[]; +} + // @public (undocumented) interface AmplifyStorageBucket { // (undocumented) @@ -85,6 +114,26 @@ interface AmplifyStorageBucket_2 { bucket_name: string; // (undocumented) name: string; + // (undocumented) + paths?: { + [k: string]: AmplifyStorageAccessRule_2; + }; +} + +// @public (undocumented) +interface AmplifyStorageBucket_3 { + // (undocumented) + aws_region: string; + // (undocumented) + bucket_name: string; + // (undocumented) + name: string; +} + +// @public +interface AmplifyUserGroupConfig { + // (undocumented) + precedence?: number; } // @public (undocumented) @@ -161,6 +210,9 @@ interface AWSAmplifyBackendOutputs { unauthenticated_identities_enabled?: boolean; mfa_configuration?: 'NONE' | 'OPTIONAL' | 'REQUIRED'; mfa_methods?: ('SMS' | 'TOTP')[]; + groups?: { + [k: string]: AmplifyUserGroupConfig; + }[]; }; custom?: { [k: string]: unknown; @@ -202,19 +254,19 @@ interface AWSAmplifyBackendOutputs { bucket_name: string; buckets?: AmplifyStorageBucket[]; }; - version: '1.2'; + version: '1.3'; } // @public interface AWSAmplifyBackendOutputs_2 { analytics?: { amazon_pinpoint?: { - aws_region: AwsRegion_2; + aws_region: string; app_id: string; }; }; auth?: { - aws_region: AwsRegion_2; + aws_region: string; user_pool_id: string; user_pool_client_id: string; identity_pool_id?: string; @@ -254,7 +306,7 @@ interface AWSAmplifyBackendOutputs_2 { authorization_types: AwsAppsyncAuthorizationType_2[]; }; geo?: { - aws_region: AwsRegion_2; + aws_region: string; maps?: { items: { [k: string]: AmazonLocationServiceConfig_2; @@ -280,7 +332,7 @@ interface AWSAmplifyBackendOutputs_2 { bucket_name: string; buckets?: AmplifyStorageBucket_2[]; }; - version: '1.1'; + version: '1.2'; } // @public @@ -356,6 +408,84 @@ interface AWSAmplifyBackendOutputs_3 { storage?: { aws_region: AwsRegion_3; bucket_name: string; + buckets?: AmplifyStorageBucket_3[]; + }; + version: '1.1'; +} + +// @public +interface AWSAmplifyBackendOutputs_4 { + analytics?: { + amazon_pinpoint?: { + aws_region: AwsRegion_4; + app_id: string; + }; + }; + auth?: { + aws_region: AwsRegion_4; + user_pool_id: string; + user_pool_client_id: string; + identity_pool_id?: string; + password_policy?: { + min_length: number; + require_numbers: boolean; + require_lowercase: boolean; + require_uppercase: boolean; + require_symbols: boolean; + }; + oauth?: { + identity_providers: ('GOOGLE' | 'FACEBOOK' | 'LOGIN_WITH_AMAZON' | 'SIGN_IN_WITH_APPLE')[]; + domain: string; + scopes: string[]; + redirect_sign_in_uri: string[]; + redirect_sign_out_uri: string[]; + response_type: 'code' | 'token'; + }; + standard_required_attributes?: AmazonCognitoStandardAttributes_4[]; + username_attributes?: ('email' | 'phone_number' | 'username')[]; + user_verification_types?: ('email' | 'phone_number')[]; + unauthenticated_identities_enabled?: boolean; + mfa_configuration?: 'NONE' | 'OPTIONAL' | 'REQUIRED'; + mfa_methods?: ('SMS' | 'TOTP')[]; + }; + custom?: { + [k: string]: unknown; + }; + data?: { + aws_region: AwsRegion_4; + url: string; + model_introspection?: { + [k: string]: unknown; + }; + api_key?: string; + 
default_authorization_type: AwsAppsyncAuthorizationType_4; + authorization_types: AwsAppsyncAuthorizationType_4[]; + }; + geo?: { + aws_region: AwsRegion_4; + maps?: { + items: { + [k: string]: AmazonLocationServiceConfig_4; + }; + default: string; + }; + search_indices?: { + items: string[]; + default: string; + }; + geofence_collections?: { + items: string[]; + default: string; + }; + }; + notifications?: { + aws_region: AwsRegion_4; + amazon_pinpoint_app_id: string; + channels: AmazonPinpointChannels_4[]; + }; + storage?: { + aws_region: AwsRegion_4; + bucket_name: string; }; version: '1'; } @@ -369,6 +499,9 @@ type AwsAppsyncAuthorizationType_2 = 'AMAZON_COGNITO_USER_POOLS' | 'API_KEY' | ' // @public type AwsAppsyncAuthorizationType_3 = 'AMAZON_COGNITO_USER_POOLS' | 'API_KEY' | 'AWS_IAM' | 'AWS_LAMBDA' | 'OPENID_CONNECT'; +// @public +type AwsAppsyncAuthorizationType_4 = 'AMAZON_COGNITO_USER_POOLS' | 'API_KEY' | 'AWS_IAM' | 'AWS_LAMBDA' | 'OPENID_CONNECT'; + // @public (undocumented) type AwsRegion = string; @@ -378,8 +511,11 @@ type AwsRegion_2 = string; // @public (undocumented) type AwsRegion_3 = string; +// @public (undocumented) +type AwsRegion_4 = string; + // @public -export type ClientConfig = clientConfigTypesV1_2.AWSAmplifyBackendOutputs | clientConfigTypesV1_1.AWSAmplifyBackendOutputs | clientConfigTypesV1.AWSAmplifyBackendOutputs; +export type ClientConfig = clientConfigTypesV1_3.AWSAmplifyBackendOutputs | clientConfigTypesV1_2.AWSAmplifyBackendOutputs | clientConfigTypesV1_1.AWSAmplifyBackendOutputs | clientConfigTypesV1.AWSAmplifyBackendOutputs; // @public (undocumented) export enum ClientConfigFileBaseName { @@ -407,31 +543,46 @@ export enum ClientConfigFormat { export type ClientConfigLegacy = Partial; declare namespace clientConfigTypesV1 { + export { + AmazonCognitoStandardAttributes_4 as AmazonCognitoStandardAttributes, + AwsRegion_4 as AwsRegion, + AwsAppsyncAuthorizationType_4 as AwsAppsyncAuthorizationType, + AmazonPinpointChannels_4 as AmazonPinpointChannels, + AWSAmplifyBackendOutputs_4 as AWSAmplifyBackendOutputs, + AmazonLocationServiceConfig_4 as AmazonLocationServiceConfig + } +} +export { clientConfigTypesV1 } + +declare namespace clientConfigTypesV1_1 { export { AmazonCognitoStandardAttributes_3 as AmazonCognitoStandardAttributes, AwsRegion_3 as AwsRegion, AwsAppsyncAuthorizationType_3 as AwsAppsyncAuthorizationType, AmazonPinpointChannels_3 as AmazonPinpointChannels, AWSAmplifyBackendOutputs_3 as AWSAmplifyBackendOutputs, - AmazonLocationServiceConfig_3 as AmazonLocationServiceConfig + AmazonLocationServiceConfig_3 as AmazonLocationServiceConfig, + AmplifyStorageBucket_3 as AmplifyStorageBucket } } -export { clientConfigTypesV1 } +export { clientConfigTypesV1_1 } -declare namespace clientConfigTypesV1_1 { +declare namespace clientConfigTypesV1_2 { export { AmazonCognitoStandardAttributes_2 as AmazonCognitoStandardAttributes, AwsRegion_2 as AwsRegion, AwsAppsyncAuthorizationType_2 as AwsAppsyncAuthorizationType, AmazonPinpointChannels_2 as AmazonPinpointChannels, + AmplifyStorageAccessActions_2 as AmplifyStorageAccessActions, AWSAmplifyBackendOutputs_2 as AWSAmplifyBackendOutputs, AmazonLocationServiceConfig_2 as AmazonLocationServiceConfig, - AmplifyStorageBucket_2 as AmplifyStorageBucket + AmplifyStorageBucket_2 as AmplifyStorageBucket, + AmplifyStorageAccessRule_2 as AmplifyStorageAccessRule } } -export { clientConfigTypesV1_1 } +export { clientConfigTypesV1_2 } -declare namespace clientConfigTypesV1_2 { +declare namespace clientConfigTypesV1_3 { export 
{ AmazonCognitoStandardAttributes, AwsRegion, @@ -439,12 +590,13 @@ declare namespace clientConfigTypesV1_2 { AmazonPinpointChannels, AmplifyStorageAccessActions, AWSAmplifyBackendOutputs, + AmplifyUserGroupConfig, AmazonLocationServiceConfig, AmplifyStorageBucket, AmplifyStorageAccessRule } } -export { clientConfigTypesV1_2 } +export { clientConfigTypesV1_3 } // @public (undocumented) export type ClientConfigVersion = `${ClientConfigVersionOption}`; @@ -458,11 +610,13 @@ export enum ClientConfigVersionOption { // (undocumented) V1_1 = "1.1", // (undocumented) - V1_2 = "1.2" + V1_2 = "1.2", + // (undocumented) + V1_3 = "1.3" } // @public -export type ClientConfigVersionTemplateType = T extends '1.2' ? clientConfigTypesV1_2.AWSAmplifyBackendOutputs : T extends '1.1' ? clientConfigTypesV1_1.AWSAmplifyBackendOutputs : T extends '1' ? clientConfigTypesV1.AWSAmplifyBackendOutputs : never; +export type ClientConfigVersionTemplateType = T extends '1.3' ? clientConfigTypesV1_3.AWSAmplifyBackendOutputs : T extends '1.2' ? clientConfigTypesV1_2.AWSAmplifyBackendOutputs : T extends '1.1' ? clientConfigTypesV1_1.AWSAmplifyBackendOutputs : T extends '1' ? clientConfigTypesV1.AWSAmplifyBackendOutputs : never; // @public (undocumented) export type CustomClientConfig = { @@ -473,7 +627,7 @@ export type CustomClientConfig = { export const DEFAULT_CLIENT_CONFIG_VERSION: ClientConfigVersion; // @public -export const generateClientConfig: (backendIdentifier: DeployedBackendIdentifier, version: T, awsClientProvider?: AWSClientProvider<{ +export const generateClientConfig: (backendIdentifier: DeployedBackendIdentifier, version: T, awsClientProvider?: AWSClientProvider<{ getS3Client: S3Client; getAmplifyClient: AmplifyClient; getCloudFormationClient: CloudFormationClient; diff --git a/packages/client-config/CHANGELOG.md b/packages/client-config/CHANGELOG.md index e459d5953b..558d349fdb 100644 --- a/packages/client-config/CHANGELOG.md +++ b/packages/client-config/CHANGELOG.md @@ -1,5 +1,30 @@ # @aws-amplify/client-config +## 1.5.2 + +### Patch Changes + +- d0d8d4e: Fix a bug where $ sign in dart outputs would fail compilation + +## 1.5.1 + +### Patch Changes + +- b56d344: update aws-cdk lib to ^2.158.0 +- Updated dependencies [b56d344] + - @aws-amplify/plugin-types@1.3.1 + +## 1.5.0 + +### Minor Changes + +- 5f46d8d: add user groups to outputs + +### Patch Changes + +- Updated dependencies [5f46d8d] + - @aws-amplify/backend-output-schemas@1.4.0 + ## 1.4.0 ### Minor Changes diff --git a/packages/client-config/package.json b/packages/client-config/package.json index d5d44aba63..9310fa1617 100644 --- a/packages/client-config/package.json +++ b/packages/client-config/package.json @@ -1,6 +1,6 @@ { "name": "@aws-amplify/client-config", - "version": "1.4.0", + "version": "1.5.2", "type": "module", "publishConfig": { "access": "public" @@ -24,11 +24,11 @@ }, "license": "Apache-2.0", "dependencies": { - "@aws-amplify/backend-output-schemas": "^1.2.1", + "@aws-amplify/backend-output-schemas": "^1.4.0", "@aws-amplify/deployed-backend-client": "^1.4.1", "@aws-amplify/model-generator": "^1.0.7", "@aws-amplify/platform-core": "^1.0.7", - "@aws-amplify/plugin-types": "^1.2.2", + "@aws-amplify/plugin-types": "^1.3.1", "zod": "^3.22.2" }, "devDependencies": { diff --git a/packages/client-config/src/client-config-contributor/client_config_contributor_factory.ts b/packages/client-config/src/client-config-contributor/client_config_contributor_factory.ts index 6c8fb49d45..50d07f4d0a 100644 --- 
a/packages/client-config/src/client-config-contributor/client_config_contributor_factory.ts +++ b/packages/client-config/src/client-config-contributor/client_config_contributor_factory.ts @@ -1,14 +1,16 @@ // Versions of config schemas supported by this package version import { - AuthClientConfigContributor as Auth1_1, + AuthClientConfigContributorV1_1 as Auth1_1, + AuthClientConfigContributor as Auth1_3, CustomClientConfigContributor as Custom1_1, DataClientConfigContributor as Data1_1, StorageClientConfigContributorV1 as Storage1, StorageClientConfigContributorV1_1 as Storage1_1, StorageClientConfigContributor as Storage1_2, - VersionContributor as VersionContributor1_2, + VersionContributor as VersionContributor1_3, VersionContributorV1, VersionContributorV1_1, + VersionContributorV1_2, } from './client_config_contributor_v1.js'; import { ClientConfigContributor } from '../client-config-types/client_config_contributor.js'; @@ -33,11 +35,19 @@ export class ClientConfigContributorFactory { private readonly modelIntrospectionSchemaAdapter: ModelIntrospectionSchemaAdapter ) { this.versionedClientConfigContributors = { + [ClientConfigVersionOption.V1_3]: [ + new Auth1_3(), + new Data1_1(this.modelIntrospectionSchemaAdapter), + new Storage1_2(), + new VersionContributor1_3(), + new Custom1_1(), + ], + [ClientConfigVersionOption.V1_2]: [ new Auth1_1(), new Data1_1(this.modelIntrospectionSchemaAdapter), new Storage1_2(), - new VersionContributor1_2(), + new VersionContributorV1_2(), new Custom1_1(), ], @@ -58,12 +68,12 @@ export class ClientConfigContributorFactory { new Custom1_1(), ], - // Legacy config is derived from V1.2 (latest) of unified default config + // Legacy config is derived from V1.3 (latest) of unified default config [ClientConfigVersionOption.V0]: [ new Auth1_1(), new Data1_1(this.modelIntrospectionSchemaAdapter), new Storage1_2(), - new VersionContributor1_2(), + new VersionContributor1_3(), new Custom1_1(), ], }; diff --git a/packages/client-config/src/client-config-contributor/client_config_contributor_v1.test.ts b/packages/client-config/src/client-config-contributor/client_config_contributor_v1.test.ts index 022bea9dd4..8a00d4ebb4 100644 --- a/packages/client-config/src/client-config-contributor/client_config_contributor_v1.test.ts +++ b/packages/client-config/src/client-config-contributor/client_config_contributor_v1.test.ts @@ -8,7 +8,7 @@ import { } from './client_config_contributor_v1.js'; import { ClientConfig, - clientConfigTypesV1_2, + clientConfigTypesV1_3, } from '../client-config-types/client_config.js'; import assert from 'node:assert'; import { @@ -74,7 +74,7 @@ void describe('auth client config contributor v1', () => { identity_pool_id: 'testIdentityPoolId', unauthenticated_identities_enabled: true, }, - } as Partial + } as Partial ); }); @@ -99,7 +99,7 @@ void describe('auth client config contributor v1', () => { aws_region: 'testRegion', identity_pool_id: 'testIdentityPoolId', }, - } as Partial + } as Partial ); }); @@ -133,7 +133,7 @@ void describe('auth client config contributor v1', () => { require_uppercase: true, }, }, - } as Partial + } as Partial ); }); @@ -166,11 +166,23 @@ void describe('auth client config contributor v1', () => { require_uppercase: false, }, }, - } as Partial + } as Partial ); }); void it('returns translated config when output has auth with zero-config attributes', () => { + const groups = [ + { + ADMINS: { + precedence: 0, + }, + }, + { + EDITORS: { + precedence: 1, + }, + }, + ]; const contributor = new 
AuthClientConfigContributor(); assert.deepStrictEqual( contributor.contribute({ @@ -197,6 +209,7 @@ void describe('auth client config contributor v1', () => { oauthRedirectSignOut: 'http://logout.com,http://logout2.com', oauthResponseType: 'code', socialProviders: `["GOOGLE","FACEBOOK","SIGN_IN_WITH_APPLE","LOGIN_WITH_AMAZON","GITHUB","DISCORD"]`, + groups: JSON.stringify(groups), }, }, }), @@ -235,12 +248,36 @@ void describe('auth client config contributor v1', () => { redirect_sign_out_uri: ['http://logout.com', 'http://logout2.com'], response_type: 'code', }, + groups: [ + { + ADMINS: { + precedence: 0, + }, + }, + { + EDITORS: { + precedence: 1, + }, + }, + ], }, - } as Partial + } as Partial ); }); void it('returns translated config when output has oauth settings but no social providers', () => { + const groups = [ + { + ADMINS: { + precedence: 0, + }, + }, + { + EDITORS: { + precedence: 1, + }, + }, + ]; const contributor = new AuthClientConfigContributor(); assert.deepStrictEqual( contributor.contribute({ @@ -266,6 +303,7 @@ void describe('auth client config contributor v1', () => { oauthRedirectSignIn: 'http://callback.com,http://callback2.com', oauthRedirectSignOut: 'http://logout.com,http://logout2.com', oauthResponseType: 'code', + groups: JSON.stringify(groups), }, }, }), @@ -299,12 +337,36 @@ void describe('auth client config contributor v1', () => { redirect_sign_out_uri: ['http://logout.com', 'http://logout2.com'], response_type: 'code', }, + groups: [ + { + ADMINS: { + precedence: 0, + }, + }, + { + EDITORS: { + precedence: 1, + }, + }, + ], }, - } as Partial + } as Partial ); }); void describe('auth outputs with mfa', () => { + const groups = [ + { + ADMINS: { + precedence: 0, + }, + }, + { + EDITORS: { + precedence: 1, + }, + }, + ]; const contribution = { version: '1' as const, payload: { @@ -327,6 +389,7 @@ void describe('auth client config contributor v1', () => { oauthRedirectSignIn: 'http://callback.com,http://callback2.com', oauthRedirectSignOut: 'http://logout.com,http://logout2.com', oauthResponseType: 'code', + groups: JSON.stringify(groups), }, }; @@ -357,8 +420,20 @@ void describe('auth client config contributor v1', () => { redirect_sign_out_uri: ['http://logout.com', 'http://logout2.com'], response_type: 'code', }, + groups: [ + { + ADMINS: { + precedence: 0, + }, + }, + { + EDITORS: { + precedence: 1, + }, + }, + ], }, - } as Pick; + } as Pick; void it('returns translated config when mfa is disabled', () => { const contributor = new AuthClientConfigContributor(); @@ -459,7 +534,7 @@ void describe('data client config contributor v1', () => { url: 'testApiEndpoint', aws_region: 'us-east-1', }, - } as Partial); + } as Partial); }); void it('returns translated config with model introspection when resolvable', async () => { @@ -507,7 +582,7 @@ void describe('data client config contributor v1', () => { enums: {}, }, }, - } as Partial); + } as Partial); }); }); @@ -625,6 +700,6 @@ void describe('Custom client config contributor v1', () => { void describe('Custom client config contributor v1', () => { void it('contributes the version correctly', () => { - assert.deepEqual(new VersionContributor().contribute(), { version: '1.2' }); + assert.deepEqual(new VersionContributor().contribute(), { version: '1.3' }); }); }); diff --git a/packages/client-config/src/client-config-contributor/client_config_contributor_v1.ts b/packages/client-config/src/client-config-contributor/client_config_contributor_v1.ts index 425775a978..4bd0879e0b 100644 --- 
a/packages/client-config/src/client-config-contributor/client_config_contributor_v1.ts +++ b/packages/client-config/src/client-config-contributor/client_config_contributor_v1.ts @@ -12,6 +12,7 @@ import { clientConfigTypesV1, clientConfigTypesV1_1, clientConfigTypesV1_2, + clientConfigTypesV1_3, } from '../client-config-types/client_config.js'; import { ModelIntrospectionSchemaAdapter } from '../model_introspection_schema_adapter.js'; import { AwsAppsyncAuthorizationType } from '../client-config-schema/client_config_v1.1.js'; @@ -21,9 +22,22 @@ import { AmplifyStorageAccessRule } from '../client-config-schema/client_config_ // the same schema (version and other types) /** - * Translator for the version number of ClientConfig of V1.2 + * Translator for the version number of ClientConfig of V1.3 */ export class VersionContributor implements ClientConfigContributor { + /** + * Return the version of the schema types that this contributor uses + */ + contribute = (): ClientConfig => { + return { version: ClientConfigVersionOption.V1_3 }; + }; +} + +/** + * Translator for the version number of ClientConfig of V1.2 + */ +// eslint-disable-next-line @typescript-eslint/naming-convention +export class VersionContributorV1_2 implements ClientConfigContributor { /** * Return the version of the schema types that this contributor uses */ @@ -58,9 +72,181 @@ export class VersionContributorV1 implements ClientConfigContributor { } /** - * Translator for the Auth portion of ClientConfig + * Translator for the Auth portion of ClientConfig in V1.3 */ export class AuthClientConfigContributor implements ClientConfigContributor { + /** + * Given some BackendOutput, contribute the Auth portion of the ClientConfig + */ + contribute = ({ + [authOutputKey]: authOutput, + }: UnifiedBackendOutput): Partial | Record => { + if (authOutput === undefined) { + return {}; + } + + const parseAndAssignObject = ( + obj: T, + key: keyof T, + value: string | undefined + ) => { + if (value == null) { + return; + } + obj[key] = JSON.parse(value); + }; + + const authClientConfig: Partial = + {}; + + authClientConfig.auth = { + user_pool_id: authOutput.payload.userPoolId, + aws_region: authOutput.payload.authRegion, + user_pool_client_id: authOutput.payload.webClientId, + }; + + if (authOutput.payload.identityPoolId) { + authClientConfig.auth.identity_pool_id = + authOutput.payload.identityPoolId; + } + + parseAndAssignObject( + authClientConfig.auth, + 'mfa_methods', + authOutput.payload.mfaTypes + ); + + parseAndAssignObject( + authClientConfig.auth, + 'standard_required_attributes', + authOutput.payload.signupAttributes + ); + + parseAndAssignObject( + authClientConfig.auth, + 'username_attributes', + authOutput.payload.usernameAttributes + ); + + parseAndAssignObject( + authClientConfig.auth, + 'user_verification_types', + authOutput.payload.verificationMechanisms + ); + + parseAndAssignObject( + authClientConfig.auth, + 'groups', + authOutput.payload.groups + ); + + if (authOutput.payload.mfaConfiguration) { + switch (authOutput.payload.mfaConfiguration) { + case 'OFF': { + authClientConfig.auth.mfa_configuration = 'NONE'; + break; + } + case 'OPTIONAL': { + authClientConfig.auth.mfa_configuration = 'OPTIONAL'; + break; + } + case 'ON': { + authClientConfig.auth.mfa_configuration = 'REQUIRED'; + } + } + } + + if ( + authOutput.payload.passwordPolicyMinLength || + authOutput.payload.passwordPolicyRequirements + ) { + authClientConfig.auth.password_policy = { + min_length: 8, // This is the default that is matching what 
construct defines. + // Values below are set to false instead of being undefined as libraries expect defined values. + // They are overridden below with construct outputs (default or not) if applicable. + require_lowercase: false, + require_numbers: false, + require_symbols: false, + require_uppercase: false, + }; + if (authOutput.payload.passwordPolicyMinLength) { + authClientConfig.auth.password_policy.min_length = Number.parseInt( + authOutput.payload.passwordPolicyMinLength + ); + } + if (authOutput.payload.passwordPolicyRequirements) { + const requirements = JSON.parse( + authOutput.payload.passwordPolicyRequirements + ) as string[]; + for (const requirement of requirements) { + switch (requirement) { + case 'REQUIRES_NUMBERS': + authClientConfig.auth.password_policy.require_numbers = true; + break; + case 'REQUIRES_LOWERCASE': + authClientConfig.auth.password_policy.require_lowercase = true; + break; + case 'REQUIRES_UPPERCASE': + authClientConfig.auth.password_policy.require_uppercase = true; + break; + case 'REQUIRES_SYMBOLS': + authClientConfig.auth.password_policy.require_symbols = true; + break; + } + } + } + } + + // OAuth settings are present if both oauthRedirectSignIn and oauthRedirectSignOut are. + if ( + authOutput.payload.oauthRedirectSignIn && + authOutput.payload.oauthRedirectSignOut + ) { + let socialProviders = authOutput.payload.socialProviders + ? JSON.parse(authOutput.payload.socialProviders) + : []; + if (Array.isArray(socialProviders)) { + socialProviders = socialProviders.filter(this.isValidIdentityProvider); + } + authClientConfig.auth.oauth = { + identity_providers: socialProviders, + redirect_sign_in_uri: authOutput.payload.oauthRedirectSignIn.split(','), + redirect_sign_out_uri: + authOutput.payload.oauthRedirectSignOut.split(','), + response_type: authOutput.payload.oauthResponseType as 'code' | 'token', + scopes: authOutput.payload.oauthScope + ? JSON.parse(authOutput.payload.oauthScope) + : [], + domain: authOutput.payload.oauthCognitoDomain ?? '', + }; + } + + if (authOutput.payload.allowUnauthenticatedIdentities) { + authClientConfig.auth.unauthenticated_identities_enabled = + authOutput.payload.allowUnauthenticatedIdentities === 'true'; + } + + return authClientConfig; + }; + + // Define a type guard function to check if a value is a valid IdentityProvider + isValidIdentityProvider = (identityProvider: string): boolean => { + return [ + 'GOOGLE', + 'FACEBOOK', + 'LOGIN_WITH_AMAZON', + 'SIGN_IN_WITH_APPLE', + ].includes(identityProvider); + }; +} + +/** + * Translator for the Auth portion of ClientConfig in V1.2 + */ +// eslint-disable-next-line @typescript-eslint/naming-convention +export class AuthClientConfigContributorV1_1 + implements ClientConfigContributor +{ /** * Given some BackendOutput, contribute the Auth portion of the ClientConfig */ diff --git a/packages/client-config/src/client-config-schema/client_config_v1.3.ts b/packages/client-config/src/client-config-schema/client_config_v1.3.ts new file mode 100644 index 0000000000..560b0773ec --- /dev/null +++ b/packages/client-config/src/client-config-schema/client_config_v1.3.ts @@ -0,0 +1,282 @@ +/* eslint-disable */ +/** + * This file was automatically generated by json-schema-to-typescript. + * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file, + * and run json-schema-to-typescript to regenerate this file. 
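// --- Editor's illustrative sketch (not part of the diff above) ---
// The contributor above seeds the password policy with defined defaults (so client
// libraries never receive `undefined`) and then overrides them from the construct's
// REQUIRES_* requirement strings. The standalone helper below restates that mapping;
// the type name and function are invented here and are not exported by the package.
type PasswordPolicySketch = {
  min_length: number;
  require_numbers: boolean;
  require_lowercase: boolean;
  require_uppercase: boolean;
  require_symbols: boolean;
};

const toPasswordPolicySketch = (
  minLength: string | undefined,
  requirementsJson: string | undefined
): PasswordPolicySketch => {
  // Start from defined defaults, then flip flags based on the serialized requirements.
  const policy: PasswordPolicySketch = {
    min_length: minLength ? Number.parseInt(minLength) : 8,
    require_numbers: false,
    require_lowercase: false,
    require_uppercase: false,
    require_symbols: false,
  };
  const requirements: string[] = requirementsJson
    ? JSON.parse(requirementsJson)
    : [];
  for (const requirement of requirements) {
    if (requirement === 'REQUIRES_NUMBERS') policy.require_numbers = true;
    if (requirement === 'REQUIRES_LOWERCASE') policy.require_lowercase = true;
    if (requirement === 'REQUIRES_UPPERCASE') policy.require_uppercase = true;
    if (requirement === 'REQUIRES_SYMBOLS') policy.require_symbols = true;
  }
  return policy;
};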
+ */ + +/** + * Amazon Cognito standard attributes for users -- https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-settings-attributes.html + */ +export type AmazonCognitoStandardAttributes = + | 'address' + | 'birthdate' + | 'email' + | 'family_name' + | 'gender' + | 'given_name' + | 'locale' + | 'middle_name' + | 'name' + | 'nickname' + | 'phone_number' + | 'picture' + | 'preferred_username' + | 'profile' + | 'sub' + | 'updated_at' + | 'website' + | 'zoneinfo'; +export type AwsRegion = string; +/** + * List of supported auth types for AWS AppSync + */ +export type AwsAppsyncAuthorizationType = + | 'AMAZON_COGNITO_USER_POOLS' + | 'API_KEY' + | 'AWS_IAM' + | 'AWS_LAMBDA' + | 'OPENID_CONNECT'; +/** + * supported channels for Amazon Pinpoint + */ +export type AmazonPinpointChannels = + | 'IN_APP_MESSAGING' + | 'FCM' + | 'APNS' + | 'EMAIL' + | 'SMS'; +export type AmplifyStorageAccessActions = + | 'read' + | 'get' + | 'list' + | 'write' + | 'delete'; + +/** + * Config format for Amplify Gen 2 client libraries to communicate with backend services. + */ +export interface AWSAmplifyBackendOutputs { + /** + * Version of this schema + */ + version: '1.3'; + /** + * Outputs manually specified by developers for use with frontend library + */ + analytics?: { + amazon_pinpoint?: { + /** + * AWS Region of Amazon Pinpoint resources + */ + aws_region: string; + app_id: string; + }; + }; + /** + * Outputs generated from defineAuth + */ + auth?: { + /** + * AWS Region of Amazon Cognito resources + */ + aws_region: string; + /** + * Cognito User Pool ID + */ + user_pool_id: string; + /** + * Cognito User Pool Client ID + */ + user_pool_client_id: string; + /** + * Cognito Identity Pool ID + */ + identity_pool_id?: string; + /** + * Cognito User Pool password policy + */ + password_policy?: { + min_length: number; + require_numbers: boolean; + require_lowercase: boolean; + require_uppercase: boolean; + require_symbols: boolean; + }; + oauth?: { + /** + * Identity providers set on Cognito User Pool + * + * @minItems 0 + */ + identity_providers: ( + | 'GOOGLE' + | 'FACEBOOK' + | 'LOGIN_WITH_AMAZON' + | 'SIGN_IN_WITH_APPLE' + )[]; + /** + * Domain used for identity providers + */ + domain: string; + /** + * @minItems 0 + */ + scopes: string[]; + /** + * URIs used to redirect after signing in using an identity provider + * + * @minItems 1 + */ + redirect_sign_in_uri: string[]; + /** + * URIs used to redirect after signing out + * + * @minItems 1 + */ + redirect_sign_out_uri: string[]; + response_type: 'code' | 'token'; + }; + /** + * Cognito User Pool standard attributes required for signup + * + * @minItems 0 + */ + standard_required_attributes?: AmazonCognitoStandardAttributes[]; + /** + * Cognito User Pool username attributes + * + * @minItems 1 + */ + username_attributes?: ('email' | 'phone_number' | 'username')[]; + user_verification_types?: ('email' | 'phone_number')[]; + unauthenticated_identities_enabled?: boolean; + mfa_configuration?: 'NONE' | 'OPTIONAL' | 'REQUIRED'; + mfa_methods?: ('SMS' | 'TOTP')[]; + groups?: { + [k: string]: AmplifyUserGroupConfig; + }[]; + }; + /** + * Outputs generated from defineData + */ + data?: { + aws_region: AwsRegion; + /** + * AppSync endpoint URL + */ + url: string; + /** + * generated model introspection schema for use with generateClient + */ + model_introspection?: { + [k: string]: unknown; + }; + api_key?: string; + default_authorization_type: AwsAppsyncAuthorizationType; + authorization_types: AwsAppsyncAuthorizationType[]; + }; + /** + * Outputs 
manually specified by developers for use with frontend library + */ + geo?: { + /** + * AWS Region of Amazon Location Service resources + */ + aws_region: string; + /** + * Maps from Amazon Location Service + */ + maps?: { + items: { + [k: string]: AmazonLocationServiceConfig; + }; + default: string; + }; + /** + * Location search (search by places, addresses, coordinates) + */ + search_indices?: { + /** + * @minItems 1 + */ + items: string[]; + default: string; + }; + /** + * Geofencing (visualize virtual perimeters) + */ + geofence_collections?: { + /** + * @minItems 1 + */ + items: string[]; + default: string; + }; + }; + /** + * Outputs manually specified by developers for use with frontend library + */ + notifications?: { + aws_region: AwsRegion; + amazon_pinpoint_app_id: string; + /** + * @minItems 1 + */ + channels: AmazonPinpointChannels[]; + }; + /** + * Outputs generated from defineStorage + */ + storage?: { + aws_region: AwsRegion; + bucket_name: string; + buckets?: AmplifyStorageBucket[]; + }; + /** + * Outputs generated from backend.addOutput({ custom: }) + */ + custom?: { + [k: string]: unknown; + }; +} +/** + * This interface was referenced by `undefined`'s JSON-Schema definition + * via the `patternProperty` ".*". + */ +export interface AmplifyUserGroupConfig { + precedence?: number; +} +/** + * This interface was referenced by `undefined`'s JSON-Schema definition + * via the `patternProperty` ".*". + */ +export interface AmazonLocationServiceConfig { + /** + * Map resource name + */ + name?: string; + /** + * Map style + */ + style?: string; +} +export interface AmplifyStorageBucket { + name: string; + bucket_name: string; + aws_region: string; + paths?: { + [k: string]: AmplifyStorageAccessRule; + }; +} +/** + * This interface was referenced by `undefined`'s JSON-Schema definition + * via the `patternProperty` ".*". 
+ */ +export interface AmplifyStorageAccessRule { + guest?: AmplifyStorageAccessActions[]; + authenticated?: AmplifyStorageAccessActions[]; + groups?: AmplifyStorageAccessActions[]; + entity?: AmplifyStorageAccessActions[]; + resource?: AmplifyStorageAccessActions[]; +} diff --git a/packages/client-config/src/client-config-schema/schema_v1.3.json b/packages/client-config/src/client-config-schema/schema_v1.3.json new file mode 100644 index 0000000000..bf89de5504 --- /dev/null +++ b/packages/client-config/src/client-config-schema/schema_v1.3.json @@ -0,0 +1,500 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://amplify.aws/2024-02/outputs-schema.json", + "title": "AWS Amplify Backend Outputs", + "description": "Config format for Amplify Gen 2 client libraries to communicate with backend services.", + "type": "object", + "additionalProperties": false, + "properties": { + "$schema": { + "description": "JSON schema", + "type": "string" + }, + "version": { + "description": "Version of this schema", + "const": "1.3" + }, + "analytics": { + "description": "Outputs manually specified by developers for use with frontend library", + "type": "object", + "additionalProperties": false, + "properties": { + "amazon_pinpoint": { + "type": "object", + "additionalProperties": false, + "properties": { + "aws_region": { + "description": "AWS Region of Amazon Pinpoint resources", + "$ref": "#/$defs/aws_region" + }, + "app_id": { + "type": "string" + } + }, + "required": ["aws_region", "app_id"] + } + } + }, + "auth": { + "description": "Outputs generated from defineAuth", + "type": "object", + "additionalProperties": false, + "properties": { + "aws_region": { + "description": "AWS Region of Amazon Cognito resources", + "$ref": "#/$defs/aws_region" + }, + "user_pool_id": { + "description": "Cognito User Pool ID", + "type": "string" + }, + "user_pool_client_id": { + "description": "Cognito User Pool Client ID", + "type": "string" + }, + "identity_pool_id": { + "description": "Cognito Identity Pool ID", + "type": "string" + }, + "password_policy": { + "description": "Cognito User Pool password policy", + "type": "object", + "additionalProperties": false, + "properties": { + "min_length": { + "type": "integer", + "minimum": 6, + "maximum": 99 + }, + "require_numbers": { + "type": "boolean" + }, + "require_lowercase": { + "type": "boolean" + }, + "require_uppercase": { + "type": "boolean" + }, + "require_symbols": { + "type": "boolean" + } + }, + "required": [ + "min_length", + "require_numbers", + "require_lowercase", + "require_uppercase", + "require_symbols" + ] + }, + "oauth": { + "type": "object", + "additionalProperties": false, + "properties": { + "identity_providers": { + "description": "Identity providers set on Cognito User Pool", + "type": "array", + "items": { + "type": "string", + "enum": [ + "GOOGLE", + "FACEBOOK", + "LOGIN_WITH_AMAZON", + "SIGN_IN_WITH_APPLE" + ] + }, + "minItems": 0, + "uniqueItems": true + }, + "domain": { + "description": "Domain used for identity providers", + "type": "string" + }, + "scopes": { + "type": "array", + "items": { + "type": "string" + }, + "minItems": 0, + "uniqueItems": true + }, + "redirect_sign_in_uri": { + "description": "URIs used to redirect after signing in using an identity provider", + "type": "array", + "items": { + "type": "string" + }, + "minItems": 1, + "uniqueItems": true + }, + "redirect_sign_out_uri": { + "description": "URIs used to redirect after signing out", + "type": "array", + "items": { + "type": "string" + }, + 
"minItems": 1, + "uniqueItems": true + }, + "response_type": { + "type": "string", + "enum": ["code", "token"] + } + }, + "required": [ + "identity_providers", + "domain", + "scopes", + "redirect_sign_in_uri", + "redirect_sign_out_uri", + "response_type" + ] + }, + "standard_required_attributes": { + "description": "Cognito User Pool standard attributes required for signup", + "type": "array", + "items": { + "$ref": "#/$defs/amazon_cognito_standard_attributes" + }, + "minItems": 0, + "uniqueItems": true + }, + "username_attributes": { + "description": "Cognito User Pool username attributes", + "type": "array", + "items": { + "type": "string", + "enum": ["email", "phone_number", "username"] + }, + "minItems": 1, + "uniqueItems": true + }, + "user_verification_types": { + "type": "array", + "items": { + "type": "string", + "enum": ["email", "phone_number"] + } + }, + "unauthenticated_identities_enabled": { + "type": "boolean", + "default": true + }, + "mfa_configuration": { + "type": "string", + "enum": ["NONE", "OPTIONAL", "REQUIRED"] + }, + "mfa_methods": { + "type": "array", + "items": { + "enum": ["SMS", "TOTP"] + } + }, + "groups": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": false, + "propertyNames": { + "type": "string" + }, + "patternProperties": { + ".*": { + "$ref": "#/$defs/amplify_user_group_config" + } + } + } + } + }, + "required": ["aws_region", "user_pool_id", "user_pool_client_id"] + }, + "data": { + "description": "Outputs generated from defineData", + "type": "object", + "additionalProperties": false, + "properties": { + "aws_region": { + "$ref": "#/$defs/aws_region" + }, + "url": { + "description": "AppSync endpoint URL", + "type": "string" + }, + "model_introspection": { + "description": "generated model introspection schema for use with generateClient", + "type": "object" + }, + "api_key": { + "type": "string" + }, + "default_authorization_type": { + "$ref": "#/$defs/aws_appsync_authorization_type" + }, + "authorization_types": { + "type": "array", + "items": { + "$ref": "#/$defs/aws_appsync_authorization_type" + } + } + }, + "required": [ + "aws_region", + "url", + "default_authorization_type", + "authorization_types" + ] + }, + "geo": { + "description": "Outputs manually specified by developers for use with frontend library", + "type": "object", + "additionalProperties": false, + "properties": { + "aws_region": { + "description": "AWS Region of Amazon Location Service resources", + "$ref": "#/$defs/aws_region" + }, + "maps": { + "description": "Maps from Amazon Location Service", + "type": "object", + "additionalProperties": false, + "properties": { + "items": { + "type": "object", + "additionalProperties": false, + "propertyNames": { + "description": "Amazon Location Service Map name", + "type": "string" + }, + "patternProperties": { + ".*": { + "$ref": "#/$defs/amazon_location_service_config" + } + } + }, + "default": { + "type": "string" + } + }, + "required": ["items", "default"] + }, + "search_indices": { + "description": "Location search (search by places, addresses, coordinates)", + "type": "object", + "additionalProperties": false, + "properties": { + "items": { + "type": "array", + "uniqueItems": true, + "minItems": 1, + "items": { + "description": "Actual search name", + "type": "string" + } + }, + "default": { + "type": "string" + } + }, + "required": ["items", "default"] + }, + "geofence_collections": { + "description": "Geofencing (visualize virtual perimeters)", + "type": "object", + "additionalProperties": false, + 
"properties": { + "items": { + "type": "array", + "uniqueItems": true, + "minItems": 1, + "items": { + "description": "Geofence name", + "type": "string" + } + }, + "default": { + "type": "string" + } + }, + "required": ["items", "default"] + } + }, + "required": ["aws_region"] + }, + "notifications": { + "type": "object", + "description": "Outputs manually specified by developers for use with frontend library", + "additionalProperties": false, + "properties": { + "aws_region": { + "$ref": "#/$defs/aws_region" + }, + "amazon_pinpoint_app_id": { + "type": "string" + }, + "channels": { + "type": "array", + "items": { + "$ref": "#/$defs/amazon_pinpoint_channels" + }, + "minItems": 1, + "uniqueItems": true + } + }, + "required": ["aws_region", "amazon_pinpoint_app_id", "channels"] + }, + "storage": { + "type": "object", + "description": "Outputs generated from defineStorage", + "additionalProperties": false, + "properties": { + "aws_region": { + "$ref": "#/$defs/aws_region" + }, + "bucket_name": { + "type": "string" + }, + "buckets": { + "type": "array", + "items": { + "$ref": "#/$defs/amplify_storage_bucket" + } + } + }, + "required": ["aws_region", "bucket_name"] + }, + "custom": { + "description": "Outputs generated from backend.addOutput({ custom: })", + "type": "object" + } + }, + "required": ["version"], + "$defs": { + "amplify_storage_access_actions": { + "type": "string", + "enum": ["read", "get", "list", "write", "delete"] + }, + "amplify_storage_access_rule": { + "type": "object", + "additionalProperties": false, + "properties": { + "guest": { + "type": "array", + "items": { + "$ref": "#/$defs/amplify_storage_access_actions" + } + }, + "authenticated": { + "type": "array", + "items": { + "$ref": "#/$defs/amplify_storage_access_actions" + } + }, + "groups": { + "type": "array", + "items": { + "$ref": "#/$defs/amplify_storage_access_actions" + } + }, + "entity": { + "type": "array", + "items": { + "$ref": "#/$defs/amplify_storage_access_actions" + } + }, + "resource": { + "type": "array", + "items": { + "$ref": "#/$defs/amplify_storage_access_actions" + } + } + } + }, + "amplify_storage_bucket": { + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "type": "string" + }, + "bucket_name": { + "type": "string" + }, + "aws_region": { + "type": "string" + }, + "paths": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + ".*": { + "$ref": "#/$defs/amplify_storage_access_rule" + } + } + } + }, + "required": ["bucket_name", "aws_region", "name"] + }, + "aws_region": { + "type": "string" + }, + "amazon_cognito_standard_attributes": { + "description": "Amazon Cognito standard attributes for users -- https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-settings-attributes.html", + "type": "string", + "enum": [ + "address", + "birthdate", + "email", + "family_name", + "gender", + "given_name", + "locale", + "middle_name", + "name", + "nickname", + "phone_number", + "picture", + "preferred_username", + "profile", + "sub", + "updated_at", + "website", + "zoneinfo" + ] + }, + "aws_appsync_authorization_type": { + "description": "List of supported auth types for AWS AppSync", + "type": "string", + "enum": [ + "AMAZON_COGNITO_USER_POOLS", + "API_KEY", + "AWS_IAM", + "AWS_LAMBDA", + "OPENID_CONNECT" + ] + }, + "amazon_location_service_config": { + "type": "object", + "additionalProperties": false, + "properties": { + "style": { + "description": "Map style", + "type": "string" + } + } + }, + "amazon_pinpoint_channels": { + 
"description": "supported channels for Amazon Pinpoint", + "type": "string", + "enum": ["IN_APP_MESSAGING", "FCM", "APNS", "EMAIL", "SMS"] + }, + "amplify_user_group_config": { + "type": "object", + "additionalProperties": false, + "properties": { + "precedence": { + "type": "integer" + } + } + } + } +} diff --git a/packages/client-config/src/client-config-types/client_config.ts b/packages/client-config/src/client-config-types/client_config.ts index 5467484d67..501d52792b 100644 --- a/packages/client-config/src/client-config-types/client_config.ts +++ b/packages/client-config/src/client-config-types/client_config.ts @@ -12,6 +12,7 @@ import * as clientConfigTypesV1 from '../client-config-schema/client_config_v1.j /* eslint-disable @typescript-eslint/naming-convention */ import * as clientConfigTypesV1_1 from '../client-config-schema/client_config_v1.1.js'; import * as clientConfigTypesV1_2 from '../client-config-schema/client_config_v1.2.js'; +import * as clientConfigTypesV1_3 from '../client-config-schema/client_config_v1.3.js'; /* eslint-enable @typescript-eslint/naming-convention */ /** @@ -34,24 +35,31 @@ export type ClientConfigLegacy = Partial< * ClientConfig = clientConfigTypesV1.AWSAmplifyBackendOutputs | clientConfigTypesV2.AWSAmplifyBackendOutputs; */ export type ClientConfig = + | clientConfigTypesV1_3.AWSAmplifyBackendOutputs | clientConfigTypesV1_2.AWSAmplifyBackendOutputs | clientConfigTypesV1_1.AWSAmplifyBackendOutputs | clientConfigTypesV1.AWSAmplifyBackendOutputs; -export { clientConfigTypesV1, clientConfigTypesV1_1, clientConfigTypesV1_2 }; +export { + clientConfigTypesV1, + clientConfigTypesV1_1, + clientConfigTypesV1_2, + clientConfigTypesV1_3, +}; export enum ClientConfigVersionOption { V0 = '0', // Legacy client config V1 = '1', V1_1 = '1.1', V1_2 = '1.2', + V1_3 = '1.3', } export type ClientConfigVersion = `${ClientConfigVersionOption}`; // Client config version that is generated by default if customers didn't specify one export const DEFAULT_CLIENT_CONFIG_VERSION: ClientConfigVersion = - ClientConfigVersionOption.V1_2; + ClientConfigVersionOption.V1_3; /** * Return type of `getClientConfig`. This types narrow the returned client config version @@ -64,7 +72,9 @@ export const DEFAULT_CLIENT_CONFIG_VERSION: ClientConfigVersion = * ? clientConfigTypesV2.AWSAmplifyBackendOutputs * : never; */ -export type ClientConfigVersionTemplateType = T extends '1.2' +export type ClientConfigVersionTemplateType = T extends '1.3' + ? clientConfigTypesV1_3.AWSAmplifyBackendOutputs + : T extends '1.2' ? clientConfigTypesV1_2.AWSAmplifyBackendOutputs : T extends '1.1' ? 
clientConfigTypesV1_1.AWSAmplifyBackendOutputs diff --git a/packages/client-config/src/client-config-writer/client_config_formatter_default.test.ts b/packages/client-config/src/client-config-writer/client_config_formatter_default.test.ts index e605ef8967..33a6e57ff0 100644 --- a/packages/client-config/src/client-config-writer/client_config_formatter_default.test.ts +++ b/packages/client-config/src/client-config-writer/client_config_formatter_default.test.ts @@ -13,7 +13,7 @@ void describe('client config formatter', () => { const sampleIdentityPoolId = 'test_identity_pool_id'; const sampleUserPoolClientId = 'test_user_pool_client_id'; const clientConfig: ClientConfig = { - version: '1.2', + version: '1.3', auth: { aws_region: sampleRegion, identity_pool_id: sampleIdentityPoolId, @@ -23,7 +23,7 @@ void describe('client config formatter', () => { }; const expectedConfigReturned: ClientConfig = { - version: '1.2', + version: '1.3', auth: { aws_region: sampleRegion, identity_pool_id: sampleIdentityPoolId, @@ -50,7 +50,7 @@ void describe('client config formatter', () => { ClientConfigFormat.DART ); - assert.ok(formattedConfig.startsWith("const amplifyConfig = '''")); + assert.ok(formattedConfig.startsWith("const amplifyConfig = r'''")); assert.ok( formattedConfig.includes(JSON.stringify(expectedConfigReturned, null, 2)) ); diff --git a/packages/client-config/src/client-config-writer/client_config_formatter_default.ts b/packages/client-config/src/client-config-writer/client_config_formatter_default.ts index e05e68704a..54994f0096 100644 --- a/packages/client-config/src/client-config-writer/client_config_formatter_default.ts +++ b/packages/client-config/src/client-config-writer/client_config_formatter_default.ts @@ -16,7 +16,9 @@ export class ClientConfigFormatterDefault implements ClientConfigFormatter { format = (clientConfig: ClientConfig, format: ClientConfigFormat): string => { switch (format) { case ClientConfigFormat.DART: { - return `const amplifyConfig = '''${JSON.stringify( + // Using raw string, i.e. r''' to disable Dart's interpolations + // because we're using special characters like $ in some outputs. 
+ return `const amplifyConfig = r'''${JSON.stringify( clientConfig, null, 2 diff --git a/packages/client-config/src/client-config-writer/client_config_formatter_legacy.test.ts b/packages/client-config/src/client-config-writer/client_config_formatter_legacy.test.ts index 737257273d..bfe343250d 100644 --- a/packages/client-config/src/client-config-writer/client_config_formatter_legacy.test.ts +++ b/packages/client-config/src/client-config-writer/client_config_formatter_legacy.test.ts @@ -20,7 +20,7 @@ void describe('client config formatter', () => { const sampleUserPoolId = randomUUID(); const clientConfig: ClientConfig = { - version: '1.2', + version: '1.3', auth: { aws_region: sampleRegion, identity_pool_id: sampleIdentityPoolId, @@ -109,7 +109,7 @@ void describe('client config formatter', () => { expectedLegacyConfig.aws_user_pools_id ); - assert.ok(formattedConfig.startsWith("const amplifyConfig = '''")); + assert.ok(formattedConfig.startsWith("const amplifyConfig = r'''")); assert.ok( formattedConfig.includes(JSON.stringify(clientConfigMobile, null, 2)) ); diff --git a/packages/client-config/src/client-config-writer/client_config_formatter_legacy.ts b/packages/client-config/src/client-config-writer/client_config_formatter_legacy.ts index 153b6add97..fdb1355259 100644 --- a/packages/client-config/src/client-config-writer/client_config_formatter_legacy.ts +++ b/packages/client-config/src/client-config-writer/client_config_formatter_legacy.ts @@ -29,7 +29,9 @@ export class ClientConfigFormatterLegacy implements ClientConfigFormatter { }export default amplifyConfig;${os.EOL}`; } case ClientConfigFormat.DART: { - return `const amplifyConfig = '''${JSON.stringify( + // Using raw string, i.e. r''' to disable Dart's interpolations + // because we're using special characters like $ in some outputs. 
+ return `const amplifyConfig = r'''${JSON.stringify( this.configConverter.convertToMobileConfig(legacyConfig), null, 2 diff --git a/packages/client-config/src/client-config-writer/client_config_to_legacy_converter.test.ts b/packages/client-config/src/client-config-writer/client_config_to_legacy_converter.test.ts index dda5c288a9..ce5ce1b110 100644 --- a/packages/client-config/src/client-config-writer/client_config_to_legacy_converter.test.ts +++ b/packages/client-config/src/client-config-writer/client_config_to_legacy_converter.test.ts @@ -26,7 +26,7 @@ void describe('ClientConfigLegacyConverter', () => { version: '3' as any, }), new AmplifyFault('UnsupportedClientConfigVersionFault', { - message: 'Only version 1.2 of ClientConfig is supported.', + message: 'Only version 1.3 of ClientConfig is supported.', }) ); }); @@ -35,7 +35,7 @@ void describe('ClientConfigLegacyConverter', () => { const converter = new ClientConfigLegacyConverter(); const v1Config: ClientConfig = { - version: ClientConfigVersionOption.V1_2, + version: ClientConfigVersionOption.V1_3, auth: { identity_pool_id: 'testIdentityPoolId', user_pool_id: 'testUserPoolId', @@ -133,7 +133,7 @@ void describe('ClientConfigLegacyConverter', () => { const converter = new ClientConfigLegacyConverter(); const v1Config: ClientConfig = { - version: ClientConfigVersionOption.V1_2, + version: ClientConfigVersionOption.V1_3, data: { aws_region: 'testRegion', url: 'testUrl', @@ -274,7 +274,7 @@ void describe('ClientConfigLegacyConverter', () => { const converter = new ClientConfigLegacyConverter(); const v1Config: ClientConfig = { - version: ClientConfigVersionOption.V1_2, + version: ClientConfigVersionOption.V1_3, storage: { aws_region: 'testRegion', bucket_name: 'testBucket', @@ -296,7 +296,7 @@ void describe('ClientConfigLegacyConverter', () => { const converter = new ClientConfigLegacyConverter(); const v1Config: ClientConfig = { - version: ClientConfigVersionOption.V1_2, + version: ClientConfigVersionOption.V1_3, custom: { customKey: { customNestedKey: { @@ -327,7 +327,7 @@ void describe('ClientConfigLegacyConverter', () => { const converter = new ClientConfigLegacyConverter(); const v1Config: ClientConfig = { - version: ClientConfigVersionOption.V1_2, + version: ClientConfigVersionOption.V1_3, analytics: { amazon_pinpoint: { aws_region: 'testRegion', @@ -356,7 +356,7 @@ void describe('ClientConfigLegacyConverter', () => { const converter = new ClientConfigLegacyConverter(); const v1Config: ClientConfig = { - version: ClientConfigVersionOption.V1_2, + version: ClientConfigVersionOption.V1_3, geo: { aws_region: 'testRegion', maps: { @@ -409,7 +409,7 @@ void describe('ClientConfigLegacyConverter', () => { const converter = new ClientConfigLegacyConverter(); let v1Config: ClientConfig = { - version: ClientConfigVersionOption.V1_2, + version: ClientConfigVersionOption.V1_3, notifications: { amazon_pinpoint_app_id: 'testAppId', aws_region: 'testRegion', @@ -452,7 +452,7 @@ void describe('ClientConfigLegacyConverter', () => { // both APNS and FCM cannot be specified together as they both map to Push. 
v1Config = { - version: ClientConfigVersionOption.V1_2, + version: ClientConfigVersionOption.V1_3, notifications: { amazon_pinpoint_app_id: 'testAppId', aws_region: 'testRegion', diff --git a/packages/client-config/src/client-config-writer/client_config_to_legacy_converter.ts b/packages/client-config/src/client-config-writer/client_config_to_legacy_converter.ts index 3131b041ed..c3b89dcf6e 100644 --- a/packages/client-config/src/client-config-writer/client_config_to_legacy_converter.ts +++ b/packages/client-config/src/client-config-writer/client_config_to_legacy_converter.ts @@ -2,7 +2,7 @@ import { AmplifyFault } from '@aws-amplify/platform-core'; import { ClientConfig, ClientConfigLegacy, - clientConfigTypesV1_2, + clientConfigTypesV1_3, } from '../client-config-types/client_config.js'; import { @@ -22,10 +22,10 @@ export class ClientConfigLegacyConverter { * Converts client config to a shape consumable by legacy libraries. */ convertToLegacyConfig = (clientConfig: ClientConfig): ClientConfigLegacy => { - // We can only convert from V1.2 of ClientConfig. For everything else, throw - if (!this.isClientConfigV1_2(clientConfig)) { + // We can only convert from V1.3 of ClientConfig. For everything else, throw + if (!this.isClientConfigV1_3(clientConfig)) { throw new AmplifyFault('UnsupportedClientConfigVersionFault', { - message: 'Only version 1.2 of ClientConfig is supported.', + message: 'Only version 1.3 of ClientConfig is supported.', }); } @@ -274,9 +274,9 @@ export class ClientConfigLegacyConverter { }; // eslint-disable-next-line @typescript-eslint/naming-convention - isClientConfigV1_2 = ( + isClientConfigV1_3 = ( clientConfig: ClientConfig - ): clientConfig is clientConfigTypesV1_2.AWSAmplifyBackendOutputs => { - return clientConfig.version === '1.2'; + ): clientConfig is clientConfigTypesV1_3.AWSAmplifyBackendOutputs => { + return clientConfig.version === '1.3'; }; } diff --git a/packages/client-config/src/client-config-writer/client_config_writer.test.ts b/packages/client-config/src/client-config-writer/client_config_writer.test.ts index 7f3771224d..e181d3deb2 100644 --- a/packages/client-config/src/client-config-writer/client_config_writer.test.ts +++ b/packages/client-config/src/client-config-writer/client_config_writer.test.ts @@ -42,7 +42,7 @@ void describe('client config writer', () => { }); const clientConfig: ClientConfig = { - version: '1.2', + version: '1.3', auth: { aws_region: sampleRegion, identity_pool_id: sampleIdentityPoolId, diff --git a/packages/client-config/src/generate_empty_client_config_to_file.test.ts b/packages/client-config/src/generate_empty_client_config_to_file.test.ts index 34dbbc9f82..21abe85a03 100644 --- a/packages/client-config/src/generate_empty_client_config_to_file.test.ts +++ b/packages/client-config/src/generate_empty_client_config_to_file.test.ts @@ -30,15 +30,15 @@ void describe('generate empty client config to file', () => { path.join(process.cwd(), 'userOutDir', 'amplifyconfiguration.ts') ); }); - void it('correctly generates an empty file for client config version 1.2', async () => { + void it('correctly generates an empty file for client config version 1.3', async () => { await generateEmptyClientConfigToFile( - ClientConfigVersionOption.V1_2, + ClientConfigVersionOption.V1_3, 'userOutDir' ); assert.equal(writeFileMock.mock.callCount(), 1); assert.deepStrictEqual( writeFileMock.mock.calls[0].arguments[1], - `{\n "version": "1.2"\n}` + `{\n "version": "1.3"\n}` ); assert.deepStrictEqual( writeFileMock.mock.calls[0].arguments[0], diff 
--git a/packages/client-config/src/generate_empty_client_config_to_file.ts b/packages/client-config/src/generate_empty_client_config_to_file.ts index 260dad6173..b2563330b1 100644 --- a/packages/client-config/src/generate_empty_client_config_to_file.ts +++ b/packages/client-config/src/generate_empty_client_config_to_file.ts @@ -15,7 +15,7 @@ export const generateEmptyClientConfigToFile = async ( format?: ClientConfigFormat ): Promise => { const clientConfig: ClientConfig = { - version: '1.2', + version: '1.3', }; return writeClientConfigToFile(clientConfig, version, outDir, format); }; diff --git a/packages/client-config/src/unified_client_config_generator.test.ts b/packages/client-config/src/unified_client_config_generator.test.ts index c466311a68..496d59df9d 100644 --- a/packages/client-config/src/unified_client_config_generator.test.ts +++ b/packages/client-config/src/unified_client_config_generator.test.ts @@ -26,6 +26,140 @@ const stubClientProvider = { }; void describe('UnifiedClientConfigGenerator', () => { void describe('generateClientConfig', () => { + void it('transforms backend output into client config for V1.3', async () => { + const groups = [ + { + ADMINS: { + precedence: 0, + }, + }, + { + EDITORS: { + precedence: 1, + }, + }, + ]; + const stubOutput: UnifiedBackendOutput = { + [platformOutputKey]: { + version: '1', + payload: { + deploymentType: 'branch', + region: 'us-east-1', + }, + }, + [authOutputKey]: { + version: '1', + payload: { + identityPoolId: 'testIdentityPoolId', + userPoolId: 'testUserPoolId', + webClientId: 'testWebClientId', + authRegion: 'us-east-1', + passwordPolicyMinLength: '8', + passwordPolicyRequirements: + '["REQUIRES_NUMBERS","REQUIRES_LOWERCASE","REQUIRES_UPPERCASE"]', + mfaTypes: '["SMS","TOTP"]', + mfaConfiguration: 'OPTIONAL', + verificationMechanisms: '["email","phone_number"]', + usernameAttributes: '["email"]', + signupAttributes: '["email"]', + allowUnauthenticatedIdentities: 'true', + groups: JSON.stringify(groups), + }, + }, + [graphqlOutputKey]: { + version: '1', + payload: { + awsAppsyncApiEndpoint: 'testApiEndpoint', + awsAppsyncRegion: 'us-east-1', + awsAppsyncAuthenticationType: 'API_KEY', + awsAppsyncAdditionalAuthenticationTypes: 'API_KEY', + awsAppsyncConflictResolutionMode: 'AUTO_MERGE', + awsAppsyncApiKey: 'testApiKey', + awsAppsyncApiId: 'testApiId', + amplifyApiModelSchemaS3Uri: 'testApiSchemaUri', + }, + }, + [customOutputKey]: { + version: '1', + payload: { + customOutputs: JSON.stringify({ + custom: { + output1: 'val1', + output2: 'val2', + }, + }), + }, + }, + }; + const outputRetrieval = mock.fn(async () => stubOutput); + const modelSchemaAdapter = new ModelIntrospectionSchemaAdapter( + stubClientProvider + ); + + mock.method( + modelSchemaAdapter, + 'getModelIntrospectionSchemaFromS3Uri', + () => undefined + ); + const configContributors = new ClientConfigContributorFactory( + modelSchemaAdapter + ).getContributors('1.3'); + const clientConfigGenerator = new UnifiedClientConfigGenerator( + outputRetrieval, + configContributors + ); + const result = await clientConfigGenerator.generateClientConfig(); + const expectedClientConfig: ClientConfig = { + auth: { + user_pool_id: 'testUserPoolId', + aws_region: 'us-east-1', + user_pool_client_id: 'testWebClientId', + identity_pool_id: 'testIdentityPoolId', + mfa_methods: ['SMS', 'TOTP'], + standard_required_attributes: ['email'], + username_attributes: ['email'], + user_verification_types: ['email', 'phone_number'], + mfa_configuration: 'OPTIONAL', + + password_policy: { + 
min_length: 8, + require_lowercase: true, + require_numbers: true, + require_symbols: false, + require_uppercase: true, + }, + + unauthenticated_identities_enabled: true, + groups: [ + { + ADMINS: { + precedence: 0, + }, + }, + { + EDITORS: { + precedence: 1, + }, + }, + ], + }, + data: { + url: 'testApiEndpoint', + aws_region: 'us-east-1', + api_key: 'testApiKey', + default_authorization_type: 'API_KEY', + authorization_types: ['API_KEY'], + }, + custom: { + output1: 'val1', + output2: 'val2', + }, + version: '1.3', + }; + + assert.deepStrictEqual(result, expectedClientConfig); + }); + void it('transforms backend output into client config for V1.2', async () => { const stubOutput: UnifiedBackendOutput = { [platformOutputKey]: { @@ -406,7 +540,7 @@ void describe('UnifiedClientConfigGenerator', () => { ); const configContributors = new ClientConfigContributorFactory( modelSchemaAdapter - ).getContributors('1.2'); //Generate with new configuration format + ).getContributors('1.3'); //Generate with new configuration format const clientConfigGenerator = new UnifiedClientConfigGenerator( outputRetrieval, configContributors @@ -438,7 +572,7 @@ void describe('UnifiedClientConfigGenerator', () => { output1: 'val1', output2: 'val2', }, - version: '1.2', // The max version prevails + version: '1.3', // The max version prevails }; assert.deepStrictEqual(result, expectedClientConfig); @@ -477,7 +611,7 @@ void describe('UnifiedClientConfigGenerator', () => { ); const configContributors = new ClientConfigContributorFactory( modelSchemaAdapter - ).getContributors('1.2'); + ).getContributors('1.3'); const clientConfigGenerator = new UnifiedClientConfigGenerator( outputRetrieval, @@ -509,7 +643,7 @@ void describe('UnifiedClientConfigGenerator', () => { const configContributors = new ClientConfigContributorFactory( modelSchemaAdapter - ).getContributors('1.2'); + ).getContributors('1.3'); const clientConfigGenerator = new UnifiedClientConfigGenerator( outputRetrieval, @@ -541,7 +675,7 @@ void describe('UnifiedClientConfigGenerator', () => { const configContributors = new ClientConfigContributorFactory( modelSchemaAdapter - ).getContributors('1.2'); + ).getContributors('1.3'); const clientConfigGenerator = new UnifiedClientConfigGenerator( outputRetrieval, @@ -604,7 +738,7 @@ void describe('UnifiedClientConfigGenerator', () => { const configContributors = new ClientConfigContributorFactory( modelSchemaAdapter - ).getContributors('1.2'); + ).getContributors('1.3'); const clientConfigGenerator = new UnifiedClientConfigGenerator( outputRetrieval, @@ -637,7 +771,7 @@ void describe('UnifiedClientConfigGenerator', () => { const configContributors = new ClientConfigContributorFactory( modelSchemaAdapter - ).getContributors('1.2'); + ).getContributors('1.3'); const clientConfigGenerator = new UnifiedClientConfigGenerator( outputRetrieval, diff --git a/packages/deployed-backend-client/CHANGELOG.md b/packages/deployed-backend-client/CHANGELOG.md index 7ee529cc2b..49a70a8190 100644 --- a/packages/deployed-backend-client/CHANGELOG.md +++ b/packages/deployed-backend-client/CHANGELOG.md @@ -1,5 +1,11 @@ # @aws-amplify/deployed-backend-client +## 1.4.2 + +### Patch Changes + +- fdf28bd: fix: detect deploymentType from Stack Tags + ## 1.4.1 ### Patch Changes diff --git a/packages/deployed-backend-client/package.json b/packages/deployed-backend-client/package.json index ef86b6e30e..e1df5238d2 100644 --- a/packages/deployed-backend-client/package.json +++ b/packages/deployed-backend-client/package.json @@ -1,6 +1,6 @@ { 
"name": "@aws-amplify/deployed-backend-client", - "version": "1.4.1", + "version": "1.4.2", "type": "module", "publishConfig": { "access": "public" diff --git a/packages/deployed-backend-client/src/deployed_backend_client.ts b/packages/deployed-backend-client/src/deployed_backend_client.ts index 94b18def4c..2ff69efe31 100644 --- a/packages/deployed-backend-client/src/deployed_backend_client.ts +++ b/packages/deployed-backend-client/src/deployed_backend_client.ts @@ -15,11 +15,7 @@ import { ListBackendsResponse, } from './deployed_backend_client_factory.js'; import { BackendIdentifierConversions } from '@aws-amplify/platform-core'; -import { - BackendOutputClient, - BackendOutputClientError, - BackendOutputClientErrorType, -} from './backend_output_client_factory.js'; +import { BackendOutputClient } from './backend_output_client_factory.js'; import { CloudFormationClient, DeleteStackCommand, @@ -158,26 +154,13 @@ export class DefaultDeployedBackendClient implements DeployedBackendClient { private tryGetDeploymentType = async ( stackSummary: StackSummary ): Promise => { - const backendIdentifier = { - stackName: stackSummary.StackName as string, - }; + const stackDescription = await this.cfnClient.send( + new DescribeStacksCommand({ StackName: stackSummary.StackName }) + ); - try { - const backendOutput: BackendOutput = - await this.backendOutputClient.getOutput(backendIdentifier); - - return backendOutput[platformOutputKey].payload - .deploymentType as DeploymentType; - } catch (error) { - if ( - (error as BackendOutputClientError).code === - BackendOutputClientErrorType.METADATA_RETRIEVAL_ERROR - ) { - // Ignore stacks where metadata cannot be retrieved. These are not Amplify stacks, or not compatible with this library. - return; - } - throw error; - } + return stackDescription.Stacks?.[0].Tags?.find( + (tag) => tag.Key === 'amplify:deployment-type' + )?.Value as DeploymentType; }; private listStacks = async ( diff --git a/packages/deployed-backend-client/src/deployed_backend_client_list_delete_failed_stacks.test.ts b/packages/deployed-backend-client/src/deployed_backend_client_list_delete_failed_stacks.test.ts index 96afbc73c2..116042ad1c 100644 --- a/packages/deployed-backend-client/src/deployed_backend_client_list_delete_failed_stacks.test.ts +++ b/packages/deployed-backend-client/src/deployed_backend_client_list_delete_failed_stacks.test.ts @@ -6,15 +6,9 @@ import { ListStacksCommand, StackStatus, } from '@aws-sdk/client-cloudformation'; -import { platformOutputKey } from '@aws-amplify/backend-output-schemas'; import { DefaultBackendOutputClient } from './backend_output_client.js'; import { DefaultDeployedBackendClient } from './deployed_backend_client.js'; import { BackendStatus } from './deployed_backend_client_factory.js'; -import { - BackendOutputClientError, - BackendOutputClientErrorType, - StackIdentifier, -} from './index.js'; import { AmplifyClient } from '@aws-sdk/client-amplify'; import { S3 } from '@aws-sdk/client-s3'; import { DeployedResourcesEnumerator } from './deployed-backend-client/deployed_resources_enumerator.js'; @@ -34,14 +28,6 @@ const listStacksMock = { ], }; -const getOutputMockResponse = { - [platformOutputKey]: { - payload: { - deploymentType: 'branch', - }, - }, -}; - void describe('Deployed Backend Client list delete failed stacks', () => { const mockCfnClient = new CloudFormation(); const mockS3Client = new S3(); @@ -56,9 +42,19 @@ void describe('Deployed Backend Client list delete failed stacks', () => { const matchingStack = 
listStacksMock.StackSummaries.find((stack) => { return stack.StackName === request.input.StackName; }); - const stack = matchingStack; + // Add tags that are used to detect deployment type return { - Stacks: [stack], + Stacks: [ + { + ...matchingStack, + Tags: [ + { + Key: 'amplify:deployment-type', + Value: 'branch', + }, + ], + }, + ], }; } throw request; @@ -83,23 +79,6 @@ void describe('Deployed Backend Client list delete failed stacks', () => { mockCfnClient, new AmplifyClient() ); - const getOutputMock = mock.method( - mockBackendOutputClient, - 'getOutput', - (backendIdentifier: StackIdentifier) => { - if (backendIdentifier.stackName === 'amplify-test-not-a-sandbox') { - return { - ...getOutputMockResponse, - [platformOutputKey]: { - payload: { - deploymentType: 'branch', - }, - }, - }; - } - return getOutputMockResponse; - } - ); const returnedDeleteFailedStacks = [ { deploymentType: 'branch', @@ -116,7 +95,6 @@ void describe('Deployed Backend Client list delete failed stacks', () => { ]; beforeEach(() => { - getOutputMock.mock.resetCalls(); listStacksMockFn.mock.resetCalls(); cfnClientSendMock.mock.resetCalls(); const deployedResourcesEnumerator = new DeployedResourcesEnumerator( @@ -171,98 +149,4 @@ void describe('Deployed Backend Client list delete failed stacks', () => { assert.equal(listStacksMockFn.mock.callCount(), 2); }); - - void it('paginates listBackends when one page contains stacks, but it gets filtered due to not deleted failed status', async () => { - listStacksMockFn.mock.mockImplementationOnce(() => { - return { - StackSummaries: [], - NextToken: 'abc', - }; - }); - const failedStacks = deployedBackendClient.listBackends({ - deploymentType: 'branch', - backendStatusFilters: [BackendStatus.DELETE_FAILED], - }); - assert.deepEqual( - (await failedStacks.getBackendSummaryByPage().next()).value, - returnedDeleteFailedStacks - ); - - assert.equal(listStacksMockFn.mock.callCount(), 2); - }); - - void it('paginates listBackends when one page contains stacks, but it gets filtered due to sandbox deploymentType', async () => { - listStacksMockFn.mock.mockImplementationOnce(() => { - return { - StackSummaries: [], - NextToken: 'abc', - }; - }); - const failedStacks = deployedBackendClient.listBackends({ - deploymentType: 'branch', - backendStatusFilters: [BackendStatus.DELETE_FAILED], - }); - assert.deepEqual( - (await failedStacks.getBackendSummaryByPage().next()).value, - returnedDeleteFailedStacks - ); - - assert.equal(listStacksMockFn.mock.callCount(), 2); - }); - - void it('paginates listBackends when one page contains a stack, but it gets filtered due to not having gen2 outputs', async () => { - getOutputMock.mock.mockImplementationOnce(() => { - throw new BackendOutputClientError( - BackendOutputClientErrorType.METADATA_RETRIEVAL_ERROR, - 'Test metadata retrieval error' - ); - }); - listStacksMockFn.mock.mockImplementationOnce(() => { - return { - StackSummaries: [ - { - StackName: 'amplify-123-name-branch-testHash', - StackStatus: StackStatus.DELETE_FAILED, - CreationTime: new Date(0), - LastUpdatedTime: new Date(1), - }, - ], - NextToken: 'abc', - }; - }); - const failedStacks = deployedBackendClient.listBackends({ - deploymentType: 'branch', - backendStatusFilters: [BackendStatus.DELETE_FAILED], - }); - assert.deepEqual( - (await failedStacks.getBackendSummaryByPage().next()).value, - returnedDeleteFailedStacks - ); - - assert.equal(listStacksMockFn.mock.callCount(), 2); - }); - - void it('does not paginate listBackends when one page throws an unexpected error 
fetching gen2 outputs', async () => { - getOutputMock.mock.mockImplementationOnce(() => { - throw new Error('Unexpected Error!'); - }); - listStacksMockFn.mock.mockImplementationOnce(() => { - return { - StackSummaries: [ - { - StackName: 'amplify-123-name-branch-testHash', - StackStatus: StackStatus.DELETE_FAILED, - CreationTime: new Date(0), - LastUpdatedTime: new Date(1), - }, - ], - NextToken: 'abc', - }; - }); - const listBackendsPromise = deployedBackendClient.listBackends({ - deploymentType: 'branch', - backendStatusFilters: [BackendStatus.DELETE_FAILED], - }); - await assert.rejects(listBackendsPromise.getBackendSummaryByPage().next()); - }); }); diff --git a/packages/deployed-backend-client/src/deployed_backend_client_list_sandboxes.test.ts b/packages/deployed-backend-client/src/deployed_backend_client_list_sandboxes.test.ts index 12a9e024f9..fb8d690037 100644 --- a/packages/deployed-backend-client/src/deployed_backend_client_list_sandboxes.test.ts +++ b/packages/deployed-backend-client/src/deployed_backend_client_list_sandboxes.test.ts @@ -7,14 +7,8 @@ import { StackStatus, } from '@aws-sdk/client-cloudformation'; import { BackendDeploymentStatus } from './deployed_backend_client_factory.js'; -import { platformOutputKey } from '@aws-amplify/backend-output-schemas'; import { DefaultBackendOutputClient } from './backend_output_client.js'; import { DefaultDeployedBackendClient } from './deployed_backend_client.js'; -import { - BackendOutputClientError, - BackendOutputClientErrorType, - StackIdentifier, -} from './index.js'; import { AmplifyClient } from '@aws-sdk/client-amplify'; import { S3 } from '@aws-sdk/client-s3'; import { DeployedResourcesEnumerator } from './deployed-backend-client/deployed_resources_enumerator.js'; @@ -34,14 +28,6 @@ const listStacksMock = { ], }; -const getOutputMockResponse = { - [platformOutputKey]: { - payload: { - deploymentType: 'sandbox', - }, - }, -}; - void describe('Deployed Backend Client list sandboxes', () => { const mockCfnClient = new CloudFormation(); const mockS3Client = new S3(); @@ -56,9 +42,18 @@ void describe('Deployed Backend Client list sandboxes', () => { const matchingStack = listStacksMock.StackSummaries.find((stack) => { return stack.StackName === request.input.StackName; }); - const stack = matchingStack; return { - Stacks: [stack], + Stacks: [ + { + ...matchingStack, + Tags: [ + { + Key: 'amplify:deployment-type', + Value: 'sandbox', + }, + ], + }, + ], }; } throw request; @@ -84,23 +79,7 @@ void describe('Deployed Backend Client list sandboxes', () => { mockCfnClient, new AmplifyClient() ); - const getOutputMock = mock.method( - mockBackendOutputClient, - 'getOutput', - (backendIdentifier: StackIdentifier) => { - if (backendIdentifier.stackName === 'amplify-test-not-a-sandbox') { - return { - ...getOutputMockResponse, - [platformOutputKey]: { - payload: { - deploymentType: 'branch', - }, - }, - }; - } - return getOutputMockResponse; - } - ); + const returnedSandboxes = [ { deploymentType: 'sandbox', @@ -117,7 +96,6 @@ void describe('Deployed Backend Client list sandboxes', () => { ]; beforeEach(() => { - getOutputMock.mock.resetCalls(); listStacksMockFn.mock.resetCalls(); cfnClientSendMock.mock.resetCalls(); const deployedResourcesEnumerator = new DeployedResourcesEnumerator( @@ -209,57 +187,36 @@ void describe('Deployed Backend Client list sandboxes', () => { assert.equal(listStacksMockFn.mock.callCount(), 2); }); - void it('paginates listBackends when one page contains a stack, but it gets filtered due to not having gen2 
outputs', async () => { - getOutputMock.mock.mockImplementationOnce(() => { - throw new BackendOutputClientError( - BackendOutputClientErrorType.METADATA_RETRIEVAL_ERROR, - 'Test metadata retrieval error' - ); - }); - listStacksMockFn.mock.mockImplementationOnce(() => { - return { - StackSummaries: [ - { - StackName: 'amplify-test-name-sandbox-testHash', - StackStatus: StackStatus.CREATE_COMPLETE, - CreationTime: new Date(0), - LastUpdatedTime: new Date(1), - }, - ], - NextToken: 'abc', - }; - }); + void it('filter stacks that do not have deploymentType tag in it', async () => { + cfnClientSendMock.mock.mockImplementation( + (request: ListStacksCommand | DescribeStacksCommand) => { + if (request instanceof ListStacksCommand) { + return listStacksMockFn(request.input); + } + if (request instanceof DescribeStacksCommand) { + const matchingStack = listStacksMock.StackSummaries.find((stack) => { + return stack.StackName === request.input.StackName; + }); + return { + Stacks: [ + { + ...matchingStack, + Tags: [], + }, + ], + }; + } + throw request; + } + ); const sandboxes = deployedBackendClient.listBackends({ deploymentType: 'sandbox', }); assert.deepEqual( - (await sandboxes.getBackendSummaryByPage().next()).value, - returnedSandboxes + (await sandboxes.getBackendSummaryByPage().next()).done, + true ); - assert.equal(listStacksMockFn.mock.callCount(), 2); - }); - - void it('does not paginate listBackends when one page throws an unexpected error fetching gen2 outputs', async () => { - getOutputMock.mock.mockImplementationOnce(() => { - throw new Error('Unexpected Error!'); - }); - listStacksMockFn.mock.mockImplementationOnce(() => { - return { - StackSummaries: [ - { - StackName: 'amplify-test-name-sandbox-testHash', - StackStatus: StackStatus.CREATE_COMPLETE, - CreationTime: new Date(0), - LastUpdatedTime: new Date(1), - }, - ], - NextToken: 'abc', - }; - }); - const listBackendsPromise = deployedBackendClient.listBackends({ - deploymentType: 'sandbox', - }); - await assert.rejects(listBackendsPromise.getBackendSummaryByPage().next()); + assert.equal(listStacksMockFn.mock.callCount(), 1); }); }); diff --git a/packages/eslint-rules/src/index.ts b/packages/eslint-rules/src/index.ts index 88ee363202..2daf628e91 100644 --- a/packages/eslint-rules/src/index.ts +++ b/packages/eslint-rules/src/index.ts @@ -2,9 +2,11 @@ import { noEmptyCatchRule } from './rules/no_empty_catch.js'; import { amplifyErrorNameRule } from './rules/amplify_error_name.js'; import { preferAmplifyErrorsRule } from './rules/prefer_amplify_errors.js'; import { noAmplifyErrors } from './rules/no_amplify_errors.js'; +import { amplifyErrorNoInstanceOf } from './rules/amplify_error_no_instance_of'; export const rules: Record = { 'amplify-error-name': amplifyErrorNameRule, + 'amplify-error-no-instanceof': amplifyErrorNoInstanceOf, 'no-empty-catch': noEmptyCatchRule, 'prefer-amplify-errors': preferAmplifyErrorsRule, 'no-amplify-errors': noAmplifyErrors, @@ -15,6 +17,7 @@ export const configs = { plugins: ['amplify-backend-rules'], rules: { 'amplify-backend-rules/amplify-error-name': 'error', + 'amplify-backend-rules/amplify-error-no-instanceof': 'error', 'amplify-backend-rules/no-empty-catch': 'error', 'amplify-backend-rules/prefer-amplify-errors': 'off', 'amplify-backend-rules/no-amplify-errors': 'off', diff --git a/packages/eslint-rules/src/rules/amplify_error_no_instance_of.test.ts b/packages/eslint-rules/src/rules/amplify_error_no_instance_of.test.ts new file mode 100644 index 0000000000..c805e88fbd --- /dev/null +++ 
b/packages/eslint-rules/src/rules/amplify_error_no_instance_of.test.ts @@ -0,0 +1,28 @@ +import * as nodeTest from 'node:test'; +import { RuleTester } from '@typescript-eslint/rule-tester'; +import { amplifyErrorNoInstanceOf } from './amplify_error_no_instance_of.js'; + +RuleTester.afterAll = nodeTest.after; +// See https://typescript-eslint.io/packages/rule-tester/#with-specific-frameworks +// Node test runner methods return promises which are not relevant in the context of testing. +// We do ignore them in other places with void keyword. +// eslint-disable-next-line @typescript-eslint/no-misused-promises +RuleTester.it = nodeTest.it; +// eslint-disable-next-line @typescript-eslint/no-misused-promises +RuleTester.describe = nodeTest.describe; + +const ruleTester = new RuleTester(); + +ruleTester.run('amplify-error-no-instanceof', amplifyErrorNoInstanceOf, { + valid: ['e instanceof Error'], + invalid: [ + { + code: 'e instanceof AmplifyError', + errors: [ + { + messageId: 'noInstanceOfWithAmplifyError', + }, + ], + }, + ], +}); diff --git a/packages/eslint-rules/src/rules/amplify_error_no_instance_of.ts b/packages/eslint-rules/src/rules/amplify_error_no_instance_of.ts new file mode 100644 index 0000000000..bd040d2134 --- /dev/null +++ b/packages/eslint-rules/src/rules/amplify_error_no_instance_of.ts @@ -0,0 +1,41 @@ +import { ESLintUtils } from '@typescript-eslint/utils'; + +/** + * This rule flags usage of the instanceof operator with AmplifyError. + * + * It reports binary expressions of the form `value instanceof AmplifyError` + * and asks developers to use AmplifyError.isAmplifyError instead, + * matching the messages defined in the rule metadata below. + */ +export const amplifyErrorNoInstanceOf = ESLintUtils.RuleCreator.withoutDocs({ + create(context) { + return { + // This naming comes from @typescript-eslint/utils types. + // eslint-disable-next-line @typescript-eslint/naming-convention + BinaryExpression(node) { + if ( + node.operator === 'instanceof' && + node.right.type === 'Identifier' && + node.right.name === 'AmplifyError' + ) { + context.report({ + messageId: 'noInstanceOfWithAmplifyError', + node, + }); + } + }, + }; + }, + meta: { + docs: { + description: 'Instanceof operator must not be used with AmplifyError.', + }, + messages: { + noInstanceOfWithAmplifyError: + 'Do not use instanceof with AmplifyError. Use AmplifyError.isAmplifyError instead.', + }, + type: 'problem', + schema: [], + }, + defaultOptions: [], +}); diff --git a/packages/integration-tests/CHANGELOG.md b/packages/integration-tests/CHANGELOG.md index 24616d3696..e06cf49a9d 100644 --- a/packages/integration-tests/CHANGELOG.md +++ b/packages/integration-tests/CHANGELOG.md @@ -1,5 +1,15 @@ # @aws-amplify/integration-tests +## 0.6.0 + +### Minor Changes + +- 11d62fe: Add support for custom Lambda function email senders in Auth construct + +### Patch Changes + +- b56d344: update aws-cdk lib to ^2.158.0 + ## 0.5.10 ### Patch Changes diff --git a/packages/integration-tests/README.md b/packages/integration-tests/README.md index 780c9a1929..f5e3f4b709 100644 --- a/packages/integration-tests/README.md +++ b/packages/integration-tests/README.md @@ -17,14 +17,25 @@ or `npm run test:dir packages/integration-tests/lib/test-in-memory` (to run them The create-amplify e2e suite tests the first-time installation and setup of a new amplify backend project. 
To run this suite, run `npm run test:dir packages/integration-tests/lib/test-e2e/create_amplify.test.js` -## deployment tests +## deployment and sandbox tests -To run end-to-end deployment tests, credentials to an AWS account must be available on the machine. Any credentials that will be picked up by the +To run end-to-end deployment or sandbox tests, credentials to an AWS account must be available on the machine. Any credentials that will be picked up by the [default node credential provider](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/setting-credentials-node.html) should work. -This include setting environment variables for a default profile. +This includes setting environment variables for a default profile. -To run this suite, run -`npm run test:dir packages/integration-tests/lib/test-e2e/deployment.test.js` +To run the deployment suite, run +`npm run test:dir packages/integration-tests/lib/test-e2e/deployment/*.deployment.test.js` + +To run the sandbox suite, run +`npm run test:dir packages/integration-tests/lib/test-e2e/sandbox/*.sandbox.test.js` + +To run a deployment or sandbox test for a specific project, specify the exact test file, for example +`npm run test:dir packages/integration-tests/lib/test-e2e/sandbox/data_storage_auth_with_triggers.sandbox.test.js` + +When working locally with sandbox tests, it is sometimes useful to retain the deployment of a test project to avoid full re-deployments while working +on a single test project incrementally. To retain the deployment, set the `AMPLIFY_BACKEND_TESTS_RETAIN_TEST_PROJECT_DEPLOYMENT` environment +variable to `true`. This flag disables project name randomization and deployment cleanup, so that subsequent runs of the same test +target the same CFN stacks. This option is not available for deployment tests (hotswap would not work there anyway). 
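+ +For example, assuming a POSIX shell, the retain flag can be combined with one of the sandbox commands above: +`AMPLIFY_BACKEND_TESTS_RETAIN_TEST_PROJECT_DEPLOYMENT=true npm run test:dir packages/integration-tests/lib/test-e2e/sandbox/data_storage_auth_with_triggers.sandbox.test.js` 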
## backend-output tests diff --git a/packages/integration-tests/package.json b/packages/integration-tests/package.json index a8199c7a1a..ce7b456a1c 100644 --- a/packages/integration-tests/package.json +++ b/packages/integration-tests/package.json @@ -1,20 +1,20 @@ { "name": "@aws-amplify/integration-tests", "private": true, - "version": "0.5.10", + "version": "0.6.0", "type": "module", "devDependencies": { "@apollo/client": "^3.10.1", - "@aws-amplify/ai-constructs": "^0.4.0", - "@aws-amplify/auth-construct": "^1.3.1", - "@aws-amplify/backend": "^1.4.0", - "@aws-amplify/backend-ai": "^0.3.0", - "@aws-amplify/backend-secret": "^1.1.2", - "@aws-amplify/client-config": "^1.4.0", + "@aws-amplify/ai-constructs": "^0.8.0", + "@aws-amplify/auth-construct": "^1.4.0", + "@aws-amplify/backend": "^1.6.0", + "@aws-amplify/backend-ai": "^0.3.5", + "@aws-amplify/backend-secret": "^1.1.4", + "@aws-amplify/client-config": "^1.5.1", "@aws-amplify/data-schema": "^1.0.0", "@aws-amplify/deployed-backend-client": "^1.4.1", "@aws-amplify/platform-core": "^1.1.0", - "@aws-amplify/plugin-types": "^1.2.2", + "@aws-amplify/plugin-types": "^1.3.1", "@aws-sdk/client-accessanalyzer": "^3.624.0", "@aws-sdk/client-amplify": "^3.624.0", "@aws-sdk/client-bedrock-runtime": "^3.622.0", @@ -30,9 +30,10 @@ "@aws-sdk/credential-providers": "^3.624.0", "@smithy/shared-ini-file-loader": "^2.2.5", "@types/lodash.ismatch": "^4.4.9", + "@zip.js/zip.js": "^2.7.52", "aws-amplify": "^6.0.16", "aws-appsync-auth-link": "^3.0.7", - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0", "execa": "^8.0.1", "fs-extra": "^11.1.1", diff --git a/packages/integration-tests/src/amplify_auth_credentials_factory.ts b/packages/integration-tests/src/amplify_auth_credentials_factory.ts index 7faffce066..d21f9f8d86 100644 --- a/packages/integration-tests/src/amplify_auth_credentials_factory.ts +++ b/packages/integration-tests/src/amplify_auth_credentials_factory.ts @@ -33,7 +33,7 @@ export class AmplifyAuthCredentialsFactory { */ constructor( private readonly cognitoIdentityProviderClient: CognitoIdentityProviderClient, - authConfig: NonNullable['auth']> + authConfig: NonNullable['auth']> ) { if (!authConfig.identity_pool_id) { throw new Error('Client config must have identity pool id.'); diff --git a/packages/integration-tests/src/find_deployed_resource.ts b/packages/integration-tests/src/find_deployed_resource.ts index ef8830e680..933415ddd3 100644 --- a/packages/integration-tests/src/find_deployed_resource.ts +++ b/packages/integration-tests/src/find_deployed_resource.ts @@ -4,6 +4,7 @@ import { DescribeStackResourcesCommand, } from '@aws-sdk/client-cloudformation'; import { BackendIdentifierConversions } from '@aws-amplify/platform-core'; +import { e2eToolingClientConfig } from './e2e_tooling_client_config.js'; export type StringPredicate = (str: string) => boolean; @@ -14,7 +15,11 @@ export class DeployedResourcesFinder { /** * Construct with a cfnClient */ - constructor(private readonly cfnClient: CloudFormationClient) {} + constructor( + private readonly cfnClient: CloudFormationClient = new CloudFormationClient( + e2eToolingClientConfig + ) + ) {} /** * Find resources of type "resourceType" within the stack defined by "backendId" diff --git a/packages/integration-tests/src/process-controller/execa_process_killer.ts b/packages/integration-tests/src/process-controller/execa_process_killer.ts index 0f55e04c6f..945b3ba38d 100644 --- a/packages/integration-tests/src/process-controller/execa_process_killer.ts +++ 
b/packages/integration-tests/src/process-controller/execa_process_killer.ts @@ -14,8 +14,20 @@ export const killExecaProcess = async (processInstance: ExecaChildProcess) => { // turns out killing child process on Windows is a huge PITA // https://stackoverflow.com/questions/23706055/why-can-i-not-kill-my-child-process-in-nodejs-on-windows // https://github.com/sindresorhus/execa#killsignal-options - // eslint-disable-next-line spellcheck/spell-checker - await execa('taskkill', ['/pid', `${processInstance.pid}`, '/f', '/t']); + try { + // eslint-disable-next-line spellcheck/spell-checker + await execa('taskkill', ['/pid', `${processInstance.pid}`, '/f', '/t']); + } catch (e) { + // if process doesn't exist it means that it managed to exit gracefully by now. + // so don't fail in that case. + const isProcessNotFoundError = + e instanceof Error && + (e.message.includes('not found') || + e.message.includes('There is no running instance of the task')); + if (!isProcessNotFoundError) { + throw e; + } + } } else { processInstance.kill('SIGINT'); } diff --git a/packages/integration-tests/src/process-controller/predicated_action_macros.ts b/packages/integration-tests/src/process-controller/predicated_action_macros.ts index 7f38d4789b..4fff93a726 100644 --- a/packages/integration-tests/src/process-controller/predicated_action_macros.ts +++ b/packages/integration-tests/src/process-controller/predicated_action_macros.ts @@ -40,16 +40,6 @@ export const confirmDeleteSandbox = () => ) .sendYes(); -/** - * Reusable predicated action: Wait for sandbox to prompt on quitting to delete all the resource and respond with no - */ -export const rejectCleanupSandbox = () => - new PredicatedActionBuilder() - .waitForLineIncludes( - 'Would you like to delete all the resources in your sandbox environment' - ) - .sendNo(); - /** * Reusable predicated action: Wait for sandbox to become idle, * then perform the specified file replacements in the backend code which will trigger sandbox again @@ -59,9 +49,10 @@ export const replaceFiles = (replacements: CopyDefinition[]) => { }; /** - * Reusable predicated action: Wait for sandbox to become idle and then quit it (CTRL-C) + * Reusable predicated action: Wait for sandbox to become idle and config to be generated and then quit it (CTRL-C) */ -export const interruptSandbox = () => waitForSandboxToBecomeIdle().sendCtrlC(); +export const interruptSandbox = () => + waitForConfigUpdateAfterDeployment().sendCtrlC(); /** * Reusable predicated action: Wait for sandbox to finish deployment and assert that the deployment time is less diff --git a/packages/integration-tests/src/setup_test_directory.ts b/packages/integration-tests/src/setup_test_directory.ts index 0939e5709c..8cddf8b2cf 100644 --- a/packages/integration-tests/src/setup_test_directory.ts +++ b/packages/integration-tests/src/setup_test_directory.ts @@ -22,6 +22,13 @@ export const createTestDirectory = async (pathName: string | URL) => { * Delete a test directory. */ export const deleteTestDirectory = async (pathName: string | URL) => { + if (process.env.CI) { + // We don't have to delete test directories in CI. + // The VMs are ephemeral. + // On the other hand we want to keep shared parent directories for test projects + // for tests executing in parallel on the same VM. 
+ return; + } if (existsSync(pathName)) { await fs.rm(pathName, { recursive: true, force: true }); } diff --git a/packages/integration-tests/src/test-e2e/deployment.test.ts b/packages/integration-tests/src/test-e2e/deployment.test.ts deleted file mode 100644 index c9ea7500de..0000000000 --- a/packages/integration-tests/src/test-e2e/deployment.test.ts +++ /dev/null @@ -1,191 +0,0 @@ -import { after, afterEach, before, beforeEach, describe, it } from 'node:test'; -import { - createTestDirectory, - deleteTestDirectory, - rootTestDir, -} from '../setup_test_directory.js'; -import fs from 'fs/promises'; -import { shortUuid } from '../short_uuid.js'; -import { getTestProjectCreators } from '../test-project-setup/test_project_creator.js'; -import { TestProjectBase } from '../test-project-setup/test_project_base.js'; -import { PredicatedActionBuilder } from '../process-controller/predicated_action_queue_builder.js'; -import { ampxCli } from '../process-controller/process_controller.js'; -import path from 'path'; -import { - interruptSandbox, - rejectCleanupSandbox, -} from '../process-controller/predicated_action_macros.js'; -import assert from 'node:assert'; -import { TestBranch, amplifyAppPool } from '../amplify_app_pool.js'; -import { BackendIdentifier } from '@aws-amplify/plugin-types'; -import { ClientConfigFormat } from '@aws-amplify/client-config'; -import { testConcurrencyLevel } from './test_concurrency.js'; -import { TestCdkProjectBase } from '../test-project-setup/cdk/test_cdk_project_base.js'; -import { getTestCdkProjectCreators } from '../test-project-setup/cdk/test_cdk_project_creator.js'; - -const testProjectCreators = getTestProjectCreators(); -const testCdkProjectCreators = getTestCdkProjectCreators(); -void describe('deployment tests', { concurrency: testConcurrencyLevel }, () => { - before(async () => { - await createTestDirectory(rootTestDir); - }); - after(async () => { - await deleteTestDirectory(rootTestDir); - }); - - void describe('amplify deploys', async () => { - testProjectCreators.forEach((testProjectCreator) => { - void describe(`branch deploys ${testProjectCreator.name}`, () => { - let branchBackendIdentifier: BackendIdentifier; - let testBranch: TestBranch; - let testProject: TestProjectBase; - - beforeEach(async () => { - testProject = await testProjectCreator.createProject(rootTestDir); - testBranch = await amplifyAppPool.createTestBranch(); - branchBackendIdentifier = { - namespace: testBranch.appId, - name: testBranch.branchName, - type: 'branch', - }; - }); - - afterEach(async () => { - await testProject.tearDown(branchBackendIdentifier); - }); - - void it(`[${testProjectCreator.name}] deploys fully`, async () => { - await testProject.deploy(branchBackendIdentifier); - await testProject.assertPostDeployment(branchBackendIdentifier); - const testBranchDetails = await amplifyAppPool.fetchTestBranchDetails( - testBranch - ); - assert.ok( - testBranchDetails.backend?.stackArn, - 'branch should have stack associated' - ); - assert.ok( - testBranchDetails.backend?.stackArn?.includes( - branchBackendIdentifier.namespace - ) - ); - assert.ok( - testBranchDetails.backend?.stackArn?.includes( - branchBackendIdentifier.name - ) - ); - - // test generating all client formats - for (const format of [ - ClientConfigFormat.DART, - ClientConfigFormat.JSON, - ]) { - await ampxCli( - [ - 'generate', - 'outputs', - '--branch', - testBranch.branchName, - '--app-id', - testBranch.appId, - '--format', - format, - ], - testProject.projectDirPath - ).run(); - - await 
testProject.assertClientConfigExists( - testProject.projectDirPath, - format - ); - } - }); - }); - }); - - void describe('fails on compilation error', async () => { - let testProject: TestProjectBase; - before(async () => { - // any project is fine - testProject = await testProjectCreators[0].createProject(rootTestDir); - await fs.cp( - testProject.sourceProjectAmplifyDirURL, - testProject.projectAmplifyDirPath, - { - recursive: true, - } - ); - - // inject failure - await fs.appendFile( - path.join(testProject.projectAmplifyDirPath, 'backend.ts'), - "this won't compile" - ); - }); - - void describe('in sequence', { concurrency: false }, () => { - void it('in sandbox deploy', async () => { - await ampxCli( - ['sandbox', '--dirToWatch', 'amplify'], - testProject.projectDirPath - ) - .do( - new PredicatedActionBuilder().waitForLineIncludes( - 'TypeScript validation check failed' - ) - ) - .do(interruptSandbox()) - .do(rejectCleanupSandbox()) - .run(); - }); - - void it('in pipeline deploy', async () => { - await assert.rejects(() => - ampxCli( - [ - 'pipeline-deploy', - '--branch', - 'test-branch', - '--app-id', - `test-${shortUuid()}`, - ], - testProject.projectDirPath, - { - env: { CI: 'true' }, - } - ) - .do( - new PredicatedActionBuilder().waitForLineIncludes( - 'TypeScript validation check failed' - ) - ) - .run() - ); - }); - }); - }); - }); - - void describe('cdk deploys', () => { - testCdkProjectCreators.forEach((testCdkProjectCreator) => { - void describe(`${testCdkProjectCreator.name}`, () => { - let testCdkProject: TestCdkProjectBase; - - beforeEach(async () => { - testCdkProject = await testCdkProjectCreator.createProject( - rootTestDir - ); - }); - - afterEach(async () => { - await testCdkProject.tearDown(); - }); - - void it(`deploys`, async () => { - await testCdkProject.deploy(); - await testCdkProject.assertPostDeployment(); - }); - }); - }); - }); -}); diff --git a/packages/integration-tests/src/test-e2e/deployment/access_testing_project.deployment.test.ts b/packages/integration-tests/src/test-e2e/deployment/access_testing_project.deployment.test.ts new file mode 100644 index 0000000000..6730123191 --- /dev/null +++ b/packages/integration-tests/src/test-e2e/deployment/access_testing_project.deployment.test.ts @@ -0,0 +1,4 @@ +import { AccessTestingProjectTestProjectCreator } from '../../test-project-setup/access_testing_project.js'; +import { defineDeploymentTest } from './deployment.test.template.js'; + +defineDeploymentTest(new AccessTestingProjectTestProjectCreator()); diff --git a/packages/integration-tests/src/test-e2e/deployment/auth_cdk_project.deployment.test.ts b/packages/integration-tests/src/test-e2e/deployment/auth_cdk_project.deployment.test.ts new file mode 100644 index 0000000000..2b6f324624 --- /dev/null +++ b/packages/integration-tests/src/test-e2e/deployment/auth_cdk_project.deployment.test.ts @@ -0,0 +1,4 @@ +import { defineCdkDeploymentTest } from './cdk.deployment.test.template.js'; +import { AuthTestCdkProjectCreator } from '../../test-project-setup/cdk/auth_cdk_project.js'; + +defineCdkDeploymentTest(new AuthTestCdkProjectCreator()); diff --git a/packages/integration-tests/src/test-e2e/deployment/cdk.deployment.test.template.ts b/packages/integration-tests/src/test-e2e/deployment/cdk.deployment.test.template.ts new file mode 100644 index 0000000000..1bfa79ce14 --- /dev/null +++ b/packages/integration-tests/src/test-e2e/deployment/cdk.deployment.test.template.ts @@ -0,0 +1,50 @@ +import { after, afterEach, before, beforeEach, describe, it } from 
'node:test'; +import { + createTestDirectory, + deleteTestDirectory, + rootTestDir, +} from '../../setup_test_directory.js'; +import { testConcurrencyLevel } from '../test_concurrency.js'; +import { TestCdkProjectBase } from '../../test-project-setup/cdk/test_cdk_project_base.js'; +import { TestCdkProjectCreator } from '../../test-project-setup/cdk/test_cdk_project_creator.js'; + +/** + * Defines cdk deployment test + */ +export const defineCdkDeploymentTest = ( + testCdkProjectCreator: TestCdkProjectCreator +) => { + void describe( + 'cdk deployment tests', + { concurrency: testConcurrencyLevel }, + () => { + before(async () => { + await createTestDirectory(rootTestDir); + }); + after(async () => { + await deleteTestDirectory(rootTestDir); + }); + + void describe('cdk deploys', () => { + void describe(`${testCdkProjectCreator.name}`, () => { + let testCdkProject: TestCdkProjectBase; + + beforeEach(async () => { + testCdkProject = await testCdkProjectCreator.createProject( + rootTestDir + ); + }); + + afterEach(async () => { + await testCdkProject.tearDown(); + }); + + void it(`deploys`, async () => { + await testCdkProject.deploy(); + await testCdkProject.assertPostDeployment(); + }); + }); + }); + } + ); +}; diff --git a/packages/integration-tests/src/test-e2e/deployment/conversation_handler_project.deployment.test.ts b/packages/integration-tests/src/test-e2e/deployment/conversation_handler_project.deployment.test.ts new file mode 100644 index 0000000000..b26f10daea --- /dev/null +++ b/packages/integration-tests/src/test-e2e/deployment/conversation_handler_project.deployment.test.ts @@ -0,0 +1,4 @@ +import { ConversationHandlerTestProjectCreator } from '../../test-project-setup/conversation_handler_project.js'; +import { defineDeploymentTest } from './deployment.test.template.js'; + +defineDeploymentTest(new ConversationHandlerTestProjectCreator()); diff --git a/packages/integration-tests/src/test-e2e/deployment/custom_outputs.deployment.test.ts b/packages/integration-tests/src/test-e2e/deployment/custom_outputs.deployment.test.ts new file mode 100644 index 0000000000..4654bac4f4 --- /dev/null +++ b/packages/integration-tests/src/test-e2e/deployment/custom_outputs.deployment.test.ts @@ -0,0 +1,4 @@ +import { CustomOutputsTestProjectCreator } from '../../test-project-setup/custom_outputs.js'; +import { defineDeploymentTest } from './deployment.test.template.js'; + +defineDeploymentTest(new CustomOutputsTestProjectCreator()); diff --git a/packages/integration-tests/src/test-e2e/deployment/data_storage_auth_with_triggers.deployment.test.ts b/packages/integration-tests/src/test-e2e/deployment/data_storage_auth_with_triggers.deployment.test.ts new file mode 100644 index 0000000000..0fd2ea5062 --- /dev/null +++ b/packages/integration-tests/src/test-e2e/deployment/data_storage_auth_with_triggers.deployment.test.ts @@ -0,0 +1,4 @@ +import { DataStorageAuthWithTriggerTestProjectCreator } from '../../test-project-setup/data_storage_auth_with_triggers.js'; +import { defineDeploymentTest } from './deployment.test.template.js'; + +defineDeploymentTest(new DataStorageAuthWithTriggerTestProjectCreator()); diff --git a/packages/integration-tests/src/test-e2e/deployment/deployment.test.template.ts b/packages/integration-tests/src/test-e2e/deployment/deployment.test.template.ts new file mode 100644 index 0000000000..4a17afb9b0 --- /dev/null +++ b/packages/integration-tests/src/test-e2e/deployment/deployment.test.template.ts @@ -0,0 +1,169 @@ +import { after, afterEach, before, beforeEach, describe, it } 
from 'node:test'; +import { + createTestDirectory, + deleteTestDirectory, + rootTestDir, +} from '../../setup_test_directory.js'; +import fs from 'fs/promises'; +import { shortUuid } from '../../short_uuid.js'; +import { TestProjectCreator } from '../../test-project-setup/test_project_creator.js'; +import { TestProjectBase } from '../../test-project-setup/test_project_base.js'; +import { PredicatedActionBuilder } from '../../process-controller/predicated_action_queue_builder.js'; +import { ampxCli } from '../../process-controller/process_controller.js'; +import path from 'path'; +import { waitForSandboxToBecomeIdle } from '../../process-controller/predicated_action_macros.js'; +import assert from 'node:assert'; +import { TestBranch, amplifyAppPool } from '../../amplify_app_pool.js'; +import { BackendIdentifier } from '@aws-amplify/plugin-types'; +import { ClientConfigFormat } from '@aws-amplify/client-config'; +import { testConcurrencyLevel } from '../test_concurrency.js'; + +/** + * Defines deployment test + */ +export const defineDeploymentTest = ( + testProjectCreator: TestProjectCreator +) => { + void describe( + 'deployment tests', + { concurrency: testConcurrencyLevel }, + () => { + before(async () => { + await createTestDirectory(rootTestDir); + }); + after(async () => { + await deleteTestDirectory(rootTestDir); + }); + + void describe(`branch deploys ${testProjectCreator.name}`, () => { + let branchBackendIdentifier: BackendIdentifier; + let testBranch: TestBranch; + let testProject: TestProjectBase; + + beforeEach(async () => { + testProject = await testProjectCreator.createProject(rootTestDir); + testBranch = await amplifyAppPool.createTestBranch(); + branchBackendIdentifier = { + namespace: testBranch.appId, + name: testBranch.branchName, + type: 'branch', + }; + }); + + afterEach(async () => { + await testProject.tearDown(branchBackendIdentifier); + }); + + void it(`[${testProjectCreator.name}] deploys fully`, async () => { + await testProject.deploy(branchBackendIdentifier); + await testProject.assertPostDeployment(branchBackendIdentifier); + const testBranchDetails = await amplifyAppPool.fetchTestBranchDetails( + testBranch + ); + assert.ok( + testBranchDetails.backend?.stackArn, + 'branch should have stack associated' + ); + assert.ok( + testBranchDetails.backend?.stackArn?.includes( + branchBackendIdentifier.namespace + ) + ); + assert.ok( + testBranchDetails.backend?.stackArn?.includes( + branchBackendIdentifier.name + ) + ); + + // test generating all client formats + for (const format of [ + ClientConfigFormat.DART, + ClientConfigFormat.JSON, + ]) { + await ampxCli( + [ + 'generate', + 'outputs', + '--branch', + testBranch.branchName, + '--app-id', + testBranch.appId, + '--format', + format, + ], + testProject.projectDirPath + ).run(); + + await testProject.assertClientConfigExists( + testProject.projectDirPath, + format + ); + } + }); + }); + + void describe('fails on compilation error', async () => { + let testProject: TestProjectBase; + before(async () => { + // any project is fine + testProject = await testProjectCreator.createProject(rootTestDir); + await fs.cp( + testProject.sourceProjectAmplifyDirURL, + testProject.projectAmplifyDirPath, + { + recursive: true, + } + ); + + // inject failure + await fs.appendFile( + path.join(testProject.projectAmplifyDirPath, 'backend.ts'), + "this won't compile" + ); + }); + + void describe('in sequence', { concurrency: false }, () => { + void it('in sandbox deploy', async () => { + const predicatedActionBuilder = new 
PredicatedActionBuilder(); + await ampxCli( + ['sandbox', '--dirToWatch', 'amplify'], + testProject.projectDirPath + ) + .do( + predicatedActionBuilder.waitForLineIncludes( + 'TypeScript validation check failed' + ) + ) + .do(waitForSandboxToBecomeIdle()) + .do(predicatedActionBuilder.sendCtrlC()) + .run(); + }); + + void it('in pipeline deploy', async () => { + await assert.rejects(() => + ampxCli( + [ + 'pipeline-deploy', + '--branch', + 'test-branch', + '--app-id', + `test-${shortUuid()}`, + ], + testProject.projectDirPath, + { + env: { CI: 'true' }, + } + ) + .do( + new PredicatedActionBuilder().waitForLineIncludes( + 'TypeScript validation check failed' + ) + ) + .run() + ); + }); + }); + }); + } + ); +}; diff --git a/packages/integration-tests/src/test-e2e/deployment/minimal_with_typescript_idioms.deployment.test.ts b/packages/integration-tests/src/test-e2e/deployment/minimal_with_typescript_idioms.deployment.test.ts new file mode 100644 index 0000000000..af3e619d9d --- /dev/null +++ b/packages/integration-tests/src/test-e2e/deployment/minimal_with_typescript_idioms.deployment.test.ts @@ -0,0 +1,4 @@ +import { MinimalWithTypescriptIdiomTestProjectCreator } from '../../test-project-setup/minimal_with_typescript_idioms.js'; +import { defineDeploymentTest } from './deployment.test.template.js'; + +defineDeploymentTest(new MinimalWithTypescriptIdiomTestProjectCreator()); diff --git a/packages/integration-tests/src/test-e2e/sandbox.test.ts b/packages/integration-tests/src/test-e2e/sandbox.test.ts deleted file mode 100644 index 394b3c8b3e..0000000000 --- a/packages/integration-tests/src/test-e2e/sandbox.test.ts +++ /dev/null @@ -1,110 +0,0 @@ -import { after, before, describe, it } from 'node:test'; -import { - createTestDirectory, - deleteTestDirectory, - rootTestDir, -} from '../setup_test_directory.js'; -import { getTestProjectCreators } from '../test-project-setup/test_project_creator.js'; -import { TestProjectBase } from '../test-project-setup/test_project_base.js'; -import { userInfo } from 'os'; -import { ampxCli } from '../process-controller/process_controller.js'; -import { - ensureDeploymentTimeLessThan, - interruptSandbox, - rejectCleanupSandbox, - replaceFiles, - waitForConfigUpdateAfterDeployment, -} from '../process-controller/predicated_action_macros.js'; -import { BackendIdentifier } from '@aws-amplify/plugin-types'; -import { testConcurrencyLevel } from './test_concurrency.js'; -import { - amplifySharedSecretNameKey, - createAmplifySharedSecretName, -} from '../shared_secret.js'; - -const testProjectCreators = getTestProjectCreators(); -void describe('sandbox tests', { concurrency: testConcurrencyLevel }, () => { - before(async () => { - await createTestDirectory(rootTestDir); - }); - after(async () => { - await deleteTestDirectory(rootTestDir); - }); - - void describe('amplify deploys', async () => { - testProjectCreators.forEach((testProjectCreator) => { - void describe(`sandbox deploys ${testProjectCreator.name}`, () => { - let testProject: TestProjectBase; - let sandboxBackendIdentifier: BackendIdentifier; - - before(async () => { - testProject = await testProjectCreator.createProject(rootTestDir); - sandboxBackendIdentifier = { - type: 'sandbox', - namespace: testProject.name, - name: userInfo().username, - }; - }); - - after(async () => { - await testProject.tearDown(sandboxBackendIdentifier); - }); - - void describe('in sequence', { concurrency: false }, () => { - const sharedSecretsEnv = { - [amplifySharedSecretNameKey]: createAmplifySharedSecretName(), - }; - void 
it(`[${testProjectCreator.name}] deploys fully`, async () => { - await testProject.deploy( - sandboxBackendIdentifier, - sharedSecretsEnv - ); - await testProject.assertPostDeployment(sandboxBackendIdentifier); - }); - - void it('generates config after sandbox --once deployment', async () => { - const processController = ampxCli( - ['sandbox', '--once'], - testProject.projectDirPath, - { - env: sharedSecretsEnv, - } - ); - await processController - .do(waitForConfigUpdateAfterDeployment()) - .run(); - - await testProject.assertPostDeployment(sandboxBackendIdentifier); - }); - - void it(`[${testProjectCreator.name}] hot-swaps a change`, async () => { - const updates = await testProject.getUpdates(); - if (updates.length > 0) { - const processController = ampxCli( - ['sandbox', '--dirToWatch', 'amplify'], - testProject.projectDirPath, - { - env: sharedSecretsEnv, - } - ); - - for (const update of updates) { - processController - .do(replaceFiles(update.replacements)) - .do(ensureDeploymentTimeLessThan(update.deployThresholdSec)); - } - - // Execute the process. - await processController - .do(interruptSandbox()) - .do(rejectCleanupSandbox()) - .run(); - - await testProject.assertPostDeployment(sandboxBackendIdentifier); - } - }); - }); - }); - }); - }); -}); diff --git a/packages/integration-tests/src/test-e2e/sandbox/access_testing_project.sandbox.test.ts b/packages/integration-tests/src/test-e2e/sandbox/access_testing_project.sandbox.test.ts new file mode 100644 index 0000000000..42fc2460d1 --- /dev/null +++ b/packages/integration-tests/src/test-e2e/sandbox/access_testing_project.sandbox.test.ts @@ -0,0 +1,4 @@ +import { defineSandboxTest } from './sandbox.test.template.js'; +import { AccessTestingProjectTestProjectCreator } from '../../test-project-setup/access_testing_project.js'; + +defineSandboxTest(new AccessTestingProjectTestProjectCreator()); diff --git a/packages/integration-tests/src/test-e2e/sandbox/conversation_handler_project.sandbox.test.ts b/packages/integration-tests/src/test-e2e/sandbox/conversation_handler_project.sandbox.test.ts new file mode 100644 index 0000000000..b4ee374c49 --- /dev/null +++ b/packages/integration-tests/src/test-e2e/sandbox/conversation_handler_project.sandbox.test.ts @@ -0,0 +1,4 @@ +import { defineSandboxTest } from './sandbox.test.template.js'; +import { ConversationHandlerTestProjectCreator } from '../../test-project-setup/conversation_handler_project.js'; + +defineSandboxTest(new ConversationHandlerTestProjectCreator()); diff --git a/packages/integration-tests/src/test-e2e/sandbox/custom_outputs.sandbox.test.ts b/packages/integration-tests/src/test-e2e/sandbox/custom_outputs.sandbox.test.ts new file mode 100644 index 0000000000..17f47a7efb --- /dev/null +++ b/packages/integration-tests/src/test-e2e/sandbox/custom_outputs.sandbox.test.ts @@ -0,0 +1,4 @@ +import { defineSandboxTest } from './sandbox.test.template.js'; +import { CustomOutputsTestProjectCreator } from '../../test-project-setup/custom_outputs.js'; + +defineSandboxTest(new CustomOutputsTestProjectCreator()); diff --git a/packages/integration-tests/src/test-e2e/sandbox/data_storage_auth_with_triggers.sandbox.test.ts b/packages/integration-tests/src/test-e2e/sandbox/data_storage_auth_with_triggers.sandbox.test.ts new file mode 100644 index 0000000000..b413244445 --- /dev/null +++ b/packages/integration-tests/src/test-e2e/sandbox/data_storage_auth_with_triggers.sandbox.test.ts @@ -0,0 +1,4 @@ +import { defineSandboxTest } from './sandbox.test.template.js'; +import { 
DataStorageAuthWithTriggerTestProjectCreator } from '../../test-project-setup/data_storage_auth_with_triggers.js'; + +defineSandboxTest(new DataStorageAuthWithTriggerTestProjectCreator()); diff --git a/packages/integration-tests/src/test-e2e/sandbox/minimal_with_typescript_idioms.sandbox.test.ts b/packages/integration-tests/src/test-e2e/sandbox/minimal_with_typescript_idioms.sandbox.test.ts new file mode 100644 index 0000000000..3f19b529d5 --- /dev/null +++ b/packages/integration-tests/src/test-e2e/sandbox/minimal_with_typescript_idioms.sandbox.test.ts @@ -0,0 +1,4 @@ +import { defineSandboxTest } from './sandbox.test.template.js'; +import { MinimalWithTypescriptIdiomTestProjectCreator } from '../../test-project-setup/minimal_with_typescript_idioms.js'; + +defineSandboxTest(new MinimalWithTypescriptIdiomTestProjectCreator()); diff --git a/packages/integration-tests/src/test-e2e/sandbox/sandbox.test.template.ts b/packages/integration-tests/src/test-e2e/sandbox/sandbox.test.template.ts new file mode 100644 index 0000000000..9b24b0926d --- /dev/null +++ b/packages/integration-tests/src/test-e2e/sandbox/sandbox.test.template.ts @@ -0,0 +1,108 @@ +import { after, before, describe, it } from 'node:test'; +import { + createTestDirectory, + deleteTestDirectory, + rootTestDir, +} from '../../setup_test_directory.js'; +import { TestProjectCreator } from '../../test-project-setup/test_project_creator.js'; +import { TestProjectBase } from '../../test-project-setup/test_project_base.js'; +import { userInfo } from 'os'; +import { ampxCli } from '../../process-controller/process_controller.js'; +import { + ensureDeploymentTimeLessThan, + interruptSandbox, + replaceFiles, + waitForConfigUpdateAfterDeployment, +} from '../../process-controller/predicated_action_macros.js'; +import { BackendIdentifier } from '@aws-amplify/plugin-types'; +import { testConcurrencyLevel } from '../test_concurrency.js'; +import { + amplifySharedSecretNameKey, + createAmplifySharedSecretName, +} from '../../shared_secret.js'; + +/** + * Defines sandbox test + */ +export const defineSandboxTest = (testProjectCreator: TestProjectCreator) => { + void describe('sandbox test', { concurrency: testConcurrencyLevel }, () => { + before(async () => { + await createTestDirectory(rootTestDir); + }); + after(async () => { + await deleteTestDirectory(rootTestDir); + }); + + void describe(`sandbox deploys ${testProjectCreator.name}`, () => { + let testProject: TestProjectBase; + let sandboxBackendIdentifier: BackendIdentifier; + + before(async () => { + testProject = await testProjectCreator.createProject(rootTestDir); + sandboxBackendIdentifier = { + type: 'sandbox', + namespace: testProject.name, + name: userInfo().username, + }; + }); + + after(async () => { + if ( + process.env.AMPLIFY_BACKEND_TESTS_RETAIN_TEST_PROJECT_DEPLOYMENT !== + 'true' + ) { + await testProject.tearDown(sandboxBackendIdentifier); + } + }); + + void describe('in sequence', { concurrency: false }, () => { + const sharedSecretsEnv = { + [amplifySharedSecretNameKey]: createAmplifySharedSecretName(), + }; + void it(`[${testProjectCreator.name}] deploys fully`, async () => { + await testProject.deploy(sandboxBackendIdentifier, sharedSecretsEnv); + await testProject.assertPostDeployment(sandboxBackendIdentifier); + }); + + void it('generates config after sandbox --once deployment', async () => { + const processController = ampxCli( + ['sandbox', '--once'], + testProject.projectDirPath, + { + env: sharedSecretsEnv, + } + ); + await processController + 
.do(waitForConfigUpdateAfterDeployment()) + .run(); + + await testProject.assertPostDeployment(sandboxBackendIdentifier); + }); + + void it(`[${testProjectCreator.name}] hot-swaps a change`, async () => { + const updates = await testProject.getUpdates(); + if (updates.length > 0) { + const processController = ampxCli( + ['sandbox', '--dirToWatch', 'amplify'], + testProject.projectDirPath, + { + env: sharedSecretsEnv, + } + ); + + for (const update of updates) { + processController + .do(replaceFiles(update.replacements)) + .do(ensureDeploymentTimeLessThan(update.deployThresholdSec)); + } + + // Execute the process. + await processController.do(interruptSandbox()).run(); + + await testProject.assertPostDeployment(sandboxBackendIdentifier); + } + }); + }); + }); + }); +}; diff --git a/packages/integration-tests/src/test-in-memory/data_storage_auth_with_triggers.test.ts b/packages/integration-tests/src/test-in-memory/data_storage_auth_with_triggers.test.ts index 7294be7483..37c7e3c456 100644 --- a/packages/integration-tests/src/test-in-memory/data_storage_auth_with_triggers.test.ts +++ b/packages/integration-tests/src/test-in-memory/data_storage_auth_with_triggers.test.ts @@ -52,6 +52,8 @@ void it('data storage auth with triggers', () => { assertExpectedLogicalIds(templates.defaultNodeFunc, 'AWS::Lambda::Function', [ 'defaultNodeFunctionlambda5C194062', 'echoFunclambdaE17DCA46', + 'funcCustomEmailSenderlambda3CCBA9A6', + 'funcNoMinifylambda91CDF3E0', 'funcWithAwsSdklambda5F770AD7', 'funcWithSchedulelambda0B6E4271', 'funcWithSsmlambda6A8824A1', diff --git a/packages/integration-tests/src/test-live-dependency-health-checks/health_checks.test.ts b/packages/integration-tests/src/test-live-dependency-health-checks/health_checks.test.ts index 8aae8bd8e6..19b475a917 100644 --- a/packages/integration-tests/src/test-live-dependency-health-checks/health_checks.test.ts +++ b/packages/integration-tests/src/test-live-dependency-health-checks/health_checks.test.ts @@ -14,7 +14,6 @@ import { import { confirmDeleteSandbox, interruptSandbox, - rejectCleanupSandbox, waitForSandboxDeploymentToPrintTotalTime, } from '../process-controller/predicated_action_macros.js'; import { BackendIdentifierConversions } from '@aws-amplify/platform-core'; @@ -123,7 +122,6 @@ void describe('Live dependency health checks', { concurrency: true }, () => { await ampxCli(['sandbox'], tempDir) .do(waitForSandboxDeploymentToPrintTotalTime()) .do(interruptSandbox()) - .do(rejectCleanupSandbox()) .run(); const clientConfigStats = await fs.stat( diff --git a/packages/integration-tests/src/test-project-setup/access_testing_project.ts b/packages/integration-tests/src/test-project-setup/access_testing_project.ts index e44010f82d..4430a49397 100644 --- a/packages/integration-tests/src/test-project-setup/access_testing_project.ts +++ b/packages/integration-tests/src/test-project-setup/access_testing_project.ts @@ -45,6 +45,7 @@ import { IamCredentials } from '../types.js'; import { AmplifyAuthCredentialsFactory } from '../amplify_auth_credentials_factory.js'; import { SemVer } from 'semver'; import { AmplifyClient } from '@aws-sdk/client-amplify'; +import { e2eToolingClientConfig } from '../e2e_tooling_client_config.js'; // TODO: this is a work around // it seems like as of amplify v6 , some of the code only runs in the browser ... @@ -69,11 +70,21 @@ export class AccessTestingProjectTestProjectCreator * Creates project creator. 
*/ constructor( - private readonly cfnClient: CloudFormationClient, - private readonly amplifyClient: AmplifyClient, - private readonly cognitoIdentityClient: CognitoIdentityClient, - private readonly cognitoIdentityProviderClient: CognitoIdentityProviderClient, - private readonly stsClient: STSClient + private readonly cfnClient: CloudFormationClient = new CloudFormationClient( + e2eToolingClientConfig + ), + private readonly amplifyClient: AmplifyClient = new AmplifyClient( + e2eToolingClientConfig + ), + private readonly cognitoIdentityClient: CognitoIdentityClient = new CognitoIdentityClient( + e2eToolingClientConfig + ), + private readonly cognitoIdentityProviderClient: CognitoIdentityProviderClient = new CognitoIdentityProviderClient( + e2eToolingClientConfig + ), + private readonly stsClient: STSClient = new STSClient( + e2eToolingClientConfig + ) ) {} createProject = async (e2eProjectDir: string): Promise => { @@ -147,7 +158,7 @@ class AccessTestingProjectTestProject extends TestProjectBase { backendId: BackendIdentifier ): Promise { await super.assertPostDeployment(backendId); - const clientConfig = await generateClientConfig(backendId, '1.2'); + const clientConfig = await generateClientConfig(backendId, '1.3'); await this.assertDifferentCognitoInstanceCannotAssumeAmplifyRoles( clientConfig ); @@ -160,7 +171,7 @@ class AccessTestingProjectTestProject extends TestProjectBase { * I.e. roles not created by auth construct. */ private assertGenericIamRolesAccessToData = async ( - clientConfig: ClientConfigVersionTemplateType<'1.2'> + clientConfig: ClientConfigVersionTemplateType<'1.3'> ) => { if (!clientConfig.custom) { throw new Error('Client config is missing custom section'); @@ -262,7 +273,7 @@ class AccessTestingProjectTestProject extends TestProjectBase { * This asserts that authenticated and unauthenticated roles have relevant access to data API. */ private assertAmplifyAuthAccessToData = async ( - clientConfig: ClientConfigVersionTemplateType<'1.2'> + clientConfig: ClientConfigVersionTemplateType<'1.3'> ): Promise => { if (!clientConfig.auth) { throw new Error('Client config is missing auth section'); @@ -367,7 +378,7 @@ class AccessTestingProjectTestProject extends TestProjectBase { * unauthorized roles. I.e. it tests trust policy. 
*/ private assertDifferentCognitoInstanceCannotAssumeAmplifyRoles = async ( - clientConfig: ClientConfigVersionTemplateType<'1.2'> + clientConfig: ClientConfigVersionTemplateType<'1.3'> ): Promise => { const simpleAuthUser = await this.createAuthenticatedSimpleAuthCognitoUser( clientConfig @@ -416,7 +427,7 @@ class AccessTestingProjectTestProject extends TestProjectBase { }; private createAuthenticatedSimpleAuthCognitoUser = async ( - clientConfig: ClientConfigVersionTemplateType<'1.2'> + clientConfig: ClientConfigVersionTemplateType<'1.3'> ): Promise => { if (!clientConfig.custom) { throw new Error('Client config is missing custom section'); @@ -496,7 +507,7 @@ class AccessTestingProjectTestProject extends TestProjectBase { }; private createAppSyncClient = ( - clientConfig: ClientConfigVersionTemplateType<'1.2'>, + clientConfig: ClientConfigVersionTemplateType<'1.3'>, credentials: IamCredentials ): ApolloClient => { if (!clientConfig.data?.url) { diff --git a/packages/integration-tests/src/test-project-setup/cdk/auth_cdk_project.ts b/packages/integration-tests/src/test-project-setup/cdk/auth_cdk_project.ts index 7010f5d295..97a3692efe 100644 --- a/packages/integration-tests/src/test-project-setup/cdk/auth_cdk_project.ts +++ b/packages/integration-tests/src/test-project-setup/cdk/auth_cdk_project.ts @@ -22,7 +22,9 @@ export class AuthTestCdkProjectCreator implements TestCdkProjectCreator { /** * Constructor. */ - constructor(private readonly resourceFinder: DeployedResourcesFinder) {} + constructor( + private readonly resourceFinder: DeployedResourcesFinder = new DeployedResourcesFinder() + ) {} createProject = async ( e2eProjectDir: string @@ -78,7 +80,7 @@ class AuthTestCdkProject extends TestCdkProjectBase { { stackName: this.stackName, }, - '1.2', //version of the config + '1.3', //version of the config awsClientProvider ); diff --git a/packages/integration-tests/src/test-project-setup/cdk/create_empty_cdk_project.ts b/packages/integration-tests/src/test-project-setup/cdk/create_empty_cdk_project.ts index 67d3b67758..72db39e0fe 100644 --- a/packages/integration-tests/src/test-project-setup/cdk/create_empty_cdk_project.ts +++ b/packages/integration-tests/src/test-project-setup/cdk/create_empty_cdk_project.ts @@ -24,5 +24,15 @@ export const createEmptyCdkProject = async ( await cdkCli(['init', 'app', '--language', 'typescript'], projectRoot).run(); + // Remove local node_modules after CDK init. + // This is to make sure that test project is using same version of + // CDK and constructs as the rest of the codebase. + // Otherwise, we might get errors about incompatible classes if + // dependencies on npm are ahead of our package-lock. 
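The comment above (and the `rm` call that follows it) relies on how Node resolves packages: once the project-local node_modules created by `cdk init` is gone, imports of aws-cdk-lib and constructs fall through to the workspace root install pinned by the repo's package-lock. A hedged sketch of a check one could run after the cleanup; the helper name and the idea of asserting the resolution path are mine, not part of the diff:

```typescript
import { createRequire } from 'node:module';
import * as path from 'node:path';
import assert from 'node:assert';

// Hypothetical check: after removing <projectRoot>/node_modules, aws-cdk-lib
// should resolve from the workspace root rather than from the test project.
export const assertUsesWorkspaceCdk = (projectRoot: string, workspaceRoot: string) => {
  // createRequire lets us resolve "as if" importing from a file inside the project.
  const requireFromProject = createRequire(path.join(projectRoot, 'noop.js'));
  const resolved = requireFromProject.resolve('aws-cdk-lib');
  assert.ok(
    resolved.startsWith(path.join(workspaceRoot, 'node_modules')),
    `expected aws-cdk-lib to be hoisted to the workspace, got ${resolved}`
  );
};
```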
+ await fsp.rm(path.join(projectRoot, 'node_modules'), { + recursive: true, + force: true, + }); + return { projectName, projectRoot }; }; diff --git a/packages/integration-tests/src/test-project-setup/cdk/test_cdk_project_creator.ts b/packages/integration-tests/src/test-project-setup/cdk/test_cdk_project_creator.ts index 6f0e94efe2..c1b6d2ef6e 100644 --- a/packages/integration-tests/src/test-project-setup/cdk/test_cdk_project_creator.ts +++ b/packages/integration-tests/src/test-project-setup/cdk/test_cdk_project_creator.ts @@ -1,10 +1,6 @@ -import { CloudFormationClient } from '@aws-sdk/client-cloudformation'; -import { e2eToolingClientConfig } from '../../e2e_tooling_client_config.js'; import { TestCdkProjectBase } from './test_cdk_project_base.js'; -import { AuthTestCdkProjectCreator } from './auth_cdk_project.js'; import { fileURLToPath } from 'node:url'; import path from 'path'; -import { DeployedResourcesFinder } from '../../find_deployed_resource.js'; const dirname = path.dirname(fileURLToPath(import.meta.url)); export const testCdkProjectsSourceRoot = path.resolve( @@ -20,15 +16,3 @@ export type TestCdkProjectCreator = { readonly name: string; createProject: (e2eProjectDir: string) => Promise; }; - -/** - * Generates a list of test cdk projects. - */ -export const getTestCdkProjectCreators = (): TestCdkProjectCreator[] => { - const testCdkProjectCreators: TestCdkProjectCreator[] = []; - - const cfnClient = new CloudFormationClient(e2eToolingClientConfig); - const resourceFinder = new DeployedResourcesFinder(cfnClient); - testCdkProjectCreators.push(new AuthTestCdkProjectCreator(resourceFinder)); - return testCdkProjectCreators; -}; diff --git a/packages/integration-tests/src/test-project-setup/conversation_handler_project.ts b/packages/integration-tests/src/test-project-setup/conversation_handler_project.ts index 47b51c4cb3..53a9ea44a9 100644 --- a/packages/integration-tests/src/test-project-setup/conversation_handler_project.ts +++ b/packages/integration-tests/src/test-project-setup/conversation_handler_project.ts @@ -7,10 +7,7 @@ import { AmplifyClient } from '@aws-sdk/client-amplify'; import { BackendIdentifier } from '@aws-amplify/plugin-types'; import { InvokeCommand, LambdaClient } from '@aws-sdk/client-lambda'; import { DeployedResourcesFinder } from '../find_deployed_resource.js'; -import { - ConversationMessage, - ConversationTurnEvent, -} from '@aws-amplify/ai-constructs/conversation/runtime'; +import { ConversationTurnEvent } from '@aws-amplify/ai-constructs/conversation/runtime'; import { randomUUID } from 'crypto'; import { generateClientConfig } from '@aws-amplify/client-config'; import { AmplifyAuthCredentialsFactory } from '../amplify_auth_credentials_factory.js'; @@ -34,6 +31,8 @@ import { } from '../test-projects/conversation-handler/amplify/constants.js'; import { resolve } from 'path'; import { fileURLToPath } from 'url'; +import * as bedrock from '@aws-sdk/client-bedrock-runtime'; +import { e2eToolingClientConfig } from '../e2e_tooling_client_config.js'; // TODO: this is a work around // it seems like as of amplify v6 , some of the code only runs in the browser ... 
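A recurring shape in this diff, visible in the project-creator constructors above and below and in the removal of `getTestCdkProjectCreators`: constructor parameters that used to be wired by a central factory now default to clients built from the shared `e2eToolingClientConfig`, so each per-project `*.test.ts` entry point can instantiate its creator with zero arguments while unit tests can still inject stubs. A condensed sketch of that shape; `CloudFormationClient` is the real SDK class, but the creator class and inline config object stand in for the ones in the diff:

```typescript
import { CloudFormationClient } from '@aws-sdk/client-cloudformation';

// Stand-in for the shared config exported from e2e_tooling_client_config.ts.
const e2eToolingClientConfig = { maxAttempts: 5 };

class ExampleProjectCreator {
  // Defaulted constructor parameters keep injection available for unit tests,
  // while e2e entry points can simply call `new ExampleProjectCreator()`.
  constructor(
    private readonly cfnClient: CloudFormationClient = new CloudFormationClient(
      e2eToolingClientConfig
    )
  ) {}

  getClient = (): CloudFormationClient => this.cfnClient;
}

// e2e entry point: no central factory function needed any more.
const defaultCreator = new ExampleProjectCreator();
// unit test: inject a stub instead of a real client.
const stubbedCreator = new ExampleProjectCreator({} as CloudFormationClient);
void defaultCreator.getClient();
void stubbedCreator;
```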
@@ -49,37 +48,52 @@ if (process.versions.node) { type ConversationTurnAppSyncResponse = { associatedUserMessageId: string; content: string; + errors?: Array; +}; + +type ConversationMessage = { + role: 'user' | 'assistant'; + content: Array; }; +type ConversationMessageContentBlock = + | bedrock.ContentBlock + | { + image: Omit & { + // Upstream (Appsync) may send images in a form of Base64 encoded strings + source: { bytes: string }; + }; + // These are needed so that union with other content block types works. + // See https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-bedrock-runtime/TypeAlias/ContentBlock/. + text?: never; + document?: never; + toolUse?: never; + toolResult?: never; + guardContent?: never; + $unknown?: never; + }; + type CreateConversationMessageChatInput = ConversationMessage & { conversationId: string; id: string; associatedUserMessageId?: string; }; -const commonEventProperties = { - responseMutation: { - name: 'createConversationMessageAssistantResponse', - inputTypeName: 'CreateConversationMessageAssistantResponseInput', - selectionSet: [ - 'id', - 'conversationId', - 'content', - 'owner', - 'createdAt', - 'updatedAt', - ].join('\n'), - }, - messageHistoryQuery: { - getQueryName: 'getConversationMessageChat', - getQueryInputTypeName: 'ID', - listQueryName: 'listConversationMessageChats', - listQueryInputTypeName: 'ModelConversationMessageChatFilterInput', - }, - modelConfiguration: { - modelId: bedrockModelId, - systemPrompt: 'You are helpful bot.', - }, +type ConversationTurnError = { + errorType: string; + message: string; +}; + +type ConversationTurnAppSyncResponseChunk = { + conversationId: string; + associatedUserMessageId: string; + contentBlockIndex: number; + contentBlockText?: string; + contentBlockDeltaIndex?: number; + contentBlockDoneAtIndex?: number; + contentBlockToolUse?: string; + stopReason?: string; + errors?: Array; }; /** @@ -94,11 +108,19 @@ export class ConversationHandlerTestProjectCreator * Creates project creator. 
*/ constructor( - private readonly cfnClient: CloudFormationClient, - private readonly amplifyClient: AmplifyClient, - private readonly lambdaClient: LambdaClient, - private readonly cognitoIdentityProviderClient: CognitoIdentityProviderClient, - private readonly resourceFinder: DeployedResourcesFinder + private readonly cfnClient: CloudFormationClient = new CloudFormationClient( + e2eToolingClientConfig + ), + private readonly amplifyClient: AmplifyClient = new AmplifyClient( + e2eToolingClientConfig + ), + private readonly lambdaClient: LambdaClient = new LambdaClient( + e2eToolingClientConfig + ), + private readonly cognitoIdentityProviderClient: CognitoIdentityProviderClient = new CognitoIdentityProviderClient( + e2eToolingClientConfig + ), + private readonly resourceFinder: DeployedResourcesFinder = new DeployedResourcesFinder() ) {} createProject = async (e2eProjectDir: string): Promise => { @@ -149,9 +171,13 @@ class ConversationHandlerTestProject extends TestProjectBase { projectAmplifyDirPath: string, cfnClient: CloudFormationClient, amplifyClient: AmplifyClient, - private readonly lambdaClient: LambdaClient, - private readonly cognitoIdentityProviderClient: CognitoIdentityProviderClient, - private readonly resourceFinder: DeployedResourcesFinder + private readonly lambdaClient: LambdaClient = new LambdaClient( + e2eToolingClientConfig + ), + private readonly cognitoIdentityProviderClient: CognitoIdentityProviderClient = new CognitoIdentityProviderClient( + e2eToolingClientConfig + ), + private readonly resourceFinder: DeployedResourcesFinder = new DeployedResourcesFinder() ) { super( name, @@ -175,6 +201,7 @@ class ConversationHandlerTestProject extends TestProjectBase { throw new Error('Conversation handler project must include auth'); } + const dataUrl = clientConfig.data?.url; const authenticatedUserCredentials = await new AmplifyAuthCredentialsFactory( this.cognitoIdentityProviderClient, @@ -199,69 +226,136 @@ class ConversationHandlerTestProject extends TestProjectBase { cache: new InMemoryCache(), }); - await this.assertDefaultConversationHandlerCanExecuteTurn( - backendId, - authenticatedUserCredentials.accessToken, - clientConfig.data.url, - apolloClient, - // Does not use message history lookup. - // This case should be removed when event.messages field is removed. 
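This hunk wraps every Bedrock-backed assertion in `executeWithRetry`, the helper defined near the end of this file's diff: model responses are occasionally empty or truncated, so each assertion gets a few attempts and only an AggregateError of all failures fails the test. A standalone sketch of that helper and its call shape, assuming nothing beyond standard Node (AggregateError needs Node 15+; the repo targets 18+). The assertion function is a hypothetical stand-in:

```typescript
// Retry helper in the spirit of executeWithRetry later in this file's diff:
// run the assertion a few times and only fail with the combined errors
// if every attempt failed.
const executeWithRetry = async (
  callable: () => Promise<void>,
  maxAttempts = 3
): Promise<void> => {
  const collectedErrors: unknown[] = [];
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    try {
      await callable();
      return; // success, stop retrying
    } catch (e) {
      collectedErrors.push(e);
    }
  }
  throw new AggregateError(collectedErrors, `failed after ${maxAttempts} attempts`);
};

// Hypothetical flaky assertion standing in for the conversation-turn checks here.
const assertModelAnsweredSomething = async (): Promise<void> => {
  // ...invoke the handler and assert on the persisted response...
};

// Call shape used throughout this hunk: each assertion is wrapped in a thunk.
await executeWithRetry(() => assertModelAnsweredSomething());
```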
- false + await this.executeWithRetry(() => + this.assertDefaultConversationHandlerCanExecuteTurn( + backendId, + authenticatedUserCredentials.accessToken, + dataUrl, + apolloClient, + false + ) ); - await this.assertDefaultConversationHandlerCanExecuteTurn( - backendId, - authenticatedUserCredentials.accessToken, - clientConfig.data.url, - apolloClient, - true + await this.executeWithRetry(() => + this.assertDefaultConversationHandlerCanExecuteTurn( + backendId, + authenticatedUserCredentials.accessToken, + dataUrl, + apolloClient, + true + ) ); - await this.assertDefaultConversationHandlerCanExecuteTurn( - backendId, - authenticatedUserCredentials.accessToken, - clientConfig.data.url, - apolloClient, - true, - // Simulate eventual consistency - true + await this.executeWithRetry(() => + this.assertDefaultConversationHandlerCanExecuteTurn( + backendId, + authenticatedUserCredentials.accessToken, + dataUrl, + apolloClient, + false, + // Simulate eventual consistency + true + ) ); - await this.assertCustomConversationHandlerCanExecuteTurn( - backendId, - authenticatedUserCredentials.accessToken, - clientConfig.data.url, - apolloClient + await this.executeWithRetry(() => + this.assertCustomConversationHandlerCanExecuteTurn( + backendId, + authenticatedUserCredentials.accessToken, + dataUrl, + apolloClient, + false + ) ); - await this.assertDefaultConversationHandlerCanExecuteTurnWithDataTool( - backendId, - authenticatedUserCredentials.accessToken, - clientConfig.data.url, - apolloClient + await this.executeWithRetry(() => + this.assertCustomConversationHandlerCanExecuteTurn( + backendId, + authenticatedUserCredentials.accessToken, + dataUrl, + apolloClient, + true + ) ); - await this.assertDefaultConversationHandlerCanExecuteTurnWithClientTool( - backendId, - authenticatedUserCredentials.accessToken, - clientConfig.data.url, - apolloClient + await this.executeWithRetry(() => + this.assertDefaultConversationHandlerCanExecuteTurnWithDataTool( + backendId, + authenticatedUserCredentials.accessToken, + dataUrl, + apolloClient, + false + ) + ); + + await this.executeWithRetry(() => + this.assertDefaultConversationHandlerCanExecuteTurnWithDataTool( + backendId, + authenticatedUserCredentials.accessToken, + dataUrl, + apolloClient, + true + ) + ); + + await this.executeWithRetry(() => + this.assertDefaultConversationHandlerCanExecuteTurnWithClientTool( + backendId, + authenticatedUserCredentials.accessToken, + dataUrl, + apolloClient, + false + ) + ); + + await this.executeWithRetry(() => + this.assertDefaultConversationHandlerCanExecuteTurnWithClientTool( + backendId, + authenticatedUserCredentials.accessToken, + dataUrl, + apolloClient, + true + ) + ); + + await this.executeWithRetry(() => + this.assertDefaultConversationHandlerCanExecuteTurnWithImage( + backendId, + authenticatedUserCredentials.accessToken, + dataUrl, + apolloClient, + false + ) ); - await this.assertDefaultConversationHandlerCanExecuteTurnWithImage( - backendId, - authenticatedUserCredentials.accessToken, - clientConfig.data.url, - apolloClient, - false + await this.executeWithRetry(() => + this.assertDefaultConversationHandlerCanExecuteTurnWithImage( + backendId, + authenticatedUserCredentials.accessToken, + dataUrl, + apolloClient, + true + ) ); - await this.assertDefaultConversationHandlerCanExecuteTurnWithImage( - backendId, - authenticatedUserCredentials.accessToken, - clientConfig.data.url, - apolloClient, - true + await this.executeWithRetry(() => + this.assertDefaultConversationHandlerCanPropagateError( + backendId, + 
authenticatedUserCredentials.accessToken, + dataUrl, + apolloClient, + true + ) + ); + + await this.executeWithRetry(() => + this.assertDefaultConversationHandlerCanPropagateError( + backendId, + authenticatedUserCredentials.accessToken, + dataUrl, + apolloClient, + false + ) ); } @@ -270,7 +364,7 @@ class ConversationHandlerTestProject extends TestProjectBase { accessToken: string, graphqlApiEndpoint: string, apolloClient: ApolloClient, - useMessageHistory: boolean, + streamResponse: boolean, withoutMessageAvailableInTheMessageList = false ): Promise => { const defaultConversationHandlerFunction = ( @@ -300,31 +394,16 @@ class ConversationHandlerTestProject extends TestProjectBase { request: { headers: { authorization: accessToken }, }, - ...commonEventProperties, + ...this.getCommonEventProperties(streamResponse), }; - if (useMessageHistory) { - if (withoutMessageAvailableInTheMessageList) { - // This tricks conversation handler to think that message is not available in the list. - // I.e. it simulates eventually consistency read at list operation where item is not yet visible. - // In this case handler should fall back to lookup by current message id. - message.conversationId = randomUUID().toString(); - } - await this.insertMessage(apolloClient, message); - } else { - event.messageHistoryQuery = { - getQueryName: '', - getQueryInputTypeName: '', - listQueryName: '', - listQueryInputTypeName: '', - }; - event.messages = [ - { - role: message.role, - content: message.content, - }, - ]; + if (withoutMessageAvailableInTheMessageList) { + // This tricks conversation handler to think that message is not available in the list. + // I.e. it simulates eventually consistency read at list operation where item is not yet visible. + // In this case handler should fall back to lookup by current message id. 
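When `streamResponse` is on, the assertion in the `executeConversationTurn` hunk below has to reassemble the assistant message from streaming chunks: it pages through `listConversationMessageAssistantStreamingResponses` with `nextToken`, orders chunks by `contentBlockIndex` plus `contentBlockDeltaIndex`, and concatenates the text and tool-use fragments. A hedged, standalone sketch of just the ordering-and-joining step; the chunk type mirrors the fields selected in the query below, and the 1,000,000 multiplier is the same simplification the diff uses (deltas within one block stay well under that):

```typescript
type StreamingChunk = {
  contentBlockIndex: number;
  contentBlockDeltaIndex?: number;
  contentBlockText?: string;
  contentBlockToolUse?: string;
};

// Composite key of block index and delta index gives a stable global order.
const assembleStreamedContent = (chunks: StreamingChunk[]): string => {
  const orderKey = (chunk: StreamingChunk): number =>
    1_000_000 * chunk.contentBlockIndex + (chunk.contentBlockDeltaIndex ?? 0);
  return [...chunks]
    .sort((a, b) => orderKey(a) - orderKey(b))
    .map((chunk) => (chunk.contentBlockText ?? '') + (chunk.contentBlockToolUse ?? ''))
    .join('');
};

// Chunks arriving out of order still concatenate to 'Hello world'.
const content = assembleStreamedContent([
  { contentBlockIndex: 0, contentBlockDeltaIndex: 1, contentBlockText: ' world' },
  { contentBlockIndex: 0, contentBlockDeltaIndex: 0, contentBlockText: 'Hello' },
]);
void content;
```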
+ message.conversationId = randomUUID().toString(); } + await this.insertMessage(apolloClient, message); const response = await this.executeConversationTurn( event, @@ -339,7 +418,7 @@ class ConversationHandlerTestProject extends TestProjectBase { accessToken: string, graphqlApiEndpoint: string, apolloClient: ApolloClient, - useMessageHistory: boolean + streamResponse: boolean ): Promise => { const defaultConversationHandlerFunction = ( await this.resourceFinder.findByBackendIdentifier( @@ -388,24 +467,9 @@ class ConversationHandlerTestProject extends TestProjectBase { request: { headers: { authorization: accessToken }, }, - ...commonEventProperties, + ...this.getCommonEventProperties(streamResponse), }; - if (useMessageHistory) { - await this.insertMessage(apolloClient, message); - } else { - event.messageHistoryQuery = { - getQueryName: '', - getQueryInputTypeName: '', - listQueryName: '', - listQueryInputTypeName: '', - }; - event.messages = [ - { - role: message.role, - content: message.content, - }, - ]; - } + await this.insertMessage(apolloClient, message); const response = await this.executeConversationTurn( event, defaultConversationHandlerFunction, @@ -420,7 +484,8 @@ class ConversationHandlerTestProject extends TestProjectBase { backendId: BackendIdentifier, accessToken: string, graphqlApiEndpoint: string, - apolloClient: ApolloClient + apolloClient: ApolloClient, + streamResponse: boolean ): Promise => { const defaultConversationHandlerFunction = ( await this.resourceFinder.findByBackendIdentifier( @@ -477,7 +542,7 @@ class ConversationHandlerTestProject extends TestProjectBase { }, ], }, - ...commonEventProperties, + ...this.getCommonEventProperties(streamResponse), }; const response = await this.executeConversationTurn( event, @@ -495,7 +560,8 @@ class ConversationHandlerTestProject extends TestProjectBase { backendId: BackendIdentifier, accessToken: string, graphqlApiEndpoint: string, - apolloClient: ApolloClient + apolloClient: ApolloClient, + streamResponse: boolean ): Promise => { const defaultConversationHandlerFunction = ( await this.resourceFinder.findByBackendIdentifier( @@ -545,7 +611,7 @@ class ConversationHandlerTestProject extends TestProjectBase { }, ], }, - ...commonEventProperties, + ...this.getCommonEventProperties(streamResponse), }; const response = await this.executeConversationTurn( event, @@ -561,11 +627,181 @@ class ConversationHandlerTestProject extends TestProjectBase { assert.match(response.content, /"city":"Seattle"/); }; + private executeConversationTurn = async ( + event: ConversationTurnEvent, + functionName: string, + apolloClient: ApolloClient + ): Promise<{ + content: string; + errors?: Array; + }> => { + console.log( + `Sending event conversationId=${event.conversationId} currentMessageId=${event.currentMessageId}` + ); + await this.lambdaClient.send( + new InvokeCommand({ + FunctionName: functionName, + Payload: Buffer.from(JSON.stringify(event)), + }) + ); + + // assert that response came back + if (event.streamResponse) { + let nextToken: string | undefined; + const chunks: Array = []; + do { + const queryResult = await apolloClient.query<{ + listConversationMessageAssistantStreamingResponses: { + items: Array; + nextToken: string | undefined; + }; + }>({ + query: gql` + query ListMessageChunks( + $conversationId: ID + $associatedUserMessageId: ID + $nextToken: String + ) { + listConversationMessageAssistantStreamingResponses( + limit: 1000 + nextToken: $nextToken + filter: { + conversationId: { eq: $conversationId } + 
associatedUserMessageId: { eq: $associatedUserMessageId } + } + ) { + items { + associatedUserMessageId + contentBlockDeltaIndex + contentBlockDoneAtIndex + contentBlockIndex + contentBlockText + contentBlockToolUse + conversationId + createdAt + errors { + errorType + message + } + id + owner + stopReason + updatedAt + } + nextToken + } + } + `, + variables: { + conversationId: event.conversationId, + associatedUserMessageId: event.currentMessageId, + nextToken, + }, + fetchPolicy: 'no-cache', + }); + nextToken = + queryResult.data.listConversationMessageAssistantStreamingResponses + .nextToken; + chunks.push( + ...queryResult.data.listConversationMessageAssistantStreamingResponses + .items + ); + } while (nextToken); + + assert.ok(chunks); + + if (chunks.length === 1 && chunks[0].errors) { + return { + content: '', + errors: chunks[0].errors, + }; + } + + chunks.sort((a, b) => { + // This is very simplified sort by message,block and delta indexes; + let aValue = 1000 * 1000 * a.contentBlockIndex; + if (a.contentBlockDeltaIndex) { + aValue += a.contentBlockDeltaIndex; + } + let bValue = 1000 * 1000 * b.contentBlockIndex; + if (b.contentBlockDeltaIndex) { + bValue += b.contentBlockDeltaIndex; + } + return aValue - bValue; + }); + + const content = chunks.reduce((accumulated, current) => { + if (current.contentBlockText) { + accumulated += current.contentBlockText; + } + if (current.contentBlockToolUse) { + accumulated += current.contentBlockToolUse; + } + return accumulated; + }, ''); + + return { content }; + } + const queryResult = await apolloClient.query<{ + listConversationMessageAssistantResponses: { + items: Array; + }; + }>({ + query: gql` + query ListMessage($conversationId: ID, $associatedUserMessageId: ID) { + listConversationMessageAssistantResponses( + filter: { + conversationId: { eq: $conversationId } + associatedUserMessageId: { eq: $associatedUserMessageId } + } + limit: 1000 + ) { + items { + conversationId + id + updatedAt + createdAt + content + errors { + errorType + message + } + associatedUserMessageId + } + nextToken + } + } + `, + variables: { + conversationId: event.conversationId, + associatedUserMessageId: event.currentMessageId, + }, + fetchPolicy: 'no-cache', + }); + assert.strictEqual( + 1, + queryResult.data.listConversationMessageAssistantResponses.items.length + ); + const response = + queryResult.data.listConversationMessageAssistantResponses.items[0]; + + if (response.errors) { + return { + content: '', + errors: response.errors, + }; + } + + assert.ok(response.content); + return { content: response.content }; + }; + private assertCustomConversationHandlerCanExecuteTurn = async ( backendId: BackendIdentifier, accessToken: string, graphqlApiEndpoint: string, - apolloClient: ApolloClient + apolloClient: ApolloClient, + streamResponse: boolean ): Promise => { const customConversationHandlerFunction = ( await this.resourceFinder.findByBackendIdentifier( @@ -581,7 +817,7 @@ class ConversationHandlerTestProject extends TestProjectBase { role: 'user', content: [ { - text: 'What is the temperature in Seattle, Boston and Miami?', + text: 'What is the temperature in Seattle and Boston?', }, ], }; @@ -595,7 +831,7 @@ class ConversationHandlerTestProject extends TestProjectBase { request: { headers: { authorization: accessToken }, }, - ...commonEventProperties, + ...this.getCommonEventProperties(streamResponse), }; const response = await this.executeConversationTurn( event, @@ -615,55 +851,61 @@ class ConversationHandlerTestProject extends TestProjectBase { 
expectedTemperaturesInProgrammaticToolScenario.Boston.toString() ) ); - assert.match( - response.content, - new RegExp( - expectedTemperaturesInProgrammaticToolScenario.Miami.toString() - ) - ); }; - private executeConversationTurn = async ( - event: ConversationTurnEvent, - functionName: string, - apolloClient: ApolloClient - ): Promise => { - await this.lambdaClient.send( - new InvokeCommand({ - FunctionName: functionName, - Payload: Buffer.from(JSON.stringify(event)), - }) - ); + private assertDefaultConversationHandlerCanPropagateError = async ( + backendId: BackendIdentifier, + accessToken: string, + graphqlApiEndpoint: string, + apolloClient: ApolloClient, + streamResponse: boolean + ): Promise => { + const defaultConversationHandlerFunction = ( + await this.resourceFinder.findByBackendIdentifier( + backendId, + 'AWS::Lambda::Function', + (name) => name.includes('default') + ) + )[0]; - // assert that response came back + const message: CreateConversationMessageChatInput = { + id: randomUUID().toString(), + conversationId: randomUUID().toString(), + role: 'user', + content: [ + { + text: 'What is the value of PI?', + }, + ], + }; - const queryResult = await apolloClient.query<{ - listConversationMessageAssistantResponses: { - items: Array; - }; - }>({ - query: gql` - query ListMessages { - listConversationMessageAssistantResponses(limit: 1000) { - items { - conversationId - id - updatedAt - createdAt - content - associatedUserMessageId - } - } - } - `, - fetchPolicy: 'no-cache', - }); - const response = - queryResult.data.listConversationMessageAssistantResponses.items.find( - (item) => item.associatedUserMessageId === event.currentMessageId - ); - assert.ok(response); - return response; + // send event + const event: ConversationTurnEvent = { + conversationId: message.conversationId, + currentMessageId: message.id, + graphqlApiEndpoint: graphqlApiEndpoint, + request: { + headers: { authorization: accessToken }, + }, + ...this.getCommonEventProperties(streamResponse), + }; + + // Inject failure + event.modelConfiguration.modelId = 'invalidId'; + await this.insertMessage(apolloClient, message); + + const response = await this.executeConversationTurn( + event, + defaultConversationHandlerFunction, + apolloClient + ); + assert.ok(response.errors); + assert.ok(response.errors[0]); + assert.strictEqual(response.errors[0].errorType, 'ValidationException'); + assert.match( + response.errors[0].message, + /provided model identifier is invalid/ + ); }; private insertMessage = async ( @@ -683,4 +925,65 @@ class ConversationHandlerTestProject extends TestProjectBase { }, }); }; + + private getCommonEventProperties = (streamResponse: boolean) => { + const responseMutation = streamResponse + ? 
{ + name: 'createConversationMessageAssistantStreamingResponse', + inputTypeName: + 'CreateConversationMessageAssistantStreamingResponseInput', + selectionSet: ['id', 'conversationId', 'createdAt', 'updatedAt'].join( + '\n' + ), + } + : { + name: 'createConversationMessageAssistantResponse', + inputTypeName: 'CreateConversationMessageAssistantResponseInput', + selectionSet: [ + 'id', + 'conversationId', + 'content', + 'owner', + 'createdAt', + 'updatedAt', + ].join('\n'), + }; + return { + streamResponse, + responseMutation, + messageHistoryQuery: { + getQueryName: 'getConversationMessageChat', + getQueryInputTypeName: 'ID', + listQueryName: 'listConversationMessageChats', + listQueryInputTypeName: 'ModelConversationMessageChatFilterInput', + }, + modelConfiguration: { + modelId: bedrockModelId, + systemPrompt: 'You are helpful bot.', + }, + }; + }; + + /** + * Bedrock sometimes produces empty response or half backed response. + * On the other hand we have to run some assertions on those responses. + * Therefore, we wrap transactions in retry loop. + */ + private executeWithRetry = async ( + callable: () => Promise, + maxAttempts = 3 + ) => { + const collectedErrors = []; + for (let i = 1; i <= maxAttempts; i++) { + try { + await callable(); + // if successful return early; + return; + } catch (e) { + collectedErrors.push(e); + } + } + // if we got here there were only errors + throw new AggregateError(collectedErrors); + }; } diff --git a/packages/integration-tests/src/test-project-setup/create_empty_amplify_project.ts b/packages/integration-tests/src/test-project-setup/create_empty_amplify_project.ts index 97afcf198b..b86f280f32 100644 --- a/packages/integration-tests/src/test-project-setup/create_empty_amplify_project.ts +++ b/packages/integration-tests/src/test-project-setup/create_empty_amplify_project.ts @@ -19,7 +19,12 @@ export const createEmptyAmplifyProject = async ( projectDotAmplifyDir: string; }> => { const projectRoot = await fs.mkdtemp(path.join(parentDir, projectDirName)); - const projectName = `${TEST_PROJECT_PREFIX}-${projectDirName}-${shortUuid()}`; + let projectName = `${TEST_PROJECT_PREFIX}-${projectDirName}`; + if ( + process.env.AMPLIFY_BACKEND_TESTS_RETAIN_TEST_PROJECT_DEPLOYMENT !== 'true' + ) { + projectName += `-${shortUuid()}`; + } await fs.writeFile( path.join(projectRoot, 'package.json'), JSON.stringify({ name: projectName, type: 'module' }, null, 2) diff --git a/packages/integration-tests/src/test-project-setup/custom_outputs.ts b/packages/integration-tests/src/test-project-setup/custom_outputs.ts index 7e6b367e5a..0a20230e25 100644 --- a/packages/integration-tests/src/test-project-setup/custom_outputs.ts +++ b/packages/integration-tests/src/test-project-setup/custom_outputs.ts @@ -12,6 +12,7 @@ import { } from '@aws-amplify/client-config'; import assert from 'node:assert'; import { AmplifyClient } from '@aws-sdk/client-amplify'; +import { e2eToolingClientConfig } from '../e2e_tooling_client_config.js'; /** * Creates minimal test projects with custom outputs. @@ -23,8 +24,12 @@ export class CustomOutputsTestProjectCreator implements TestProjectCreator { * Creates project creator. 
*/ constructor( - private readonly cfnClient: CloudFormationClient, - private readonly amplifyClient: AmplifyClient + private readonly cfnClient: CloudFormationClient = new CloudFormationClient( + e2eToolingClientConfig + ), + private readonly amplifyClient: AmplifyClient = new AmplifyClient( + e2eToolingClientConfig + ) ) {} createProject = async (e2eProjectDir: string): Promise => { diff --git a/packages/integration-tests/src/test-project-setup/data_storage_auth_with_triggers.ts b/packages/integration-tests/src/test-project-setup/data_storage_auth_with_triggers.ts index 27077d8359..d9924bc238 100644 --- a/packages/integration-tests/src/test-project-setup/data_storage_auth_with_triggers.ts +++ b/packages/integration-tests/src/test-project-setup/data_storage_auth_with_triggers.ts @@ -1,5 +1,5 @@ import fs from 'fs/promises'; -import { SecretClient } from '@aws-amplify/backend-secret'; +import { SecretClient, getSecretClient } from '@aws-amplify/backend-secret'; import { BackendIdentifier } from '@aws-amplify/plugin-types'; import { createEmptyAmplifyProject } from './create_empty_amplify_project.js'; import { CloudFormationClient } from '@aws-sdk/client-cloudformation'; @@ -9,7 +9,11 @@ import path from 'path'; import { TestProjectCreator } from './test_project_creator.js'; import { DeployedResourcesFinder } from '../find_deployed_resource.js'; import assert from 'node:assert'; -import { InvokeCommand, LambdaClient } from '@aws-sdk/client-lambda'; +import { + GetFunctionCommand, + InvokeCommand, + LambdaClient, +} from '@aws-sdk/client-lambda'; import { amplifySharedSecretNameKey, createAmplifySharedSecretName, @@ -28,6 +32,11 @@ import { } from '@aws-sdk/client-cloudtrail'; import { e2eToolingClientConfig } from '../e2e_tooling_client_config.js'; import isMatch from 'lodash.ismatch'; +import { TextWriter, ZipReader } from '@zip.js/zip.js'; +import { + AdminCreateUserCommand, + CognitoIdentityProviderClient, +} from '@aws-sdk/client-cognito-identity-provider'; /** * Creates test projects with data, storage, and auth categories. @@ -41,15 +50,32 @@ export class DataStorageAuthWithTriggerTestProjectCreator * Creates project creator. 
*/ constructor( - private readonly cfnClient: CloudFormationClient, - private readonly amplifyClient: AmplifyClient, - private readonly secretClient: SecretClient, - private readonly lambdaClient: LambdaClient, - private readonly s3Client: S3Client, - private readonly iamClient: IAMClient, - private readonly sqsClient: SQSClient, - private readonly cloudTrailClient: CloudTrailClient, - private readonly resourceFinder: DeployedResourcesFinder + private readonly cfnClient: CloudFormationClient = new CloudFormationClient( + e2eToolingClientConfig + ), + private readonly amplifyClient: AmplifyClient = new AmplifyClient( + e2eToolingClientConfig + ), + private readonly secretClient: SecretClient = getSecretClient( + e2eToolingClientConfig + ), + private readonly lambdaClient: LambdaClient = new LambdaClient( + e2eToolingClientConfig + ), + private readonly s3Client: S3Client = new S3Client(e2eToolingClientConfig), + private readonly iamClient: IAMClient = new IAMClient( + e2eToolingClientConfig + ), + private readonly sqsClient: SQSClient = new SQSClient( + e2eToolingClientConfig + ), + private readonly cloudTrailClient: CloudTrailClient = new CloudTrailClient( + e2eToolingClientConfig + ), + private readonly resourceFinder: DeployedResourcesFinder = new DeployedResourcesFinder(), + private readonly cognitoClient: CognitoIdentityProviderClient = new CognitoIdentityProviderClient( + e2eToolingClientConfig + ) ) {} createProject = async (e2eProjectDir: string): Promise => { @@ -68,7 +94,8 @@ export class DataStorageAuthWithTriggerTestProjectCreator this.iamClient, this.sqsClient, this.cloudTrailClient, - this.resourceFinder + this.resourceFinder, + this.cognitoClient ); await fs.cp( project.sourceProjectAmplifyDirURL, @@ -135,7 +162,8 @@ class DataStorageAuthWithTriggerTestProject extends TestProjectBase { private readonly iamClient: IAMClient, private readonly sqsClient: SQSClient, private readonly cloudTrailClient: CloudTrailClient, - private readonly resourceFinder: DeployedResourcesFinder + private readonly resourceFinder: DeployedResourcesFinder, + private readonly cognitoClient: CognitoIdentityProviderClient ) { super( name, @@ -238,11 +266,24 @@ class DataStorageAuthWithTriggerTestProject extends TestProjectBase { (name) => name.includes('funcWithSchedule') ); + const funcNoMinify = await this.resourceFinder.findByBackendIdentifier( + backendId, + 'AWS::Lambda::Function', + (name) => name.includes('funcNoMinify') + ); + const funcCustomEmailSender = + await this.resourceFinder.findByBackendIdentifier( + backendId, + 'AWS::Lambda::Function', + (name) => name.includes('funcCustomEmailSender') + ); + assert.equal(defaultNodeLambda.length, 1); assert.equal(node16Lambda.length, 1); assert.equal(funcWithSsm.length, 1); assert.equal(funcWithAwsSdk.length, 1); assert.equal(funcWithSchedule.length, 1); + assert.equal(funcCustomEmailSender.length, 1); const expectedResponse = { s3TestContent: 'this is some test content', @@ -254,10 +295,19 @@ class DataStorageAuthWithTriggerTestProject extends TestProjectBase { await this.checkLambdaResponse(defaultNodeLambda[0], expectedResponse); await this.checkLambdaResponse(node16Lambda[0], expectedResponse); await this.checkLambdaResponse(funcWithSsm[0], 'It is working'); - await this.checkLambdaResponse(funcWithAwsSdk[0], 'It is working'); + + // Custom email sender assertion + await this.assertCustomEmailSenderWorks(backendId); await this.assertScheduleInvokesFunction(backendId); + const expectedNoMinifyChunk = [ + 'var handler = async () => {', + ' return 
"No minify";', + '};', + ].join('\n'); + await this.checkLambdaCode(funcNoMinify[0], expectedNoMinifyChunk); + const bucketName = await this.resourceFinder.findByBackendIdentifier( backendId, 'AWS::S3::Bucket', @@ -307,7 +357,7 @@ class DataStorageAuthWithTriggerTestProject extends TestProjectBase { assert.ok(fileContent.includes('newKey: string;')); // Env var added via addEnvironment assert.ok(fileContent.includes('TEST_SECRET: string;')); // Env var added via defineFunction - // assert storage access paths are correct in stack outputs + // assert specific config are correct in the outputs file const outputsObject = JSON.parse( await fs.readFile( path.join(this.projectDirPath, 'amplify_outputs.json'), @@ -331,6 +381,21 @@ class DataStorageAuthWithTriggerTestProject extends TestProjectBase { }, }) ); + + assert.ok( + isMatch(outputsObject.auth.groups, [ + { + Editors: { + precedence: 2, // previously 0 but was overwritten + }, + }, + { + Admins: { + precedence: 1, + }, + }, + ]) + ); } private getUpdateReplacementDefinition = (suffix: string) => ({ @@ -390,6 +455,25 @@ class DataStorageAuthWithTriggerTestProject extends TestProjectBase { assert.deepStrictEqual(responsePayload, expectedResponse); }; + private checkLambdaCode = async ( + lambdaName: string, + expectedCode: string + ) => { + // get the lambda code + const response = await this.lambdaClient.send( + new GetFunctionCommand({ FunctionName: lambdaName }) + ); + const codeUrl = response.Code?.Location; + assert(codeUrl !== undefined); + const fetchResponse = await fetch(codeUrl); + const zipReader = new ZipReader(fetchResponse.body!); + const entries = await zipReader.getEntries(); + const entry = entries.find((entry) => entry.filename.endsWith('index.mjs')); + assert(entry !== undefined); + const sourceCode = await entry.getData!(new TextWriter()); + assert(sourceCode.includes(expectedCode)); + }; + private assertExpectedCleanup = async () => { await this.waitForBucketDeletion(this.testBucketName); await this.assertRolesDoNotExist(this.testRoleNames); @@ -537,7 +621,7 @@ class DataStorageAuthWithTriggerTestProject extends TestProjectBase { ) => { const TIMEOUT_MS = 1000 * 60 * 2; // 2 minutes const startTime = Date.now(); - let messageCount = 0; + let receivedMessageCount = 0; const queue = await this.resourceFinder.findByBackendIdentifier( backendId, @@ -546,16 +630,17 @@ class DataStorageAuthWithTriggerTestProject extends TestProjectBase { ); // wait for schedule to invoke the function one time for it to send a message - while (Date.now() - startTime < TIMEOUT_MS && messageCount < 1) { + while (Date.now() - startTime < TIMEOUT_MS) { const response = await this.sqsClient.send( new ReceiveMessageCommand({ QueueUrl: queue[0], WaitTimeSeconds: 20, + MaxNumberOfMessages: 10, }) ); if (response.Messages) { - messageCount += response.Messages.length; + receivedMessageCount += response.Messages.length; // delete messages afterwards for (const message of response.Messages) { @@ -568,5 +653,90 @@ class DataStorageAuthWithTriggerTestProject extends TestProjectBase { } } } + + if (receivedMessageCount === 0) { + assert.fail( + `The scheduled function failed to invoke and send a message to the queue.` + ); + } + }; + + private assertCustomEmailSenderWorks = async ( + backendId: BackendIdentifier + ) => { + const TIMEOUT_MS = 1000 * 60 * 2; // 2 minutes + const startTime = Date.now(); + const queue = await this.resourceFinder.findByBackendIdentifier( + backendId, + 'AWS::SQS::Queue', + (name) => name.includes('customEmailSenderQueue') + ); + 
+ assert.strictEqual(queue.length, 1, 'Custom email sender queue not found'); + + // Trigger an email sending operation + await this.triggerEmailSending(backendId); + + // Wait for the SQS message + let messageReceived = false; + while (Date.now() - startTime < TIMEOUT_MS && !messageReceived) { + const response = await this.sqsClient.send( + new ReceiveMessageCommand({ + QueueUrl: queue[0], + WaitTimeSeconds: 20, + }) + ); + + if (response.Messages && response.Messages.length > 0) { + messageReceived = true; + // Verify the message content + const messageBody = JSON.parse(response.Messages[0].Body || '{}'); + assert.strictEqual( + messageBody.message, + 'Custom Email Sender is working', + 'Unexpected message content' + ); + + // Delete the message + await this.sqsClient.send( + new DeleteMessageCommand({ + QueueUrl: queue[0], + ReceiptHandle: response.Messages[0].ReceiptHandle!, + }) + ); + } + } + + assert.strictEqual( + messageReceived, + true, + 'Custom email sender was not triggered within the timeout period' + ); + }; + + private triggerEmailSending = async (backendId: BackendIdentifier) => { + const userPoolId = await this.resourceFinder.findByBackendIdentifier( + backendId, + 'AWS::Cognito::UserPool', + () => true + ); + + assert.strictEqual(userPoolId.length, 1, 'User pool not found'); + + const username = `testuser_${Date.now()}@example.com`; + const password = 'TestPassword123!'; + + await this.cognitoClient.send( + new AdminCreateUserCommand({ + UserPoolId: userPoolId[0], + Username: username, + TemporaryPassword: password, + UserAttributes: [ + { Name: 'email', Value: username }, + { Name: 'email_verified', Value: 'true' }, + ], + }) + ); + // The creation of a new user should trigger the custom email sender }; } diff --git a/packages/integration-tests/src/test-project-setup/minimal_with_typescript_idioms.ts b/packages/integration-tests/src/test-project-setup/minimal_with_typescript_idioms.ts index 983f11a211..b7c1886df1 100644 --- a/packages/integration-tests/src/test-project-setup/minimal_with_typescript_idioms.ts +++ b/packages/integration-tests/src/test-project-setup/minimal_with_typescript_idioms.ts @@ -4,6 +4,7 @@ import { createEmptyAmplifyProject } from './create_empty_amplify_project.js'; import { CloudFormationClient } from '@aws-sdk/client-cloudformation'; import { TestProjectCreator } from './test_project_creator.js'; import { AmplifyClient } from '@aws-sdk/client-amplify'; +import { e2eToolingClientConfig } from '../e2e_tooling_client_config.js'; /** * Creates minimal test projects with typescript idioms. @@ -17,8 +18,12 @@ export class MinimalWithTypescriptIdiomTestProjectCreator * Creates project creator. 
*/ constructor( - private readonly cfnClient: CloudFormationClient, - private readonly amplifyClient: AmplifyClient + private readonly cfnClient: CloudFormationClient = new CloudFormationClient( + e2eToolingClientConfig + ), + private readonly amplifyClient: AmplifyClient = new AmplifyClient( + e2eToolingClientConfig + ) ) {} createProject = async (e2eProjectDir: string): Promise => { diff --git a/packages/integration-tests/src/test-project-setup/setup_deployed_backend_client.ts b/packages/integration-tests/src/test-project-setup/setup_deployed_backend_client.ts index 618ec72b26..66704f60cf 100644 --- a/packages/integration-tests/src/test-project-setup/setup_deployed_backend_client.ts +++ b/packages/integration-tests/src/test-project-setup/setup_deployed_backend_client.ts @@ -1,4 +1,10 @@ import { execa } from 'execa'; +import fsp from 'fs/promises'; +import { fileURLToPath } from 'node:url'; + +const packageLockPath = fileURLToPath( + new URL('../../../../package-lock.json', import.meta.url) +); /** * Configures package.json for testing the specified project directory with the version of deployed-backend-client on npm @@ -9,4 +15,14 @@ export const setupDeployedBackendClient = async ( await execa('npm', ['install', '@aws-amplify/deployed-backend-client'], { cwd: projectRootDirPath, }); + + // Install constructs version that is matching our package lock. + // Otherwise, the test might fail due to incompatible properties + // when two definitions are present. + const packageLock = JSON.parse(await fsp.readFile(packageLockPath, 'utf-8')); + const constructsVersion = + packageLock.packages['node_modules/constructs'].version; + await execa('npm', ['install', `constructs@${constructsVersion}`], { + cwd: projectRootDirPath, + }); }; diff --git a/packages/integration-tests/src/test-project-setup/test_project_base.ts b/packages/integration-tests/src/test-project-setup/test_project_base.ts index c6ab0284fe..1b1650a5b3 100644 --- a/packages/integration-tests/src/test-project-setup/test_project_base.ts +++ b/packages/integration-tests/src/test-project-setup/test_project_base.ts @@ -9,7 +9,6 @@ import { ampxCli } from '../process-controller/process_controller.js'; import { confirmDeleteSandbox, interruptSandbox, - rejectCleanupSandbox, waitForSandboxDeploymentToPrintTotalTime, } from '../process-controller/predicated_action_macros.js'; @@ -77,7 +76,6 @@ export abstract class TestProjectBase { }) .do(waitForSandboxDeploymentToPrintTotalTime()) .do(interruptSandbox()) - .do(rejectCleanupSandbox()) .run(); } else { await ampxCli( diff --git a/packages/integration-tests/src/test-project-setup/test_project_creator.ts b/packages/integration-tests/src/test-project-setup/test_project_creator.ts index 95cf523aa4..7327156759 100644 --- a/packages/integration-tests/src/test-project-setup/test_project_creator.ts +++ b/packages/integration-tests/src/test-project-setup/test_project_creator.ts @@ -1,78 +1,6 @@ import { TestProjectBase } from './test_project_base.js'; -import { CloudFormationClient } from '@aws-sdk/client-cloudformation'; -import { getSecretClient } from '@aws-amplify/backend-secret'; -import { DataStorageAuthWithTriggerTestProjectCreator } from './data_storage_auth_with_triggers.js'; -import { MinimalWithTypescriptIdiomTestProjectCreator } from './minimal_with_typescript_idioms.js'; -import { ConversationHandlerTestProjectCreator } from './conversation_handler_project.js'; -import { LambdaClient } from '@aws-sdk/client-lambda'; -import { DeployedResourcesFinder } from '../find_deployed_resource.js'; 
-import { e2eToolingClientConfig } from '../e2e_tooling_client_config.js'; -import { CustomOutputsTestProjectCreator } from './custom_outputs.js'; -import { S3Client } from '@aws-sdk/client-s3'; -import { IAMClient } from '@aws-sdk/client-iam'; -import { AccessTestingProjectTestProjectCreator } from './access_testing_project.js'; -import { CognitoIdentityProviderClient } from '@aws-sdk/client-cognito-identity-provider'; -import { CognitoIdentityClient } from '@aws-sdk/client-cognito-identity'; -import { STSClient } from '@aws-sdk/client-sts'; -import { AmplifyClient } from '@aws-sdk/client-amplify'; -import { SQSClient } from '@aws-sdk/client-sqs'; -import { CloudTrailClient } from '@aws-sdk/client-cloudtrail'; export type TestProjectCreator = { readonly name: string; createProject: (e2eProjectDir: string) => Promise; }; - -/** - * Generates a list of test projects. - */ -export const getTestProjectCreators = (): TestProjectCreator[] => { - const testProjectCreators: TestProjectCreator[] = []; - - const cfnClient = new CloudFormationClient(e2eToolingClientConfig); - const cloudTrailClient = new CloudTrailClient(e2eToolingClientConfig); - const amplifyClient = new AmplifyClient(e2eToolingClientConfig); - const cognitoIdentityClient = new CognitoIdentityClient( - e2eToolingClientConfig - ); - const cognitoIdentityProviderClient = new CognitoIdentityProviderClient( - e2eToolingClientConfig - ); - const lambdaClient = new LambdaClient(e2eToolingClientConfig); - const s3Client = new S3Client(e2eToolingClientConfig); - const iamClient = new IAMClient(e2eToolingClientConfig); - const sqsClient = new SQSClient(e2eToolingClientConfig); - const resourceFinder = new DeployedResourcesFinder(cfnClient); - const stsClient = new STSClient(e2eToolingClientConfig); - const secretClient = getSecretClient(e2eToolingClientConfig); - testProjectCreators.push( - new DataStorageAuthWithTriggerTestProjectCreator( - cfnClient, - amplifyClient, - secretClient, - lambdaClient, - s3Client, - iamClient, - sqsClient, - cloudTrailClient, - resourceFinder - ), - new MinimalWithTypescriptIdiomTestProjectCreator(cfnClient, amplifyClient), - new CustomOutputsTestProjectCreator(cfnClient, amplifyClient), - new AccessTestingProjectTestProjectCreator( - cfnClient, - amplifyClient, - cognitoIdentityClient, - cognitoIdentityProviderClient, - stsClient - ), - new ConversationHandlerTestProjectCreator( - cfnClient, - amplifyClient, - lambdaClient, - cognitoIdentityProviderClient, - resourceFinder - ) - ); - return testProjectCreators; -}; diff --git a/packages/integration-tests/src/test-projects/conversation-handler/amplify/backend.ts b/packages/integration-tests/src/test-projects/conversation-handler/amplify/backend.ts index 3c6fa81c2e..f6914ace4e 100644 --- a/packages/integration-tests/src/test-projects/conversation-handler/amplify/backend.ts +++ b/packages/integration-tests/src/test-projects/conversation-handler/amplify/backend.ts @@ -9,11 +9,24 @@ const backend = defineBackend({ auth, data, customConversationHandler }); const stack = backend.createStack('conversationHandlerStack'); -new ConversationHandlerFunction(stack, 'defaultConversationHandlerFunction', { - models: [ - { - modelId: bedrockModelId, - region: stack.region, - }, - ], -}); +const defaultConversationHandler = new ConversationHandlerFunction( + stack, + 'defaultConversationHandlerFunction', + { + models: [ + { + modelId: bedrockModelId, + region: stack.region, + }, + ], + } +); + 
+defaultConversationHandler.resources.cfnResources.cfnFunction.addPropertyOverride( + 'LoggingConfig.ApplicationLogLevel', + 'DEBUG' +); +backend.customConversationHandler.resources.cfnResources.cfnFunction.addPropertyOverride( + 'LoggingConfig.ApplicationLogLevel', + 'DEBUG' +); diff --git a/packages/integration-tests/src/test-projects/conversation-handler/amplify/constants.ts b/packages/integration-tests/src/test-projects/conversation-handler/amplify/constants.ts index 32bd11a5e2..60961d6130 100644 --- a/packages/integration-tests/src/test-projects/conversation-handler/amplify/constants.ts +++ b/packages/integration-tests/src/test-projects/conversation-handler/amplify/constants.ts @@ -11,7 +11,6 @@ export const bedrockModelId = 'anthropic.claude-3-haiku-20240307-v1:0'; export const expectedTemperaturesInProgrammaticToolScenario = { Seattle: 75, Boston: 58, - Miami: 97, }; export const expectedTemperatureInDataToolScenario = 85; diff --git a/packages/integration-tests/src/test-projects/conversation-handler/amplify/custom-conversation-handler/custom_handler.ts b/packages/integration-tests/src/test-projects/conversation-handler/amplify/custom-conversation-handler/custom_handler.ts index 922bbf3fd7..add8b2c80b 100644 --- a/packages/integration-tests/src/test-projects/conversation-handler/amplify/custom-conversation-handler/custom_handler.ts +++ b/packages/integration-tests/src/test-projects/conversation-handler/amplify/custom-conversation-handler/custom_handler.ts @@ -21,7 +21,7 @@ const thermometer = createExecutableTool( }, (input) => { const city = input.city; - if (city === 'Seattle' || city === 'Boston' || city === 'Miami') { + if (city === 'Seattle' || city === 'Boston') { return Promise.resolve({ // We use this value in test assertion. // LLM uses tool to get temperature and serves this value in final response. 
diff --git a/packages/integration-tests/src/test-projects/conversation-handler/amplify/data/resource.ts b/packages/integration-tests/src/test-projects/conversation-handler/amplify/data/resource.ts index f698574310..07c19400c2 100644 --- a/packages/integration-tests/src/test-projects/conversation-handler/amplify/data/resource.ts +++ b/packages/integration-tests/src/test-projects/conversation-handler/amplify/data/resource.ts @@ -86,12 +86,43 @@ const schema = a.schema({ tools: a.ref('MockTool').array(), }), + MockConversationTurnError: a.customType({ + errorType: a.string(), + message: a.string(), + }), + ConversationMessageAssistantResponse: a .model({ conversationId: a.id(), associatedUserMessageId: a.id(), content: a.string(), + errors: a.ref('MockConversationTurnError').array(), + }) + .authorization((allow) => [allow.authenticated(), allow.owner()]), + + ConversationMessageAssistantStreamingResponse: a + .model({ + // always + conversationId: a.id().required(), + associatedUserMessageId: a.id().required(), + contentBlockIndex: a.integer(), + accumulatedTurnContent: a.ref('MockContentBlock').array(), + + // these describe chunks or end of block + contentBlockText: a.string(), + contentBlockToolUse: a.string(), + contentBlockDeltaIndex: a.integer(), + contentBlockDoneAtIndex: a.integer(), + + // when message is complete + stopReason: a.string(), + + // error + errors: a.ref('MockConversationTurnError').array(), }) + .secondaryIndexes((index) => [ + index('conversationId').sortKeys(['associatedUserMessageId']), + ]) .authorization((allow) => [allow.authenticated(), allow.owner()]), ConversationMessageChat: a @@ -103,6 +134,9 @@ const schema = a.schema({ aiContext: a.json(), toolConfiguration: a.ref('MockToolConfiguration'), }) + .secondaryIndexes((index) => [ + index('conversationId').sortKeys(['associatedUserMessageId']), + ]) .authorization((allow) => [allow.authenticated(), allow.owner()]), }); diff --git a/packages/integration-tests/src/test-projects/custom-outputs/amplify/backend.ts b/packages/integration-tests/src/test-projects/custom-outputs/amplify/backend.ts index 82b87ac655..ceed80b636 100644 --- a/packages/integration-tests/src/test-projects/custom-outputs/amplify/backend.ts +++ b/packages/integration-tests/src/test-projects/custom-outputs/amplify/backend.ts @@ -16,7 +16,7 @@ const sampleIdentityPoolId = 'test_identity_pool_id'; const sampleUserPoolClientId = 'test_user_pool_client_id'; backend.addOutput({ - version: '1.2', + version: '1.3', custom: { // test deploy time values restApiUrl: restApi.url, @@ -26,7 +26,7 @@ backend.addOutput({ }); backend.addOutput({ - version: '1.2', + version: '1.3', custom: { // test synth time values // and composition of config @@ -36,7 +36,7 @@ backend.addOutput({ const fakeCognitoUserPoolId = 'fakeCognitoUserPoolId'; backend.addOutput({ - version: '1.2', + version: '1.3', // test reserved key auth: { aws_region: sampleRegion, diff --git a/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/auth/resource.ts b/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/auth/resource.ts index 097a822ddb..14c55e503e 100644 --- a/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/auth/resource.ts +++ b/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/auth/resource.ts @@ -1,5 +1,9 @@ import { defineAuth, secret } from '@aws-amplify/backend'; -import { defaultNodeFunc } from '../function.js'; +import { 
defaultNodeFunc, funcCustomEmailSender } from '../function.js'; + +const customEmailSenderFunction = { + handler: funcCustomEmailSender, +}; export const auth = defineAuth({ loginWith: { @@ -21,8 +25,11 @@ export const auth = defineAuth({ logoutUrls: ['https://logout.com'], }, }, + senders: { + email: customEmailSenderFunction, + }, triggers: { postConfirmation: defaultNodeFunc, }, - groups: ['Admins'], + groups: ['Editors', 'Admins'], }); diff --git a/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/backend.ts b/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/backend.ts index 4cd85ed1e3..c47ce414bd 100644 --- a/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/backend.ts +++ b/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/backend.ts @@ -7,6 +7,9 @@ import { Stack } from 'aws-cdk-lib'; const backend = defineBackend(dataStorageAuthWithTriggers); backend.defaultNodeFunc.addEnvironment('newKey', 'newValue'); +// Change the precedence of the Editors group so that the Admins group has the lowest precedence +backend.auth.resources.groups['Editors'].cfnUserGroup.precedence = 2; + const scheduleFunctionLambda = backend.funcWithSchedule.resources.lambda; const scheduleFunctionLambdaRole = scheduleFunctionLambda.role; const queueStack = Stack.of(scheduleFunctionLambda); @@ -23,3 +26,27 @@ if (scheduleFunctionLambdaRole) { ); } backend.funcWithSchedule.addEnvironment('SQS_QUEUE_URL', queue.queueUrl); + +// Queue setup for customEmailSender + +const customEmailSenderLambda = backend.funcCustomEmailSender.resources.lambda; +const customEmailSenderLambdaRole = customEmailSenderLambda.role; +const customEmailSenderQueueStack = Stack.of(customEmailSenderLambda); +const emailSenderQueue = new Queue( + customEmailSenderQueueStack, + 'amplify-customEmailSenderQueue' +); + +if (customEmailSenderLambdaRole) { + emailSenderQueue.grantSendMessages( + Role.fromRoleArn( + customEmailSenderQueueStack, + 'CustomEmailSenderLambdaExecutionRole', + customEmailSenderLambdaRole.roleArn + ) + ); +} +backend.funcCustomEmailSender.addEnvironment( + 'CUSTOM_EMAIL_SENDER_SQS_QUEUE_URL', + emailSenderQueue.queueUrl +); diff --git a/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/func-src/handler_custom_email_sender.ts b/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/func-src/handler_custom_email_sender.ts new file mode 100644 index 0000000000..e52be2ea0a --- /dev/null +++ b/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/func-src/handler_custom_email_sender.ts @@ -0,0 +1,28 @@ +import { SQSClient, SendMessageCommand } from '@aws-sdk/client-sqs'; + +/** + * This function verifies that the custom email sender function is working properly. + */ +export const handler = async () => { + const sqsClient = new SQSClient({ region: process.env.region }); + + const queueUrl = process.env.CUSTOM_EMAIL_SENDER_SQS_QUEUE_URL; + + if (!queueUrl) { + throw new Error('CUSTOM_EMAIL_SENDER_SQS_QUEUE_URL is not set in environment variables'); + } + + const messageBody = JSON.stringify({ + message: 'Custom Email Sender is working', + timeStamp: new Date().toISOString(), + }); + + await sqsClient.send( + new SendMessageCommand({ + QueueUrl: queueUrl, + MessageBody: messageBody, + }) + ); + + return 'It is working'; +}; diff --git 
a/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/func-src/handler_no_minify.ts b/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/func-src/handler_no_minify.ts new file mode 100644 index 0000000000..f5a3fff455 --- /dev/null +++ b/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/func-src/handler_no_minify.ts @@ -0,0 +1,6 @@ +/** + * This function asserts that the code is not minified. + */ +export const handler = async () => { + return 'No minify'; +}; diff --git a/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/function.ts b/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/function.ts index 2405878c75..7265fb2ac4 100644 --- a/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/function.ts +++ b/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/function.ts @@ -50,3 +50,16 @@ export const funcWithSchedule = defineFunction({ entry: './func-src/handler_with_aws_sqs.ts', schedule: '* * * * ?', }); + +export const funcNoMinify = defineFunction({ + name: 'funcNoMinify', + entry: './func-src/handler_no_minify.ts', + bundling: { + minify: false, + }, +}); + +export const funcCustomEmailSender = defineFunction({ + name: 'funcCustomEmailSender', + entry: './func-src/handler_custom_email_sender.ts', +}); diff --git a/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/test_factories.ts b/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/test_factories.ts index 49227af615..73ee6b0479 100644 --- a/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/test_factories.ts +++ b/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/amplify/test_factories.ts @@ -5,6 +5,8 @@ import { funcWithAwsSdk, node16Func, funcWithSchedule, + funcNoMinify, + funcCustomEmailSender, } from './function.js'; import { storage } from './storage/resource.js'; import { auth } from './auth/resource.js'; @@ -18,4 +20,6 @@ export const dataStorageAuthWithTriggers = { funcWithSsm, funcWithAwsSdk, funcWithSchedule, + funcNoMinify, + funcCustomEmailSender, }; diff --git a/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/hotswap-update-files/function.ts b/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/hotswap-update-files/function.ts index 61a9171a16..a0abf3e302 100644 --- a/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/hotswap-update-files/function.ts +++ b/packages/integration-tests/src/test-projects/data-storage-auth-with-triggers-ts/hotswap-update-files/function.ts @@ -52,3 +52,16 @@ export const funcWithSchedule = defineFunction({ entry: './func-src/handler_with_aws_sqs.ts', schedule: '* * * * ?', }); + +export const funcNoMinify = defineFunction({ + name: 'funcNoMinify', + entry: './func-src/handler_no_minify.ts', + bundling: { + minify: false, + }, +}); + +export const funcCustomEmailSender = defineFunction({ + name: 'funcCustomEmailSender', + entry: './func-src/handler_custom_email_sender.ts', +}); diff --git a/packages/platform-core/API.md b/packages/platform-core/API.md index 928d4ada67..b0230e19c6 100644 --- a/packages/platform-core/API.md +++ b/packages/platform-core/API.md @@ -24,6 +24,7 @@ export abstract class AmplifyError 
extends Error { static fromError: (error: unknown) => AmplifyError<'UnknownFault' | 'CredentialsError' | 'InvalidCommandInputError' | 'DomainNotFoundError' | 'SyntaxError'>; // (undocumented) static fromStderr: (_stderr: string) => AmplifyError | undefined; + static isAmplifyError: (error: unknown) => error is AmplifyError; // (undocumented) readonly link?: string; // (undocumented) diff --git a/packages/platform-core/CHANGELOG.md b/packages/platform-core/CHANGELOG.md index 51c7f3c8f4..81f027b825 100644 --- a/packages/platform-core/CHANGELOG.md +++ b/packages/platform-core/CHANGELOG.md @@ -1,5 +1,11 @@ # @aws-amplify/platform-core +## 1.2.0 + +### Minor Changes + +- 583a3f2: Fix detection of AmplifyErrors + ## 1.1.0 ### Minor Changes diff --git a/packages/platform-core/package.json b/packages/platform-core/package.json index d62d258aa9..c24f9b8382 100644 --- a/packages/platform-core/package.json +++ b/packages/platform-core/package.json @@ -1,6 +1,6 @@ { "name": "@aws-amplify/platform-core", - "version": "1.1.0", + "version": "1.2.0", "type": "commonjs", "publishConfig": { "access": "public" diff --git a/packages/platform-core/src/errors/amplify_error.test.ts b/packages/platform-core/src/errors/amplify_error.test.ts index 1f19ac057d..2da01c0e28 100644 --- a/packages/platform-core/src/errors/amplify_error.test.ts +++ b/packages/platform-core/src/errors/amplify_error.test.ts @@ -165,7 +165,7 @@ void describe('AmplifyError.fromError', async () => { yargsErrors.forEach((error) => { const actual = AmplifyError.fromError(error); assert.ok( - actual instanceof AmplifyError && + AmplifyError.isAmplifyError(actual) && actual.name === 'InvalidCommandInputError', `Failed the test for error ${error.message}` ); @@ -175,7 +175,8 @@ void describe('AmplifyError.fromError', async () => { const error = new Error('getaddrinfo ENOTFOUND some-domain.com'); const actual = AmplifyError.fromError(error); assert.ok( - actual instanceof AmplifyError && actual.name === 'DomainNotFoundError', + AmplifyError.isAmplifyError(actual) && + actual.name === 'DomainNotFoundError', `Failed the test for error ${error.message}` ); }); @@ -184,7 +185,7 @@ void describe('AmplifyError.fromError', async () => { error.name = 'SyntaxError'; const actual = AmplifyError.fromError(error); assert.ok( - actual instanceof AmplifyError && actual.name === 'SyntaxError', + AmplifyError.isAmplifyError(actual) && actual.name === 'SyntaxError', `Failed the test for error ${error.message}` ); }); diff --git a/packages/platform-core/src/errors/amplify_error.ts b/packages/platform-core/src/errors/amplify_error.ts index e9c524d572..a61963a320 100644 --- a/packages/platform-core/src/errors/amplify_error.ts +++ b/packages/platform-core/src/errors/amplify_error.ts @@ -44,7 +44,7 @@ export abstract class AmplifyError extends Error { this.code = options.code; this.link = options.link; - if (cause && cause instanceof AmplifyError) { + if (cause && AmplifyError.isAmplifyError(cause)) { cause.serializedError = undefined; } this.serializedError = JSON.stringify( @@ -98,6 +98,26 @@ export abstract class AmplifyError extends Error { return undefined; }; + /** + * This function is a type predicate for AmplifyError. + * See https://www.typescriptlang.org/docs/handbook/2/narrowing.html#using-type-predicates. + * + * Checks if error is an AmplifyError by inspecting if required properties are set. + * This is recommended instead of instanceof operator. 
+ * The instance of operator does not work as expected if AmplifyError class is loaded + * from multiple sources, for example when package manager decides to not de-duplicate dependencies. + * See https://github.com/nodejs/node/issues/17943. + */ + static isAmplifyError = (error: unknown): error is AmplifyError => { + return ( + error instanceof Error && + 'classification' in error && + (error.classification === 'ERROR' || error.classification === 'FAULT') && + typeof error.name === 'string' && + typeof error.message === 'string' + ); + }; + static fromError = ( error: unknown ): AmplifyError< diff --git a/packages/plugin-types/CHANGELOG.md b/packages/plugin-types/CHANGELOG.md index 552d310278..3b99723649 100644 --- a/packages/plugin-types/CHANGELOG.md +++ b/packages/plugin-types/CHANGELOG.md @@ -1,5 +1,11 @@ # @aws-amplify/plugin-types +## 1.3.1 + +### Patch Changes + +- b56d344: update aws-cdk lib to ^2.158.0 + ## 1.3.0 ### Minor Changes diff --git a/packages/plugin-types/package.json b/packages/plugin-types/package.json index 70711832d6..65404568f2 100644 --- a/packages/plugin-types/package.json +++ b/packages/plugin-types/package.json @@ -1,6 +1,6 @@ { "name": "@aws-amplify/plugin-types", - "version": "1.3.0", + "version": "1.3.1", "types": "lib/index.d.ts", "type": "commonjs", "publishConfig": { @@ -11,7 +11,7 @@ }, "license": "Apache-2.0", "peerDependencies": { - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0", "@aws-sdk/types": "^3.609.0" }, diff --git a/packages/sandbox/CHANGELOG.md b/packages/sandbox/CHANGELOG.md index 8ba88ce39b..904304aaa8 100644 --- a/packages/sandbox/CHANGELOG.md +++ b/packages/sandbox/CHANGELOG.md @@ -1,5 +1,26 @@ # @aws-amplify/sandbox +## 1.2.5 + +### Patch Changes + +- 583a3f2: Fix detection of AmplifyErrors +- Updated dependencies [583a3f2] + - @aws-amplify/platform-core@1.2.0 + - @aws-amplify/backend-deployer@1.1.8 + +## 1.2.4 + +### Patch Changes + +- b56d344: update aws-cdk lib to ^2.158.0 +- Updated dependencies [c3c3057] +- Updated dependencies [b56d344] + - @aws-amplify/cli-core@1.2.0 + - @aws-amplify/backend-deployer@1.1.6 + - @aws-amplify/client-config@1.5.1 + - @aws-amplify/plugin-types@1.3.1 + ## 1.2.3 ### Patch Changes diff --git a/packages/sandbox/package.json b/packages/sandbox/package.json index b5c04aa0c8..d089774801 100644 --- a/packages/sandbox/package.json +++ b/packages/sandbox/package.json @@ -1,6 +1,6 @@ { "name": "@aws-amplify/sandbox", - "version": "1.2.3", + "version": "1.2.5", "type": "module", "publishConfig": { "access": "public" @@ -19,13 +19,13 @@ }, "license": "Apache-2.0", "dependencies": { - "@aws-amplify/backend-deployer": "^1.1.3", + "@aws-amplify/backend-deployer": "^1.1.8", "@aws-amplify/backend-secret": "^1.1.2", - "@aws-amplify/cli-core": "^1.1.3", - "@aws-amplify/client-config": "^1.3.1", + "@aws-amplify/cli-core": "^1.2.0", + "@aws-amplify/client-config": "^1.5.1", "@aws-amplify/deployed-backend-client": "^1.4.1", - "@aws-amplify/platform-core": "^1.0.6", - "@aws-amplify/plugin-types": "^1.2.2", + "@aws-amplify/platform-core": "^1.2.0", + "@aws-amplify/plugin-types": "^1.3.1", "@aws-sdk/client-cloudwatch-logs": "^3.624.0", "@aws-sdk/client-lambda": "^3.624.0", "@aws-sdk/client-ssm": "^3.624.0", @@ -42,6 +42,6 @@ "@types/parse-gitignore": "^1.0.0" }, "peerDependencies": { - "aws-cdk": "^2.152.0" + "aws-cdk": "^2.158.0" } } diff --git a/packages/sandbox/src/file_watching_sandbox.ts b/packages/sandbox/src/file_watching_sandbox.ts index 39891c1347..62e9a5cb94 100644 --- 
a/packages/sandbox/src/file_watching_sandbox.ts +++ b/packages/sandbox/src/file_watching_sandbox.ts @@ -273,7 +273,7 @@ export class FileWatchingSandbox extends EventEmitter implements Sandbox { // https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cognito-userpool.html#cfn-cognito-userpool-aliasattributes // offer to recreate the sandbox or revert the change if ( - error instanceof AmplifyError && + AmplifyError.isAmplifyError(error) && error.name === 'CFNUpdateNotSupportedError' ) { await this.handleUnsupportedDestructiveChanges(options); @@ -385,7 +385,7 @@ export class FileWatchingSandbox extends EventEmitter implements Sandbox { message = `${message}\nCaused By: ${error.cause.message}\n`; } - if (error instanceof AmplifyError && error.resolution) { + if (AmplifyError.isAmplifyError(error) && error.resolution) { message = `${message}\nResolution: ${error.resolution}\n`; } } else message = String(error); diff --git a/packages/schema-generator/CHANGELOG.md b/packages/schema-generator/CHANGELOG.md index f74d158e93..488a2cbb71 100644 --- a/packages/schema-generator/CHANGELOG.md +++ b/packages/schema-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # @aws-amplify/schema-generator +## 1.2.5 + +### Patch Changes + +- b56d344: update aws-cdk lib to ^2.158.0 +- b56d344: Upgrade @aws-amplify/graphql-schema-generator to v0.11.0 + ## 1.2.4 ### Patch Changes diff --git a/packages/schema-generator/package.json b/packages/schema-generator/package.json index 7ee91355ad..f860dbc562 100644 --- a/packages/schema-generator/package.json +++ b/packages/schema-generator/package.json @@ -1,6 +1,6 @@ { "name": "@aws-amplify/schema-generator", - "version": "1.2.4", + "version": "1.2.5", "type": "module", "publishConfig": { "access": "public" @@ -18,7 +18,7 @@ "update:api": "api-extractor run --local" }, "dependencies": { - "@aws-amplify/graphql-schema-generator": "^0.9.4", + "@aws-amplify/graphql-schema-generator": "^0.11.0", "@aws-amplify/platform-core": "^1.0.5" }, "license": "Apache-2.0" diff --git a/scripts/cleanup_e2e_resources.ts b/scripts/cleanup_e2e_resources.ts index 536b7b41d2..97463ce744 100644 --- a/scripts/cleanup_e2e_resources.ts +++ b/scripts/cleanup_e2e_resources.ts @@ -6,6 +6,13 @@ import { StackStatus, StackSummary, } from '@aws-sdk/client-cloudformation'; +import { + CloudWatchLogsClient, + DeleteLogGroupCommand, + DescribeLogGroupsCommand, + DescribeLogGroupsCommandOutput, + LogGroup, +} from '@aws-sdk/client-cloudwatch-logs'; import { Bucket, DeleteBucketCommand, @@ -70,6 +77,9 @@ const amplifyClient = new AmplifyClient({ const cfnClient = new CloudFormationClient({ maxAttempts: 5, }); +const cloudWatchClient = new CloudWatchLogsClient({ + maxAttempts: 5, +}); const cognitoClient = new CognitoIdentityProviderClient({ maxAttempts: 5, }); @@ -91,6 +101,7 @@ const TEST_CDK_RESOURCE_PREFIX = 'test-cdk'; /** * Stacks are considered stale after 2 hours. + * Log groups are considered stale after 7 days. For troubleshooting purposes. * Other resources are considered stale after 3 hours. * * Stack deletion triggers asynchronous resource deletion while this script is running. 
@@ -100,6 +111,7 @@ const TEST_CDK_RESOURCE_PREFIX = 'test-cdk'; */ const stackStaleDurationInMilliseconds = 2 * 60 * 60 * 1000; // 2 hours in milliseconds const staleDurationInMilliseconds = 3 * 60 * 60 * 1000; // 3 hours in milliseconds +const logGroupStaleDurationInMilliseconds = 7 * 24 * 60 * 60 * 1000; // 7 days in milliseconds const isStackStale = ( stackSummary: StackSummary | undefined ): boolean | undefined => { @@ -113,6 +125,17 @@ const isStackStale = ( ); }; +const isLogGroupStale = ( + logGroup: LogGroup | undefined +): boolean | undefined => { + if (!logGroup?.creationTime) { + return; + } + return ( + now.getTime() - logGroup.creationTime > logGroupStaleDurationInMilliseconds + ); +}; + const isStale = (creationDate: Date | undefined): boolean | undefined => { if (!creationDate) { return; @@ -546,3 +569,47 @@ for (const staleDynamoDBTable of allStaleDynamoDBTables) { ); } } + +const listAllStaleTestLogGroups = async (): Promise<Array<LogGroup>> => { + let nextToken: string | undefined = undefined; + const logGroups: Array<LogGroup> = []; + do { + const listLogGroupsResponse: DescribeLogGroupsCommandOutput = + await cloudWatchClient.send( + new DescribeLogGroupsCommand({ + nextToken, + }) + ); + nextToken = listLogGroupsResponse.nextToken; + listLogGroupsResponse.logGroups + ?.filter( + (logGroup) => + (logGroup.logGroupName?.startsWith(TEST_AMPLIFY_RESOURCE_PREFIX) || + logGroup.logGroupName?.startsWith( + `/aws/lambda/${TEST_AMPLIFY_RESOURCE_PREFIX}` + )) && + isLogGroupStale(logGroup) + ) + .forEach((item) => { + logGroups.push(item); + }); + } while (nextToken); + return logGroups; +}; + +const allStaleLogGroups = await listAllStaleTestLogGroups(); +for (const logGroup of allStaleLogGroups) { + try { + await cloudWatchClient.send( + new DeleteLogGroupCommand({ + logGroupName: logGroup.logGroupName, + }) + ); + console.log(`Successfully deleted ${logGroup.logGroupName} log group`); + } catch (e) { + const errorMessage = e instanceof Error ? e.message : ''; + console.log( + `Failed to delete ${logGroup.logGroupName} log group. ${errorMessage}` + ); + } +} diff --git a/scripts/components/api-changes-validator/api_usage_statements_generators.ts b/scripts/components/api-changes-validator/api_usage_statements_generators.ts index 3249c91d82..d4e94526a9 100644 --- a/scripts/components/api-changes-validator/api_usage_statements_generators.ts +++ b/scripts/components/api-changes-validator/api_usage_statements_generators.ts @@ -566,7 +566,18 @@ export class CallableUsageStatementsGenerator ).generate().usageStatement ?? ''; let returnValueAssignmentTarget = ''; if (this.functionType.type.kind !== ts.SyntaxKind.VoidKeyword) { - returnValueAssignmentTarget = `const returnValue: ${this.functionType.type.getText()} = `; + let returnType; + if (this.functionType.type.kind === ts.SyntaxKind.TypePredicate) { + // An example type predicate looks like this: + // '(input: unknown) => input is SampleType;' + // It's special syntax that tells the compiler it is safe to assume + // the narrowed type after invoking the check. + // But when it comes to value assignment, the result is treated as a boolean.
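+ // For example, a hypothetical declaration such as + // 'export const isSampleType: (input: unknown) => input is SampleType;' + // yields a usage statement that assigns the call result to a boolean + // (roughly 'const returnValue: boolean = ...;'), because a type predicate + // is not a valid standalone type annotation for a variable.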
+ returnType = 'boolean'; + } else { + returnType = this.functionType.type.getText(); + } + returnValueAssignmentTarget = `const returnValue: ${returnType} = `; } const minParameterUsage = new CallableParameterUsageStatementsGenerator( diff --git a/scripts/components/api-changes-validator/test-resources/test-projects/without-breaks/project-without-breaks/API.md b/scripts/components/api-changes-validator/test-resources/test-projects/without-breaks/project-without-breaks/API.md index 410b6690e2..aec702b4f8 100644 --- a/scripts/components/api-changes-validator/test-resources/test-projects/without-breaks/project-without-breaks/API.md +++ b/scripts/components/api-changes-validator/test-resources/test-projects/without-breaks/project-without-breaks/API.md @@ -84,4 +84,10 @@ export type SampleTypeThatReferencesFunction = { export type SampleIgnoredType = { someProperty: string; }; + +export const sampleTypePredicate: (input: unknown) => input is SampleType; + +export class SampleClassWithTypePredicate { + static sampleTypePredicate: (input: unknown) => input is SampleType; +} ``` diff --git a/scripts/components/api-changes-validator/test-resources/test-projects/without-breaks/project-without-breaks/src/index.ts b/scripts/components/api-changes-validator/test-resources/test-projects/without-breaks/project-without-breaks/src/index.ts index a6f2201d71..1243ed5af2 100644 --- a/scripts/components/api-changes-validator/test-resources/test-projects/without-breaks/project-without-breaks/src/index.ts +++ b/scripts/components/api-changes-validator/test-resources/test-projects/without-breaks/project-without-breaks/src/index.ts @@ -115,3 +115,13 @@ export type SampleTypeThatReferencesFunction = { export type SampleIgnoredType = { someProperty: number; }; + +export const sampleTypePredicate = (input: unknown): input is SampleType => { + throw new Error(); +}; + +export class SampleClassWithTypePredicate { + static sampleTypePredicate = (input: unknown): input is SampleType => { + throw new Error(); + }; +} diff --git a/scripts/components/sparse_test_matrix_generator.test.ts b/scripts/components/sparse_test_matrix_generator.test.ts new file mode 100644 index 0000000000..73a0a5af60 --- /dev/null +++ b/scripts/components/sparse_test_matrix_generator.test.ts @@ -0,0 +1,61 @@ +import { describe, it } from 'node:test'; +import assert from 'node:assert'; +import { SparseTestMatrixGenerator } from './sparse_test_matrix_generator.js'; +import { fileURLToPath } from 'url'; + +void describe('Sparse matrix generator', () => { + void it('generates sparse matrix', async () => { + const testDirectory = fileURLToPath( + new URL('./test-resources/sparse-generator-test-stubs', import.meta.url) + ); + const matrix = await new SparseTestMatrixGenerator({ + testGlobPattern: `${testDirectory}/*.test.ts`, + dimensions: { + dimension1: ['dim1val1', 'dim1val2', 'dim1,val3'], + dimension2: ['dim2val1', 'dim2val2'], + }, + maxTestsPerJob: 2, + }).generate(); + + assert.deepStrictEqual(matrix, { + include: [ + { + displayNames: 'test3.test.ts test2.test.ts', + dimension1: 'dim1val1', + dimension2: 'dim2val1', + testPaths: `${testDirectory}/test3.test.ts ${testDirectory}/test2.test.ts`, + }, + { + displayNames: 'test3.test.ts test2.test.ts', + dimension1: 'dim1val2', + dimension2: 'dim2val2', + testPaths: `${testDirectory}/test3.test.ts ${testDirectory}/test2.test.ts`, + }, + { + displayNames: 'test3.test.ts test2.test.ts', + dimension1: 'dim1,val3', + dimension2: 'dim2val1', + testPaths: `${testDirectory}/test3.test.ts 
${testDirectory}/test2.test.ts`, + }, + { + displayNames: 'test1.test.ts', + dimension1: 'dim1val1', + dimension2: 'dim2val1', + testPaths: `${testDirectory}/test1.test.ts`, + }, + { + displayNames: 'test1.test.ts', + dimension1: 'dim1val2', + dimension2: 'dim2val2', + testPaths: `${testDirectory}/test1.test.ts`, + }, + { + displayNames: 'test1.test.ts', + dimension1: 'dim1,val3', + dimension2: 'dim2val1', + testPaths: `${testDirectory}/test1.test.ts`, + }, + ], + }); + }); +}); diff --git a/scripts/components/sparse_test_matrix_generator.ts b/scripts/components/sparse_test_matrix_generator.ts new file mode 100644 index 0000000000..4daf7ce0f0 --- /dev/null +++ b/scripts/components/sparse_test_matrix_generator.ts @@ -0,0 +1,93 @@ +import { glob } from 'glob'; +import path from 'path'; + +// See https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/running-variations-of-jobs-in-a-workflow +type JobMatrix = { + include?: Array<Record<string, string>>; +} & Record<string, Array<string>>; + +export type SparseTestMatrixGeneratorProps = { + testGlobPattern: string; + maxTestsPerJob: number; + dimensions: Record<string, Array<string>>; +}; + +/** + * Generates a sparse test matrix. + * + * The sparse matrix is created in such a way that: + * 1. Every test is included. + * 2. Every dimension value is included. + * 3. The algorithm avoids a full cartesian product of dimensions, emitting just a minimal subset of combinations that covers all values. + */ +export class SparseTestMatrixGenerator { + /** + * Creates a sparse test matrix generator. + */ + constructor(private readonly props: SparseTestMatrixGeneratorProps) { + if (Object.keys(props.dimensions).length === 0) { + throw new Error('At least one dimension is required'); + } + } + + generate = async (): Promise<JobMatrix> => { + const testPaths = await glob(this.props.testGlobPattern); + + const matrix: JobMatrix = {}; + matrix.include = []; + + for (const testPathsBatch of this.chunkArray( + testPaths, + this.props.maxTestsPerJob + )) { + const dimensionsIndexes: Record<string, number> = {}; + const dimensionCoverageComplete: Record<string, boolean> = {}; + + Object.keys(this.props.dimensions).forEach((key) => { + dimensionsIndexes[key] = 0; + dimensionCoverageComplete[key] = false; + }); + + let allDimensionsComplete = false; + + do { + const matrixEntry: Record<string, string> = {}; + matrixEntry.displayNames = testPathsBatch + .map((testPath) => path.basename(testPath)) + .join(' '); + Object.keys(this.props.dimensions).forEach((key) => { + matrixEntry[key] = this.props.dimensions[key][dimensionsIndexes[key]]; + }); + matrixEntry.testPaths = testPathsBatch.join(' '); + matrix.include?.push(matrixEntry); + + Object.keys(this.props.dimensions).forEach((key) => { + dimensionsIndexes[key]++; + if (dimensionsIndexes[key] === this.props.dimensions[key].length) { + // mark this dimension as complete and restart its cycle until all dimensions have been used. + dimensionCoverageComplete[key] = true; + dimensionsIndexes[key] = 0; + } + }); + + // check whether all dimensions have been covered.
+ allDimensionsComplete = Object.keys(this.props.dimensions).reduce( + (acc, key) => { + return acc && dimensionCoverageComplete[key]; + }, + true + ); + } while (!allDimensionsComplete); + } + + return matrix; + }; + + private chunkArray = <T>(array: Array<T>, chunkSize: number) => { + const result: Array<Array<T>> = []; + for (let i = 0; i < array.length; i += chunkSize) { + result.push(array.slice(i, i + chunkSize)); + } + return result; + }; +} diff --git a/scripts/components/test-resources/sparse-generator-test-stubs/test1.test.ts b/scripts/components/test-resources/sparse-generator-test-stubs/test1.test.ts new file mode 100644 index 0000000000..af6cf15235 --- /dev/null +++ b/scripts/components/test-resources/sparse-generator-test-stubs/test1.test.ts @@ -0,0 +1 @@ +// Empty, content doesn't matter. diff --git a/scripts/components/test-resources/sparse-generator-test-stubs/test2.test.ts b/scripts/components/test-resources/sparse-generator-test-stubs/test2.test.ts new file mode 100644 index 0000000000..af6cf15235 --- /dev/null +++ b/scripts/components/test-resources/sparse-generator-test-stubs/test2.test.ts @@ -0,0 +1 @@ +// Empty, content doesn't matter. diff --git a/scripts/components/test-resources/sparse-generator-test-stubs/test3.test.ts b/scripts/components/test-resources/sparse-generator-test-stubs/test3.test.ts new file mode 100644 index 0000000000..af6cf15235 --- /dev/null +++ b/scripts/components/test-resources/sparse-generator-test-stubs/test3.test.ts @@ -0,0 +1 @@ +// Empty, content doesn't matter. diff --git a/scripts/generate_sparse_test_matrix.ts b/scripts/generate_sparse_test_matrix.ts new file mode 100644 index 0000000000..c5a76eb965 --- /dev/null +++ b/scripts/generate_sparse_test_matrix.ts @@ -0,0 +1,31 @@ +import { SparseTestMatrixGenerator } from './components/sparse_test_matrix_generator.js'; + +// This script generates a sparse test matrix. +// Every test must run on each type of OS and each version of Node.js. +// However, we don't have to run every combination. + +if (process.argv.length < 3) { + console.log( + "Usage: npx tsx scripts/generate_sparse_test_matrix.ts '<testGlobPattern>' [maxTestsPerJob]" + ); +} + +const testGlobPattern = process.argv[2]; +const maxTestsPerJob = process.argv[3] ? parseInt(process.argv[3]) : 2; + +if (!Number.isInteger(maxTestsPerJob)) { + throw new Error( + 'Invalid max tests per job. If you are using a glob pattern with stars in bash, put it in quotes' + ); +} + +const matrix = await new SparseTestMatrixGenerator({ + testGlobPattern, + maxTestsPerJob, + dimensions: { + 'node-version': ['18', '20'], + os: ['ubuntu-latest', 'macos-14-xlarge', 'windows-latest'], + }, +}).generate(); + +console.log(JSON.stringify(matrix)); diff --git a/templates/construct/package.json b/templates/construct/package.json index 2a879e3768..3494b52996 100644 --- a/templates/construct/package.json +++ b/templates/construct/package.json @@ -18,7 +18,7 @@ }, "license": "Apache-2.0", "peerDependencies": { - "aws-cdk-lib": "^2.152.0", + "aws-cdk-lib": "^2.158.0", "constructs": "^10.0.0" } }
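The executeWithRetry helper added to the ConversationHandlerTestProject retries Bedrock-dependent assertions and surfaces all failures via AggregateError. A minimal, self-contained sketch of the same pattern (the attempt counter and simulated flaky assertion below are hypothetical, not part of this change):

```ts
import assert from 'node:assert';

// Same shape as the helper in the conversation handler test project: retry a
// void-returning async callable and throw an AggregateError only if every
// attempt fails.
const executeWithRetry = async (
  callable: () => Promise<void>,
  maxAttempts = 3
): Promise<void> => {
  const collectedErrors: unknown[] = [];
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    try {
      await callable();
      return; // success, stop retrying
    } catch (e) {
      collectedErrors.push(e);
    }
  }
  throw new AggregateError(collectedErrors);
};

// Hypothetical usage: an assertion that only passes once a flaky dependency
// has produced a usable response (simulated here with a counter).
let attempts = 0;
await executeWithRetry(async () => {
  attempts += 1;
  assert.ok(attempts >= 2, 'simulated empty model response');
});
```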
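The AmplifyError.isAmplifyError type predicate added in packages/platform-core/src/errors/amplify_error.ts replaces instanceof checks, which can return false when more than one copy of the class is loaded from node_modules. A minimal consumer-side sketch (the formatError helper is hypothetical, not part of this change):

```ts
import { AmplifyError } from '@aws-amplify/platform-core';

// Hypothetical helper that formats any caught error for display.
const formatError = (error: unknown): string => {
  if (AmplifyError.isAmplifyError(error)) {
    // Narrowed to AmplifyError even when the instance comes from a
    // duplicated copy of the package, where `instanceof` would fail.
    const resolution = error.resolution
      ? `\nResolution: ${error.resolution}`
      : '';
    return `${error.name}: ${error.message}${resolution}`;
  }
  return error instanceof Error ? error.message : String(error);
};
```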
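The SparseTestMatrixGenerator introduced in scripts/components/sparse_test_matrix_generator.ts covers every test batch and every dimension value without enumerating the full cartesian product. A minimal sketch of invoking it directly, mirroring scripts/generate_sparse_test_matrix.ts (the glob pattern below is hypothetical):

```ts
import { SparseTestMatrixGenerator } from './components/sparse_test_matrix_generator.js';

// With 2 node versions and 3 operating systems, a full cartesian matrix
// would schedule 6 jobs per test batch; the sparse matrix emits only
// max(2, 3) = 3 jobs per batch while still using every dimension value.
const matrix = await new SparseTestMatrixGenerator({
  testGlobPattern: 'lib/test-e2e/**/*.test.js', // hypothetical pattern
  maxTestsPerJob: 2,
  dimensions: {
    'node-version': ['18', '20'],
    os: ['ubuntu-latest', 'macos-14-xlarge', 'windows-latest'],
  },
}).generate();

// Each entry in matrix.include carries the dimension values plus
// space-separated testPaths and displayNames for its batch of tests.
console.log(JSON.stringify(matrix, null, 2));
```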