From df938a4543ce5a527984ef88fe27326f1a1c1e45 Mon Sep 17 00:00:00 2001 From: QuantumGhost Date: Mon, 12 Jan 2026 15:07:53 +0800 Subject: [PATCH 1/4] ci: add HITL test env deployment action (#30846) --- .github/workflows/deploy-hitl.yml | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 .github/workflows/deploy-hitl.yml diff --git a/.github/workflows/deploy-hitl.yml b/.github/workflows/deploy-hitl.yml new file mode 100644 index 0000000000..8144ba4f08 --- /dev/null +++ b/.github/workflows/deploy-hitl.yml @@ -0,0 +1,29 @@ +name: Deploy HITL + +on: + workflow_run: + workflows: ["Build and Push API & Web"] + branches: + - "feat/hitl-frontend" + - "feat/hitl-backend" + types: + - completed + +jobs: + deploy: + runs-on: ubuntu-latest + if: | + github.event.workflow_run.conclusion == 'success' && + ( + github.event.workflow_run.head_branch == 'feat/hitl-frontend' || + github.event.workflow_run.head_branch == 'feat/hitl-backend' + ) + steps: + - name: Deploy to server + uses: appleboy/ssh-action@v0.1.8 + with: + host: ${{ secrets.HITL_SSH_HOST }} + username: ${{ secrets.SSH_USER }} + key: ${{ secrets.SSH_PRIVATE_KEY }} + script: | + ${{ vars.SSH_SCRIPT || secrets.SSH_SCRIPT }} From 00698e41b7c6bbded6869dbfaa05bf35350039e3 Mon Sep 17 00:00:00 2001 From: Stephen Zhou <38493346+hyoban@users.noreply.github.com> Date: Mon, 12 Jan 2026 15:33:20 +0800 Subject: [PATCH 2/4] build: limit esbuild, glob, docker base version to avoid cve (#30848) --- web/Dockerfile | 2 +- web/package.json | 4 +- web/pnpm-lock.yaml | 626 +++++++++++++-------------------------------- 3 files changed, 187 insertions(+), 445 deletions(-) diff --git a/web/Dockerfile b/web/Dockerfile index 8697793145..9e08910a77 100644 --- a/web/Dockerfile +++ b/web/Dockerfile @@ -1,5 +1,5 @@ # base image -FROM node:22-alpine3.21 AS base +FROM node:22.21.1-alpine3.23 AS base LABEL maintainer="takatost@gmail.com" # if you located in China, you can use aliyun mirror to speed up diff --git a/web/package.json b/web/package.json index 7537b942fb..4019e49cd9 100644 --- a/web/package.json +++ b/web/package.json @@ -236,7 +236,8 @@ "brace-expansion@<2.0.2": "2.0.2", "devalue@<5.3.2": "5.3.2", "es-iterator-helpers": "npm:@nolyfill/es-iterator-helpers@^1", - "esbuild@<0.25.0": "0.25.0", + "esbuild@<0.27.2": "0.27.2", + "glob@>=10.2.0,<10.5.0": "11.1.0", "hasown": "npm:@nolyfill/hasown@^1", "is-arguments": "npm:@nolyfill/is-arguments@^1", "is-core-module": "npm:@nolyfill/is-core-module@^1", @@ -278,7 +279,6 @@ "@types/react-dom": "~19.2.3", "brace-expansion": "~2.0", "canvas": "^3.2.0", - "esbuild": "~0.25.0", "pbkdf2": "~3.1.3", "prismjs": "~1.30", "string-width": "~4.2.3" diff --git a/web/pnpm-lock.yaml b/web/pnpm-lock.yaml index f39f7503d9..853c366025 100644 --- a/web/pnpm-lock.yaml +++ b/web/pnpm-lock.yaml @@ -5,23 +5,17 @@ settings: excludeLinksFromLockfile: false overrides: + '@eslint/plugin-kit': ~0.3 '@types/react': ~19.2.7 '@types/react-dom': ~19.2.3 - string-width: ~4.2.3 - '@eslint/plugin-kit': ~0.3 + brace-expansion: ~2.0 canvas: ^3.2.0 - esbuild: ~0.25.0 pbkdf2: ~3.1.3 prismjs: ~1.30 - brace-expansion: ~2.0 - '@monaco-editor/loader': 1.5.0 + string-width: ~4.2.3 '@eslint/plugin-kit@<0.3.4': 0.3.4 - brace-expansion@<2.0.2: 2.0.2 - devalue@<5.3.2: 5.3.2 - esbuild@<0.25.0: 0.25.0 - pbkdf2@<3.1.3: 3.1.3 - prismjs@<1.30.0: 1.30.0 - vite@<6.4.1: 6.4.1 + '@monaco-editor/loader': 1.5.0 + '@nolyfill/safe-buffer': npm:safe-buffer@^5.2.1 array-includes: npm:@nolyfill/array-includes@^1 array.prototype.findlast: 
npm:@nolyfill/array.prototype.findlast@^1 array.prototype.findlastindex: npm:@nolyfill/array.prototype.findlastindex@^1 @@ -29,7 +23,11 @@ overrides: array.prototype.flatmap: npm:@nolyfill/array.prototype.flatmap@^1 array.prototype.tosorted: npm:@nolyfill/array.prototype.tosorted@^1 assert: npm:@nolyfill/assert@^1 + brace-expansion@<2.0.2: 2.0.2 + devalue@<5.3.2: 5.3.2 es-iterator-helpers: npm:@nolyfill/es-iterator-helpers@^1 + esbuild@<0.27.2: 0.27.2 + glob@>=10.2.0,<10.5.0: 11.1.0 hasown: npm:@nolyfill/hasown@^1 is-arguments: npm:@nolyfill/is-arguments@^1 is-core-module: npm:@nolyfill/is-core-module@^1 @@ -41,8 +39,9 @@ overrides: object.fromentries: npm:@nolyfill/object.fromentries@^1 object.groupby: npm:@nolyfill/object.groupby@^1 object.values: npm:@nolyfill/object.values@^1 + pbkdf2@<3.1.3: 3.1.3 + prismjs@<1.30.0: 1.30.0 safe-buffer: ^5.2.1 - '@nolyfill/safe-buffer': npm:safe-buffer@^5.2.1 safe-regex-test: npm:@nolyfill/safe-regex-test@^1 safer-buffer: npm:@nolyfill/safer-buffer@^1 side-channel: npm:@nolyfill/side-channel@^1 @@ -51,6 +50,7 @@ overrides: string.prototype.repeat: npm:@nolyfill/string.prototype.repeat@^1 string.prototype.trimend: npm:@nolyfill/string.prototype.trimend@^1 typed-array-buffer: npm:@nolyfill/typed-array-buffer@^1 + vite@<6.4.1: 6.4.1 which-typed-array: npm:@nolyfill/which-typed-array@^1 importers: @@ -360,7 +360,7 @@ importers: version: 2.3.13(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) '@mdx-js/loader': specifier: ^3.1.1 - version: 3.1.1(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)) + version: 3.1.1(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)) '@mdx-js/react': specifier: ^3.1.1 version: 3.1.1(@types/react@19.2.7)(react@19.2.3) @@ -372,7 +372,7 @@ importers: version: 15.5.9 '@next/mdx': specifier: 15.5.9 - version: 15.5.9(@mdx-js/loader@3.1.1(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)))(@mdx-js/react@3.1.1(@types/react@19.2.7)(react@19.2.3)) + version: 15.5.9(@mdx-js/loader@3.1.1(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)))(@mdx-js/react@3.1.1(@types/react@19.2.7)(react@19.2.3)) '@rgrove/parse-xml': specifier: ^4.2.0 version: 4.2.0 @@ -393,7 +393,7 @@ importers: version: 9.1.13(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2))) '@storybook/nextjs': specifier: 9.1.13 - version: 9.1.13(esbuild@0.25.0)(next@15.5.9(@babel/core@7.28.5)(@playwright/test@1.57.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(sass@1.95.0))(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(sass@1.95.0)(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))(type-fest@4.2.0)(typescript@5.9.3)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)) + version: 9.1.13(esbuild@0.27.2)(next@15.5.9(@babel/core@7.28.5)(@playwright/test@1.57.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(sass@1.95.0))(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(sass@1.95.0)(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))(type-fest@4.2.0)(typescript@5.9.3)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)) '@storybook/react': specifier: 9.1.17 version: 
9.1.17(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))(typescript@5.9.3) @@ -1411,308 +1411,158 @@ packages: resolution: {integrity: sha512-Q9hjxWI5xBM+qW2enxfe8wDKdFWMfd0Z29k5ZJnuBqD/CasY5Zryj09aCA6owbGATWz+39p5uIdaHXpopOcG8g==} engines: {node: '>=10'} - '@esbuild/aix-ppc64@0.25.0': - resolution: {integrity: sha512-O7vun9Sf8DFjH2UtqK8Ku3LkquL9SZL8OLY1T5NZkA34+wG3OQF7cl4Ql8vdNzM6fzBbYfLaiRLIOZ+2FOCgBQ==} + '@esbuild/aix-ppc64@0.27.2': + resolution: {integrity: sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==} engines: {node: '>=18'} cpu: [ppc64] os: [aix] - '@esbuild/aix-ppc64@0.25.12': - resolution: {integrity: sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==} - engines: {node: '>=18'} - cpu: [ppc64] - os: [aix] - - '@esbuild/android-arm64@0.25.0': - resolution: {integrity: sha512-grvv8WncGjDSyUBjN9yHXNt+cq0snxXbDxy5pJtzMKGmmpPxeAmAhWxXI+01lU5rwZomDgD3kJwulEnhTRUd6g==} + '@esbuild/android-arm64@0.27.2': + resolution: {integrity: sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA==} engines: {node: '>=18'} cpu: [arm64] os: [android] - '@esbuild/android-arm64@0.25.12': - resolution: {integrity: sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg==} - engines: {node: '>=18'} - cpu: [arm64] - os: [android] - - '@esbuild/android-arm@0.25.0': - resolution: {integrity: sha512-PTyWCYYiU0+1eJKmw21lWtC+d08JDZPQ5g+kFyxP0V+es6VPPSUhM6zk8iImp2jbV6GwjX4pap0JFbUQN65X1g==} + '@esbuild/android-arm@0.27.2': + resolution: {integrity: sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA==} engines: {node: '>=18'} cpu: [arm] os: [android] - '@esbuild/android-arm@0.25.12': - resolution: {integrity: sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg==} - engines: {node: '>=18'} - cpu: [arm] - os: [android] - - '@esbuild/android-x64@0.25.0': - resolution: {integrity: sha512-m/ix7SfKG5buCnxasr52+LI78SQ+wgdENi9CqyCXwjVR2X4Jkz+BpC3le3AoBPYTC9NHklwngVXvbJ9/Akhrfg==} + '@esbuild/android-x64@0.27.2': + resolution: {integrity: sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A==} engines: {node: '>=18'} cpu: [x64] os: [android] - '@esbuild/android-x64@0.25.12': - resolution: {integrity: sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg==} - engines: {node: '>=18'} - cpu: [x64] - os: [android] - - '@esbuild/darwin-arm64@0.25.0': - resolution: {integrity: sha512-mVwdUb5SRkPayVadIOI78K7aAnPamoeFR2bT5nszFUZ9P8UpK4ratOdYbZZXYSqPKMHfS1wdHCJk1P1EZpRdvw==} + '@esbuild/darwin-arm64@0.27.2': + resolution: {integrity: sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==} engines: {node: '>=18'} cpu: [arm64] os: [darwin] - '@esbuild/darwin-arm64@0.25.12': - resolution: {integrity: sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==} - engines: {node: '>=18'} - cpu: [arm64] - os: [darwin] - - '@esbuild/darwin-x64@0.25.0': - resolution: {integrity: sha512-DgDaYsPWFTS4S3nWpFcMn/33ZZwAAeAFKNHNa1QN0rI4pUjgqf0f7ONmXf6d22tqTY+H9FNdgeaAa+YIFUn2Rg==} + '@esbuild/darwin-x64@0.27.2': + resolution: {integrity: 
sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==} engines: {node: '>=18'} cpu: [x64] os: [darwin] - '@esbuild/darwin-x64@0.25.12': - resolution: {integrity: sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA==} - engines: {node: '>=18'} - cpu: [x64] - os: [darwin] - - '@esbuild/freebsd-arm64@0.25.0': - resolution: {integrity: sha512-VN4ocxy6dxefN1MepBx/iD1dH5K8qNtNe227I0mnTRjry8tj5MRk4zprLEdG8WPyAPb93/e4pSgi1SoHdgOa4w==} + '@esbuild/freebsd-arm64@0.27.2': + resolution: {integrity: sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g==} engines: {node: '>=18'} cpu: [arm64] os: [freebsd] - '@esbuild/freebsd-arm64@0.25.12': - resolution: {integrity: sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg==} - engines: {node: '>=18'} - cpu: [arm64] - os: [freebsd] - - '@esbuild/freebsd-x64@0.25.0': - resolution: {integrity: sha512-mrSgt7lCh07FY+hDD1TxiTyIHyttn6vnjesnPoVDNmDfOmggTLXRv8Id5fNZey1gl/V2dyVK1VXXqVsQIiAk+A==} + '@esbuild/freebsd-x64@0.27.2': + resolution: {integrity: sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA==} engines: {node: '>=18'} cpu: [x64] os: [freebsd] - '@esbuild/freebsd-x64@0.25.12': - resolution: {integrity: sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ==} - engines: {node: '>=18'} - cpu: [x64] - os: [freebsd] - - '@esbuild/linux-arm64@0.25.0': - resolution: {integrity: sha512-9QAQjTWNDM/Vk2bgBl17yWuZxZNQIF0OUUuPZRKoDtqF2k4EtYbpyiG5/Dk7nqeK6kIJWPYldkOcBqjXjrUlmg==} + '@esbuild/linux-arm64@0.27.2': + resolution: {integrity: sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw==} engines: {node: '>=18'} cpu: [arm64] os: [linux] - '@esbuild/linux-arm64@0.25.12': - resolution: {integrity: sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ==} - engines: {node: '>=18'} - cpu: [arm64] - os: [linux] - - '@esbuild/linux-arm@0.25.0': - resolution: {integrity: sha512-vkB3IYj2IDo3g9xX7HqhPYxVkNQe8qTK55fraQyTzTX/fxaDtXiEnavv9geOsonh2Fd2RMB+i5cbhu2zMNWJwg==} + '@esbuild/linux-arm@0.27.2': + resolution: {integrity: sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==} engines: {node: '>=18'} cpu: [arm] os: [linux] - '@esbuild/linux-arm@0.25.12': - resolution: {integrity: sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw==} - engines: {node: '>=18'} - cpu: [arm] - os: [linux] - - '@esbuild/linux-ia32@0.25.0': - resolution: {integrity: sha512-43ET5bHbphBegyeqLb7I1eYn2P/JYGNmzzdidq/w0T8E2SsYL1U6un2NFROFRg1JZLTzdCoRomg8Rvf9M6W6Gg==} + '@esbuild/linux-ia32@0.27.2': + resolution: {integrity: sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w==} engines: {node: '>=18'} cpu: [ia32] os: [linux] - '@esbuild/linux-ia32@0.25.12': - resolution: {integrity: sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA==} - engines: {node: '>=18'} - cpu: [ia32] - os: [linux] - - '@esbuild/linux-loong64@0.25.0': - resolution: {integrity: sha512-fC95c/xyNFueMhClxJmeRIj2yrSMdDfmqJnyOY4ZqsALkDrrKJfIg5NTMSzVBr5YW1jf+l7/cndBfP3MSDpoHw==} + '@esbuild/linux-loong64@0.27.2': + resolution: {integrity: sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg==} engines: 
{node: '>=18'} cpu: [loong64] os: [linux] - '@esbuild/linux-loong64@0.25.12': - resolution: {integrity: sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng==} - engines: {node: '>=18'} - cpu: [loong64] - os: [linux] - - '@esbuild/linux-mips64el@0.25.0': - resolution: {integrity: sha512-nkAMFju7KDW73T1DdH7glcyIptm95a7Le8irTQNO/qtkoyypZAnjchQgooFUDQhNAy4iu08N79W4T4pMBwhPwQ==} + '@esbuild/linux-mips64el@0.27.2': + resolution: {integrity: sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw==} engines: {node: '>=18'} cpu: [mips64el] os: [linux] - '@esbuild/linux-mips64el@0.25.12': - resolution: {integrity: sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw==} - engines: {node: '>=18'} - cpu: [mips64el] - os: [linux] - - '@esbuild/linux-ppc64@0.25.0': - resolution: {integrity: sha512-NhyOejdhRGS8Iwv+KKR2zTq2PpysF9XqY+Zk77vQHqNbo/PwZCzB5/h7VGuREZm1fixhs4Q/qWRSi5zmAiO4Fw==} + '@esbuild/linux-ppc64@0.27.2': + resolution: {integrity: sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ==} engines: {node: '>=18'} cpu: [ppc64] os: [linux] - '@esbuild/linux-ppc64@0.25.12': - resolution: {integrity: sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA==} - engines: {node: '>=18'} - cpu: [ppc64] - os: [linux] - - '@esbuild/linux-riscv64@0.25.0': - resolution: {integrity: sha512-5S/rbP5OY+GHLC5qXp1y/Mx//e92L1YDqkiBbO9TQOvuFXM+iDqUNG5XopAnXoRH3FjIUDkeGcY1cgNvnXp/kA==} + '@esbuild/linux-riscv64@0.27.2': + resolution: {integrity: sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA==} engines: {node: '>=18'} cpu: [riscv64] os: [linux] - '@esbuild/linux-riscv64@0.25.12': - resolution: {integrity: sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w==} - engines: {node: '>=18'} - cpu: [riscv64] - os: [linux] - - '@esbuild/linux-s390x@0.25.0': - resolution: {integrity: sha512-XM2BFsEBz0Fw37V0zU4CXfcfuACMrppsMFKdYY2WuTS3yi8O1nFOhil/xhKTmE1nPmVyvQJjJivgDT+xh8pXJA==} + '@esbuild/linux-s390x@0.27.2': + resolution: {integrity: sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w==} engines: {node: '>=18'} cpu: [s390x] os: [linux] - '@esbuild/linux-s390x@0.25.12': - resolution: {integrity: sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg==} - engines: {node: '>=18'} - cpu: [s390x] - os: [linux] - - '@esbuild/linux-x64@0.25.0': - resolution: {integrity: sha512-9yl91rHw/cpwMCNytUDxwj2XjFpxML0y9HAOH9pNVQDpQrBxHy01Dx+vaMu0N1CKa/RzBD2hB4u//nfc+Sd3Cw==} + '@esbuild/linux-x64@0.27.2': + resolution: {integrity: sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==} engines: {node: '>=18'} cpu: [x64] os: [linux] - '@esbuild/linux-x64@0.25.12': - resolution: {integrity: sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw==} - engines: {node: '>=18'} - cpu: [x64] - os: [linux] - - '@esbuild/netbsd-arm64@0.25.0': - resolution: {integrity: sha512-RuG4PSMPFfrkH6UwCAqBzauBWTygTvb1nxWasEJooGSJ/NwRw7b2HOwyRTQIU97Hq37l3npXoZGYMy3b3xYvPw==} + '@esbuild/netbsd-arm64@0.27.2': + resolution: {integrity: sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw==} engines: {node: '>=18'} cpu: [arm64] os: [netbsd] - '@esbuild/netbsd-arm64@0.25.12': - 
resolution: {integrity: sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg==} - engines: {node: '>=18'} - cpu: [arm64] - os: [netbsd] - - '@esbuild/netbsd-x64@0.25.0': - resolution: {integrity: sha512-jl+qisSB5jk01N5f7sPCsBENCOlPiS/xptD5yxOx2oqQfyourJwIKLRA2yqWdifj3owQZCL2sn6o08dBzZGQzA==} + '@esbuild/netbsd-x64@0.27.2': + resolution: {integrity: sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA==} engines: {node: '>=18'} cpu: [x64] os: [netbsd] - '@esbuild/netbsd-x64@0.25.12': - resolution: {integrity: sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ==} - engines: {node: '>=18'} - cpu: [x64] - os: [netbsd] - - '@esbuild/openbsd-arm64@0.25.0': - resolution: {integrity: sha512-21sUNbq2r84YE+SJDfaQRvdgznTD8Xc0oc3p3iW/a1EVWeNj/SdUCbm5U0itZPQYRuRTW20fPMWMpcrciH2EJw==} + '@esbuild/openbsd-arm64@0.27.2': + resolution: {integrity: sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA==} engines: {node: '>=18'} cpu: [arm64] os: [openbsd] - '@esbuild/openbsd-arm64@0.25.12': - resolution: {integrity: sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A==} - engines: {node: '>=18'} - cpu: [arm64] - os: [openbsd] - - '@esbuild/openbsd-x64@0.25.0': - resolution: {integrity: sha512-2gwwriSMPcCFRlPlKx3zLQhfN/2WjJ2NSlg5TKLQOJdV0mSxIcYNTMhk3H3ulL/cak+Xj0lY1Ym9ysDV1igceg==} + '@esbuild/openbsd-x64@0.27.2': + resolution: {integrity: sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg==} engines: {node: '>=18'} cpu: [x64] os: [openbsd] - '@esbuild/openbsd-x64@0.25.12': - resolution: {integrity: sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw==} - engines: {node: '>=18'} - cpu: [x64] - os: [openbsd] - - '@esbuild/openharmony-arm64@0.25.12': - resolution: {integrity: sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg==} + '@esbuild/openharmony-arm64@0.27.2': + resolution: {integrity: sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag==} engines: {node: '>=18'} cpu: [arm64] os: [openharmony] - '@esbuild/sunos-x64@0.25.0': - resolution: {integrity: sha512-bxI7ThgLzPrPz484/S9jLlvUAHYMzy6I0XiU1ZMeAEOBcS0VePBFxh1JjTQt3Xiat5b6Oh4x7UC7IwKQKIJRIg==} + '@esbuild/sunos-x64@0.27.2': + resolution: {integrity: sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg==} engines: {node: '>=18'} cpu: [x64] os: [sunos] - '@esbuild/sunos-x64@0.25.12': - resolution: {integrity: sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w==} - engines: {node: '>=18'} - cpu: [x64] - os: [sunos] - - '@esbuild/win32-arm64@0.25.0': - resolution: {integrity: sha512-ZUAc2YK6JW89xTbXvftxdnYy3m4iHIkDtK3CLce8wg8M2L+YZhIvO1DKpxrd0Yr59AeNNkTiic9YLf6FTtXWMw==} + '@esbuild/win32-arm64@0.27.2': + resolution: {integrity: sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg==} engines: {node: '>=18'} cpu: [arm64] os: [win32] - '@esbuild/win32-arm64@0.25.12': - resolution: {integrity: sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg==} - engines: {node: '>=18'} - cpu: [arm64] - os: [win32] - - '@esbuild/win32-ia32@0.25.0': - resolution: {integrity: 
sha512-eSNxISBu8XweVEWG31/JzjkIGbGIJN/TrRoiSVZwZ6pkC6VX4Im/WV2cz559/TXLcYbcrDN8JtKgd9DJVIo8GA==} + '@esbuild/win32-ia32@0.27.2': + resolution: {integrity: sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ==} engines: {node: '>=18'} cpu: [ia32] os: [win32] - '@esbuild/win32-ia32@0.25.12': - resolution: {integrity: sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ==} - engines: {node: '>=18'} - cpu: [ia32] - os: [win32] - - '@esbuild/win32-x64@0.25.0': - resolution: {integrity: sha512-ZENoHJBxA20C2zFzh6AI4fT6RraMzjYw4xKWemRTRmRVtN9c5DcH9r/f2ihEkMjOW5eGgrwCslG/+Y/3bL+DHQ==} - engines: {node: '>=18'} - cpu: [x64] - os: [win32] - - '@esbuild/win32-x64@0.25.12': - resolution: {integrity: sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA==} + '@esbuild/win32-x64@0.27.2': + resolution: {integrity: sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ==} engines: {node: '>=18'} cpu: [x64] os: [win32] @@ -5161,20 +5011,15 @@ packages: esbuild-register@3.6.0: resolution: {integrity: sha512-H2/S7Pm8a9CL1uhp9OvjwrBh5Pvx0H8qVOxNu8Wed9Y7qv56MPtq+GGM8RJpq6glYJn9Wspr8uw7l55uyinNeg==} peerDependencies: - esbuild: 0.25.0 + esbuild: 0.27.2 esbuild-wasm@0.27.2: resolution: {integrity: sha512-eUTnl8eh+v8UZIZh4MrMOKDAc8Lm7+NqP3pyuTORGFY1s/o9WoiJgKnwXy+te2J3hX7iRbFSHEyig7GsPeeJyw==} engines: {node: '>=18'} hasBin: true - esbuild@0.25.0: - resolution: {integrity: sha512-BXq5mqc8ltbaN34cDqWuYKyNhX8D/Z0J1xdtdQ8UcIIIyJyz+ZMKUt58tF3SrZ85jcfN/PZYhjR5uDQAYNVbuw==} - engines: {node: '>=18'} - hasBin: true - - esbuild@0.25.12: - resolution: {integrity: sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==} + esbuild@0.27.2: + resolution: {integrity: sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw==} engines: {node: '>=18'} hasBin: true @@ -9899,157 +9744,82 @@ snapshots: '@es-joy/resolve.exports@1.2.0': {} - '@esbuild/aix-ppc64@0.25.0': + '@esbuild/aix-ppc64@0.27.2': optional: true - '@esbuild/aix-ppc64@0.25.12': + '@esbuild/android-arm64@0.27.2': optional: true - '@esbuild/android-arm64@0.25.0': + '@esbuild/android-arm@0.27.2': optional: true - '@esbuild/android-arm64@0.25.12': + '@esbuild/android-x64@0.27.2': optional: true - '@esbuild/android-arm@0.25.0': + '@esbuild/darwin-arm64@0.27.2': optional: true - '@esbuild/android-arm@0.25.12': + '@esbuild/darwin-x64@0.27.2': optional: true - '@esbuild/android-x64@0.25.0': + '@esbuild/freebsd-arm64@0.27.2': optional: true - '@esbuild/android-x64@0.25.12': + '@esbuild/freebsd-x64@0.27.2': optional: true - '@esbuild/darwin-arm64@0.25.0': + '@esbuild/linux-arm64@0.27.2': optional: true - '@esbuild/darwin-arm64@0.25.12': + '@esbuild/linux-arm@0.27.2': optional: true - '@esbuild/darwin-x64@0.25.0': + '@esbuild/linux-ia32@0.27.2': optional: true - '@esbuild/darwin-x64@0.25.12': + '@esbuild/linux-loong64@0.27.2': optional: true - '@esbuild/freebsd-arm64@0.25.0': + '@esbuild/linux-mips64el@0.27.2': optional: true - '@esbuild/freebsd-arm64@0.25.12': + '@esbuild/linux-ppc64@0.27.2': optional: true - '@esbuild/freebsd-x64@0.25.0': + '@esbuild/linux-riscv64@0.27.2': optional: true - '@esbuild/freebsd-x64@0.25.12': + '@esbuild/linux-s390x@0.27.2': optional: true - '@esbuild/linux-arm64@0.25.0': + '@esbuild/linux-x64@0.27.2': optional: true - '@esbuild/linux-arm64@0.25.12': + '@esbuild/netbsd-arm64@0.27.2': optional: true - 
'@esbuild/linux-arm@0.25.0': + '@esbuild/netbsd-x64@0.27.2': optional: true - '@esbuild/linux-arm@0.25.12': + '@esbuild/openbsd-arm64@0.27.2': optional: true - '@esbuild/linux-ia32@0.25.0': + '@esbuild/openbsd-x64@0.27.2': optional: true - '@esbuild/linux-ia32@0.25.12': + '@esbuild/openharmony-arm64@0.27.2': optional: true - '@esbuild/linux-loong64@0.25.0': + '@esbuild/sunos-x64@0.27.2': optional: true - '@esbuild/linux-loong64@0.25.12': + '@esbuild/win32-arm64@0.27.2': optional: true - '@esbuild/linux-mips64el@0.25.0': + '@esbuild/win32-ia32@0.27.2': optional: true - '@esbuild/linux-mips64el@0.25.12': - optional: true - - '@esbuild/linux-ppc64@0.25.0': - optional: true - - '@esbuild/linux-ppc64@0.25.12': - optional: true - - '@esbuild/linux-riscv64@0.25.0': - optional: true - - '@esbuild/linux-riscv64@0.25.12': - optional: true - - '@esbuild/linux-s390x@0.25.0': - optional: true - - '@esbuild/linux-s390x@0.25.12': - optional: true - - '@esbuild/linux-x64@0.25.0': - optional: true - - '@esbuild/linux-x64@0.25.12': - optional: true - - '@esbuild/netbsd-arm64@0.25.0': - optional: true - - '@esbuild/netbsd-arm64@0.25.12': - optional: true - - '@esbuild/netbsd-x64@0.25.0': - optional: true - - '@esbuild/netbsd-x64@0.25.12': - optional: true - - '@esbuild/openbsd-arm64@0.25.0': - optional: true - - '@esbuild/openbsd-arm64@0.25.12': - optional: true - - '@esbuild/openbsd-x64@0.25.0': - optional: true - - '@esbuild/openbsd-x64@0.25.12': - optional: true - - '@esbuild/openharmony-arm64@0.25.12': - optional: true - - '@esbuild/sunos-x64@0.25.0': - optional: true - - '@esbuild/sunos-x64@0.25.12': - optional: true - - '@esbuild/win32-arm64@0.25.0': - optional: true - - '@esbuild/win32-arm64@0.25.12': - optional: true - - '@esbuild/win32-ia32@0.25.0': - optional: true - - '@esbuild/win32-ia32@0.25.12': - optional: true - - '@esbuild/win32-x64@0.25.0': - optional: true - - '@esbuild/win32-x64@0.25.12': + '@esbuild/win32-x64@0.27.2': optional: true '@eslint-community/eslint-plugin-eslint-comments@4.5.0(eslint@9.39.2(jiti@1.21.7))': @@ -10638,12 +10408,12 @@ snapshots: lexical: 0.38.2 yjs: 13.6.27 - '@mdx-js/loader@3.1.1(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3))': + '@mdx-js/loader@3.1.1(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3))': dependencies: '@mdx-js/mdx': 3.1.1 source-map: 0.7.6 optionalDependencies: - webpack: 5.103.0(esbuild@0.25.0)(uglify-js@3.19.3) + webpack: 5.103.0(esbuild@0.27.2)(uglify-js@3.19.3) transitivePeerDependencies: - supports-color @@ -10729,11 +10499,11 @@ snapshots: dependencies: fast-glob: 3.3.1 - '@next/mdx@15.5.9(@mdx-js/loader@3.1.1(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)))(@mdx-js/react@3.1.1(@types/react@19.2.7)(react@19.2.3))': + '@next/mdx@15.5.9(@mdx-js/loader@3.1.1(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)))(@mdx-js/react@3.1.1(@types/react@19.2.7)(react@19.2.3))': dependencies: source-map: 0.7.6 optionalDependencies: - '@mdx-js/loader': 3.1.1(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)) + '@mdx-js/loader': 3.1.1(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)) '@mdx-js/react': 3.1.1(@types/react@19.2.7)(react@19.2.3) '@next/swc-darwin-arm64@15.5.7': @@ -11006,7 +10776,7 @@ snapshots: playwright: 1.57.0 optional: true - '@pmmmwh/react-refresh-webpack-plugin@0.5.17(react-refresh@0.14.2)(type-fest@4.2.0)(webpack-hot-middleware@2.26.1)(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3))': + 
'@pmmmwh/react-refresh-webpack-plugin@0.5.17(react-refresh@0.14.2)(type-fest@4.2.0)(webpack-hot-middleware@2.26.1)(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3))': dependencies: ansi-html: 0.0.9 core-js-pure: 3.47.0 @@ -11016,7 +10786,7 @@ snapshots: react-refresh: 0.14.2 schema-utils: 4.3.3 source-map: 0.7.6 - webpack: 5.103.0(esbuild@0.25.0)(uglify-js@3.19.3) + webpack: 5.103.0(esbuild@0.27.2)(uglify-js@3.19.3) optionalDependencies: type-fest: 4.2.0 webpack-hot-middleware: 2.26.1 @@ -11547,22 +11317,22 @@ snapshots: storybook: 9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)) ts-dedent: 2.2.0 - '@storybook/builder-webpack5@9.1.13(esbuild@0.25.0)(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))(typescript@5.9.3)(uglify-js@3.19.3)': + '@storybook/builder-webpack5@9.1.13(esbuild@0.27.2)(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))(typescript@5.9.3)(uglify-js@3.19.3)': dependencies: '@storybook/core-webpack': 9.1.13(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2))) case-sensitive-paths-webpack-plugin: 2.4.0 cjs-module-lexer: 1.4.3 - css-loader: 6.11.0(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)) + css-loader: 6.11.0(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)) es-module-lexer: 1.7.0 - fork-ts-checker-webpack-plugin: 8.0.0(typescript@5.9.3)(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)) - html-webpack-plugin: 5.6.5(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)) + fork-ts-checker-webpack-plugin: 8.0.0(typescript@5.9.3)(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)) + html-webpack-plugin: 5.6.5(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)) magic-string: 0.30.21 storybook: 9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)) - style-loader: 3.3.4(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)) - terser-webpack-plugin: 5.3.15(esbuild@0.25.0)(uglify-js@3.19.3)(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)) + style-loader: 3.3.4(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)) + terser-webpack-plugin: 5.3.15(esbuild@0.27.2)(uglify-js@3.19.3)(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)) ts-dedent: 2.2.0 - webpack: 5.103.0(esbuild@0.25.0)(uglify-js@3.19.3) - webpack-dev-middleware: 6.1.3(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)) + webpack: 5.103.0(esbuild@0.27.2)(uglify-js@3.19.3) + webpack-dev-middleware: 6.1.3(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)) webpack-hot-middleware: 2.26.1 webpack-virtual-modules: 0.6.2 optionalDependencies: @@ -11591,7 +11361,7 @@ snapshots: react: 19.2.3 react-dom: 19.2.3(react@19.2.3) - '@storybook/nextjs@9.1.13(esbuild@0.25.0)(next@15.5.9(@babel/core@7.28.5)(@playwright/test@1.57.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(sass@1.95.0))(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(sass@1.95.0)(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))(type-fest@4.2.0)(typescript@5.9.3)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3))': + 
'@storybook/nextjs@9.1.13(esbuild@0.27.2)(next@15.5.9(@babel/core@7.28.5)(@playwright/test@1.57.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(sass@1.95.0))(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(sass@1.95.0)(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))(type-fest@4.2.0)(typescript@5.9.3)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3))': dependencies: '@babel/core': 7.28.5 '@babel/plugin-syntax-bigint': 7.8.3(@babel/core@7.28.5) @@ -11606,33 +11376,33 @@ snapshots: '@babel/preset-react': 7.28.5(@babel/core@7.28.5) '@babel/preset-typescript': 7.28.5(@babel/core@7.28.5) '@babel/runtime': 7.28.4 - '@pmmmwh/react-refresh-webpack-plugin': 0.5.17(react-refresh@0.14.2)(type-fest@4.2.0)(webpack-hot-middleware@2.26.1)(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)) - '@storybook/builder-webpack5': 9.1.13(esbuild@0.25.0)(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))(typescript@5.9.3)(uglify-js@3.19.3) - '@storybook/preset-react-webpack': 9.1.13(esbuild@0.25.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))(typescript@5.9.3)(uglify-js@3.19.3) + '@pmmmwh/react-refresh-webpack-plugin': 0.5.17(react-refresh@0.14.2)(type-fest@4.2.0)(webpack-hot-middleware@2.26.1)(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)) + '@storybook/builder-webpack5': 9.1.13(esbuild@0.27.2)(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))(typescript@5.9.3)(uglify-js@3.19.3) + '@storybook/preset-react-webpack': 9.1.13(esbuild@0.27.2)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))(typescript@5.9.3)(uglify-js@3.19.3) '@storybook/react': 9.1.13(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))(typescript@5.9.3) '@types/semver': 7.7.1 - babel-loader: 9.2.1(@babel/core@7.28.5)(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)) - css-loader: 6.11.0(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)) + babel-loader: 9.2.1(@babel/core@7.28.5)(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)) + css-loader: 6.11.0(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)) image-size: 2.0.2 loader-utils: 3.3.1 next: 15.5.9(@babel/core@7.28.5)(@playwright/test@1.57.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(sass@1.95.0) - node-polyfill-webpack-plugin: 2.0.1(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)) + node-polyfill-webpack-plugin: 2.0.1(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)) postcss: 8.5.6 - postcss-loader: 8.2.0(postcss@8.5.6)(typescript@5.9.3)(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)) + postcss-loader: 8.2.0(postcss@8.5.6)(typescript@5.9.3)(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)) react: 19.2.3 react-dom: 19.2.3(react@19.2.3) react-refresh: 0.14.2 resolve-url-loader: 5.0.0 - sass-loader: 16.0.6(sass@1.95.0)(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)) + sass-loader: 
16.0.6(sass@1.95.0)(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)) semver: 7.7.3 storybook: 9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)) - style-loader: 3.3.4(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)) + style-loader: 3.3.4(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)) styled-jsx: 5.1.7(@babel/core@7.28.5)(react@19.2.3) tsconfig-paths: 4.2.0 tsconfig-paths-webpack-plugin: 4.2.0 optionalDependencies: typescript: 5.9.3 - webpack: 5.103.0(esbuild@0.25.0)(uglify-js@3.19.3) + webpack: 5.103.0(esbuild@0.27.2)(uglify-js@3.19.3) transitivePeerDependencies: - '@rspack/core' - '@swc/core' @@ -11651,10 +11421,10 @@ snapshots: - webpack-hot-middleware - webpack-plugin-serve - '@storybook/preset-react-webpack@9.1.13(esbuild@0.25.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))(typescript@5.9.3)(uglify-js@3.19.3)': + '@storybook/preset-react-webpack@9.1.13(esbuild@0.27.2)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))(typescript@5.9.3)(uglify-js@3.19.3)': dependencies: '@storybook/core-webpack': 9.1.13(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2))) - '@storybook/react-docgen-typescript-plugin': 1.0.6--canary.9.0c3f3b7.0(typescript@5.9.3)(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)) + '@storybook/react-docgen-typescript-plugin': 1.0.6--canary.9.0c3f3b7.0(typescript@5.9.3)(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)) '@types/semver': 7.7.1 find-up: 7.0.0 magic-string: 0.30.21 @@ -11665,7 +11435,7 @@ snapshots: semver: 7.7.3 storybook: 9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)) tsconfig-paths: 4.2.0 - webpack: 5.103.0(esbuild@0.25.0)(uglify-js@3.19.3) + webpack: 5.103.0(esbuild@0.27.2)(uglify-js@3.19.3) optionalDependencies: typescript: 5.9.3 transitivePeerDependencies: @@ -11675,7 +11445,7 @@ snapshots: - uglify-js - webpack-cli - '@storybook/react-docgen-typescript-plugin@1.0.6--canary.9.0c3f3b7.0(typescript@5.9.3)(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3))': + '@storybook/react-docgen-typescript-plugin@1.0.6--canary.9.0c3f3b7.0(typescript@5.9.3)(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3))': dependencies: debug: 4.4.3 endent: 2.1.0 @@ -11685,7 +11455,7 @@ snapshots: react-docgen-typescript: 2.4.0(typescript@5.9.3) tslib: 2.8.1 typescript: 5.9.3 - webpack: 5.103.0(esbuild@0.25.0)(uglify-js@3.19.3) + webpack: 5.103.0(esbuild@0.27.2)(uglify-js@3.19.3) transitivePeerDependencies: - supports-color @@ -12865,12 +12635,12 @@ snapshots: postcss: 8.5.6 postcss-value-parser: 4.2.0 - babel-loader@9.2.1(@babel/core@7.28.5)(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)): + babel-loader@9.2.1(@babel/core@7.28.5)(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)): dependencies: '@babel/core': 7.28.5 find-cache-dir: 4.0.0 schema-utils: 4.3.3 - webpack: 5.103.0(esbuild@0.25.0)(uglify-js@3.19.3) + webpack: 5.103.0(esbuild@0.27.2)(uglify-js@3.19.3) babel-plugin-polyfill-corejs2@0.4.14(@babel/core@7.28.5): dependencies: @@ -13346,7 +13116,7 @@ snapshots: randombytes: 2.1.0 randomfill: 1.0.4 - 
css-loader@6.11.0(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)): + css-loader@6.11.0(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)): dependencies: icss-utils: 5.1.0(postcss@8.5.6) postcss: 8.5.6 @@ -13357,7 +13127,7 @@ snapshots: postcss-value-parser: 4.2.0 semver: 7.7.3 optionalDependencies: - webpack: 5.103.0(esbuild@0.25.0)(uglify-js@3.19.3) + webpack: 5.103.0(esbuild@0.27.2)(uglify-js@3.19.3) css-mediaquery@0.1.2: {} @@ -13778,71 +13548,43 @@ snapshots: esast-util-from-estree: 2.0.0 vfile-message: 4.0.3 - esbuild-register@3.6.0(esbuild@0.25.0): + esbuild-register@3.6.0(esbuild@0.27.2): dependencies: debug: 4.4.3 - esbuild: 0.25.0 + esbuild: 0.27.2 transitivePeerDependencies: - supports-color esbuild-wasm@0.27.2: {} - esbuild@0.25.0: + esbuild@0.27.2: optionalDependencies: - '@esbuild/aix-ppc64': 0.25.0 - '@esbuild/android-arm': 0.25.0 - '@esbuild/android-arm64': 0.25.0 - '@esbuild/android-x64': 0.25.0 - '@esbuild/darwin-arm64': 0.25.0 - '@esbuild/darwin-x64': 0.25.0 - '@esbuild/freebsd-arm64': 0.25.0 - '@esbuild/freebsd-x64': 0.25.0 - '@esbuild/linux-arm': 0.25.0 - '@esbuild/linux-arm64': 0.25.0 - '@esbuild/linux-ia32': 0.25.0 - '@esbuild/linux-loong64': 0.25.0 - '@esbuild/linux-mips64el': 0.25.0 - '@esbuild/linux-ppc64': 0.25.0 - '@esbuild/linux-riscv64': 0.25.0 - '@esbuild/linux-s390x': 0.25.0 - '@esbuild/linux-x64': 0.25.0 - '@esbuild/netbsd-arm64': 0.25.0 - '@esbuild/netbsd-x64': 0.25.0 - '@esbuild/openbsd-arm64': 0.25.0 - '@esbuild/openbsd-x64': 0.25.0 - '@esbuild/sunos-x64': 0.25.0 - '@esbuild/win32-arm64': 0.25.0 - '@esbuild/win32-ia32': 0.25.0 - '@esbuild/win32-x64': 0.25.0 - - esbuild@0.25.12: - optionalDependencies: - '@esbuild/aix-ppc64': 0.25.12 - '@esbuild/android-arm': 0.25.12 - '@esbuild/android-arm64': 0.25.12 - '@esbuild/android-x64': 0.25.12 - '@esbuild/darwin-arm64': 0.25.12 - '@esbuild/darwin-x64': 0.25.12 - '@esbuild/freebsd-arm64': 0.25.12 - '@esbuild/freebsd-x64': 0.25.12 - '@esbuild/linux-arm': 0.25.12 - '@esbuild/linux-arm64': 0.25.12 - '@esbuild/linux-ia32': 0.25.12 - '@esbuild/linux-loong64': 0.25.12 - '@esbuild/linux-mips64el': 0.25.12 - '@esbuild/linux-ppc64': 0.25.12 - '@esbuild/linux-riscv64': 0.25.12 - '@esbuild/linux-s390x': 0.25.12 - '@esbuild/linux-x64': 0.25.12 - '@esbuild/netbsd-arm64': 0.25.12 - '@esbuild/netbsd-x64': 0.25.12 - '@esbuild/openbsd-arm64': 0.25.12 - '@esbuild/openbsd-x64': 0.25.12 - '@esbuild/openharmony-arm64': 0.25.12 - '@esbuild/sunos-x64': 0.25.12 - '@esbuild/win32-arm64': 0.25.12 - '@esbuild/win32-ia32': 0.25.12 - '@esbuild/win32-x64': 0.25.12 + '@esbuild/aix-ppc64': 0.27.2 + '@esbuild/android-arm': 0.27.2 + '@esbuild/android-arm64': 0.27.2 + '@esbuild/android-x64': 0.27.2 + '@esbuild/darwin-arm64': 0.27.2 + '@esbuild/darwin-x64': 0.27.2 + '@esbuild/freebsd-arm64': 0.27.2 + '@esbuild/freebsd-x64': 0.27.2 + '@esbuild/linux-arm': 0.27.2 + '@esbuild/linux-arm64': 0.27.2 + '@esbuild/linux-ia32': 0.27.2 + '@esbuild/linux-loong64': 0.27.2 + '@esbuild/linux-mips64el': 0.27.2 + '@esbuild/linux-ppc64': 0.27.2 + '@esbuild/linux-riscv64': 0.27.2 + '@esbuild/linux-s390x': 0.27.2 + '@esbuild/linux-x64': 0.27.2 + '@esbuild/netbsd-arm64': 0.27.2 + '@esbuild/netbsd-x64': 0.27.2 + '@esbuild/openbsd-arm64': 0.27.2 + '@esbuild/openbsd-x64': 0.27.2 + '@esbuild/openharmony-arm64': 0.27.2 + '@esbuild/sunos-x64': 0.27.2 + '@esbuild/win32-arm64': 0.27.2 + '@esbuild/win32-ia32': 0.27.2 + '@esbuild/win32-x64': 0.27.2 escalade@3.2.0: {} @@ -14456,7 +14198,7 @@ snapshots: cross-spawn: 7.0.6 signal-exit: 4.1.0 - 
fork-ts-checker-webpack-plugin@8.0.0(typescript@5.9.3)(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)): + fork-ts-checker-webpack-plugin@8.0.0(typescript@5.9.3)(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)): dependencies: '@babel/code-frame': 7.27.1 chalk: 4.1.2 @@ -14471,7 +14213,7 @@ snapshots: semver: 7.7.3 tapable: 2.3.0 typescript: 5.9.3 - webpack: 5.103.0(esbuild@0.25.0)(uglify-js@3.19.3) + webpack: 5.103.0(esbuild@0.27.2)(uglify-js@3.19.3) format@0.2.2: {} @@ -14786,7 +14528,7 @@ snapshots: html-void-elements@3.0.0: {} - html-webpack-plugin@5.6.5(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)): + html-webpack-plugin@5.6.5(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)): dependencies: '@types/html-minifier-terser': 6.1.0 html-minifier-terser: 6.1.0 @@ -14794,7 +14536,7 @@ snapshots: pretty-error: 4.0.0 tapable: 2.3.0 optionalDependencies: - webpack: 5.103.0(esbuild@0.25.0)(uglify-js@3.19.3) + webpack: 5.103.0(esbuild@0.27.2)(uglify-js@3.19.3) htmlparser2@6.1.0: dependencies: @@ -15944,7 +15686,7 @@ snapshots: node-addon-api@7.1.1: optional: true - node-polyfill-webpack-plugin@2.0.1(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)): + node-polyfill-webpack-plugin@2.0.1(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)): dependencies: assert: '@nolyfill/assert@1.0.26' browserify-zlib: 0.2.0 @@ -15971,7 +15713,7 @@ snapshots: url: 0.11.4 util: 0.12.5 vm-browserify: 1.1.2 - webpack: 5.103.0(esbuild@0.25.0)(uglify-js@3.19.3) + webpack: 5.103.0(esbuild@0.27.2)(uglify-js@3.19.3) node-releases@2.0.27: {} @@ -16287,14 +16029,14 @@ snapshots: tsx: 4.21.0 yaml: 2.8.2 - postcss-loader@8.2.0(postcss@8.5.6)(typescript@5.9.3)(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)): + postcss-loader@8.2.0(postcss@8.5.6)(typescript@5.9.3)(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)): dependencies: cosmiconfig: 9.0.0(typescript@5.9.3) jiti: 2.6.1 postcss: 8.5.6 semver: 7.7.3 optionalDependencies: - webpack: 5.103.0(esbuild@0.25.0)(uglify-js@3.19.3) + webpack: 5.103.0(esbuild@0.27.2)(uglify-js@3.19.3) transitivePeerDependencies: - typescript @@ -16612,7 +16354,7 @@ snapshots: '@rollup/pluginutils': 5.3.0(rollup@4.53.5) '@types/node': 20.19.26 bippy: 0.3.34(@types/react@19.2.7)(react@19.2.3) - esbuild: 0.25.12 + esbuild: 0.27.2 estree-walker: 3.0.3 kleur: 4.1.5 mri: 1.2.0 @@ -16995,12 +16737,12 @@ snapshots: safe-buffer@5.2.1: {} - sass-loader@16.0.6(sass@1.95.0)(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)): + sass-loader@16.0.6(sass@1.95.0)(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)): dependencies: neo-async: 2.6.2 optionalDependencies: sass: 1.95.0 - webpack: 5.103.0(esbuild@0.25.0)(uglify-js@3.19.3) + webpack: 5.103.0(esbuild@0.27.2)(uglify-js@3.19.3) sass@1.95.0: dependencies: @@ -17225,8 +16967,8 @@ snapshots: '@vitest/mocker': 3.2.4(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)) '@vitest/spy': 3.2.4 better-opn: 3.0.2 - esbuild: 0.25.0 - esbuild-register: 3.6.0(esbuild@0.25.0) + esbuild: 0.27.2 + esbuild-register: 3.6.0(esbuild@0.27.2) recast: 0.23.11 semver: 7.7.3 ws: 8.18.3 @@ -17300,9 +17042,9 @@ snapshots: strip-json-comments@5.0.3: {} - style-loader@3.3.4(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)): + style-loader@3.3.4(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)): dependencies: - webpack: 5.103.0(esbuild@0.25.0)(uglify-js@3.19.3) + webpack: 5.103.0(esbuild@0.27.2)(uglify-js@3.19.3) style-to-js@1.1.21: dependencies: @@ -17405,16 +17147,16 @@ snapshots: readable-stream: 3.6.2 optional: true - 
terser-webpack-plugin@5.3.15(esbuild@0.25.0)(uglify-js@3.19.3)(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)): + terser-webpack-plugin@5.3.15(esbuild@0.27.2)(uglify-js@3.19.3)(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)): dependencies: '@jridgewell/trace-mapping': 0.3.31 jest-worker: 27.5.1 schema-utils: 4.3.3 serialize-javascript: 6.0.2 terser: 5.44.1 - webpack: 5.103.0(esbuild@0.25.0)(uglify-js@3.19.3) + webpack: 5.103.0(esbuild@0.27.2)(uglify-js@3.19.3) optionalDependencies: - esbuild: 0.25.0 + esbuild: 0.27.2 uglify-js: 3.19.3 terser@5.44.1: @@ -17542,7 +17284,7 @@ snapshots: tsx@4.21.0: dependencies: - esbuild: 0.25.12 + esbuild: 0.27.2 get-tsconfig: 4.13.0 optionalDependencies: fsevents: 2.3.3 @@ -17750,7 +17492,7 @@ snapshots: vite@6.4.1(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2): dependencies: - esbuild: 0.25.12 + esbuild: 0.27.2 fdir: 6.5.0(picomatch@4.0.3) picomatch: 4.0.3 postcss: 8.5.6 @@ -17767,7 +17509,7 @@ snapshots: vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2): dependencies: - esbuild: 0.25.12 + esbuild: 0.27.2 fdir: 6.5.0(picomatch@4.0.3) picomatch: 4.0.3 postcss: 8.5.6 @@ -17892,7 +17634,7 @@ snapshots: - bufferutil - utf-8-validate - webpack-dev-middleware@6.1.3(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)): + webpack-dev-middleware@6.1.3(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)): dependencies: colorette: 2.0.20 memfs: 3.5.3 @@ -17900,7 +17642,7 @@ snapshots: range-parser: 1.2.1 schema-utils: 4.3.3 optionalDependencies: - webpack: 5.103.0(esbuild@0.25.0)(uglify-js@3.19.3) + webpack: 5.103.0(esbuild@0.27.2)(uglify-js@3.19.3) webpack-hot-middleware@2.26.1: dependencies: @@ -17912,7 +17654,7 @@ snapshots: webpack-virtual-modules@0.6.2: {} - webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3): + webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3): dependencies: '@types/eslint-scope': 3.7.7 '@types/estree': 1.0.8 @@ -17936,7 +17678,7 @@ snapshots: neo-async: 2.6.2 schema-utils: 4.3.3 tapable: 2.3.0 - terser-webpack-plugin: 5.3.15(esbuild@0.25.0)(uglify-js@3.19.3)(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3)) + terser-webpack-plugin: 5.3.15(esbuild@0.27.2)(uglify-js@3.19.3)(webpack@5.103.0(esbuild@0.27.2)(uglify-js@3.19.3)) watchpack: 2.4.4 webpack-sources: 3.3.3 transitivePeerDependencies: From 51ea87ab85be8ab7d7c26558f4bb3cfa6fd9928d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=9D=9E=E6=B3=95=E6=93=8D=E4=BD=9C?= Date: Mon, 12 Jan 2026 15:57:40 +0800 Subject: [PATCH 3/4] feat: clear free plan workflow run logs (#29494) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com> --- api/.env.example | 1 + api/commands.py | 57 +++ api/configs/feature/__init__.py | 4 + api/core/workflow/enums.py | 4 + api/extensions/ext_celery.py | 7 + api/extensions/ext_commands.py | 2 + .../versions/2026_01_09_1630-905527cc8fd3_.py | 30 ++ api/models/workflow.py | 1 + .../api_workflow_run_repository.py | 43 ++- ..._api_workflow_node_execution_repository.py | 147 +++++++- .../sqlalchemy_api_workflow_run_repository.py | 179 +++++++++- ...alchemy_workflow_trigger_log_repository.py | 38 +- .../workflow_trigger_log_repository.py | 12 + api/schedule/clean_workflow_runs_task.py | 43 +++ api/services/retention/__init__.py | 0 .../retention/workflow_run/__init__.py | 0 ...ear_free_plan_expired_workflow_run_logs.py | 
301 ++++++++++++++++ ..._sqlalchemy_api_workflow_run_repository.py | 92 +++++ ...alchemy_workflow_trigger_log_repository.py | 31 ++ ...ear_free_plan_expired_workflow_run_logs.py | 327 ++++++++++++++++++ docker/.env.example | 1 + docker/docker-compose.yaml | 1 + 22 files changed, 1312 insertions(+), 9 deletions(-) create mode 100644 api/migrations/versions/2026_01_09_1630-905527cc8fd3_.py create mode 100644 api/schedule/clean_workflow_runs_task.py create mode 100644 api/services/retention/__init__.py create mode 100644 api/services/retention/workflow_run/__init__.py create mode 100644 api/services/retention/workflow_run/clear_free_plan_expired_workflow_run_logs.py create mode 100644 api/tests/unit_tests/repositories/test_sqlalchemy_workflow_trigger_log_repository.py create mode 100644 api/tests/unit_tests/services/test_clear_free_plan_expired_workflow_run_logs.py diff --git a/api/.env.example b/api/.env.example index 44d770ed70..8099c4a42a 100644 --- a/api/.env.example +++ b/api/.env.example @@ -589,6 +589,7 @@ ENABLE_CLEAN_UNUSED_DATASETS_TASK=false ENABLE_CREATE_TIDB_SERVERLESS_TASK=false ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK=false ENABLE_CLEAN_MESSAGES=false +ENABLE_WORKFLOW_RUN_CLEANUP_TASK=false ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false ENABLE_DATASETS_QUEUE_MONITOR=false ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true diff --git a/api/commands.py b/api/commands.py index 7ebf5b4874..e24b1826ee 100644 --- a/api/commands.py +++ b/api/commands.py @@ -1,4 +1,5 @@ import base64 +import datetime import json import logging import secrets @@ -45,6 +46,7 @@ from services.clear_free_plan_tenant_expired_logs import ClearFreePlanTenantExpi from services.plugin.data_migration import PluginDataMigration from services.plugin.plugin_migration import PluginMigration from services.plugin.plugin_service import PluginService +from services.retention.workflow_run.clear_free_plan_expired_workflow_run_logs import WorkflowRunCleanup from tasks.remove_app_and_related_data_task import delete_draft_variables_batch logger = logging.getLogger(__name__) @@ -852,6 +854,61 @@ def clear_free_plan_tenant_expired_logs(days: int, batch: int, tenant_ids: list[ click.echo(click.style("Clear free plan tenant expired logs completed.", fg="green")) +@click.command("clean-workflow-runs", help="Clean expired workflow runs and related data for free tenants.") +@click.option("--days", default=30, show_default=True, help="Delete workflow runs created before N days ago.") +@click.option("--batch-size", default=200, show_default=True, help="Batch size for selecting workflow runs.") +@click.option( + "--start-from", + type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]), + default=None, + help="Optional lower bound (inclusive) for created_at; must be paired with --end-before.", +) +@click.option( + "--end-before", + type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]), + default=None, + help="Optional upper bound (exclusive) for created_at; must be paired with --start-from.", +) +@click.option( + "--dry-run", + is_flag=True, + help="Preview cleanup results without deleting any workflow run data.", +) +def clean_workflow_runs( + days: int, + batch_size: int, + start_from: datetime.datetime | None, + end_before: datetime.datetime | None, + dry_run: bool, +): + """ + Clean workflow runs and related workflow data for free tenants. 
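+
+    By default, runs created more than --days days ago are cleaned; pass
+    --start-from and --end-before together to target an explicit window
+    instead. A typical dry-run invocation via the Flask CLI (the command is
+    registered in ext_commands below) looks like:
+
+        flask clean-workflow-runs --days 30 --batch-size 200 --dry-run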
+ """ + if (start_from is None) ^ (end_before is None): + raise click.UsageError("--start-from and --end-before must be provided together.") + + start_time = datetime.datetime.now(datetime.UTC) + click.echo(click.style(f"Starting workflow run cleanup at {start_time.isoformat()}.", fg="white")) + + WorkflowRunCleanup( + days=days, + batch_size=batch_size, + start_from=start_from, + end_before=end_before, + dry_run=dry_run, + ).run() + + end_time = datetime.datetime.now(datetime.UTC) + elapsed = end_time - start_time + click.echo( + click.style( + f"Workflow run cleanup completed. start={start_time.isoformat()} " + f"end={end_time.isoformat()} duration={elapsed}", + fg="green", + ) + ) + + @click.option("-f", "--force", is_flag=True, help="Skip user confirmation and force the command to execute.") @click.command("clear-orphaned-file-records", help="Clear orphaned file records.") def clear_orphaned_file_records(force: bool): diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index 6a04171d2d..cf855b1cc0 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -1101,6 +1101,10 @@ class CeleryScheduleTasksConfig(BaseSettings): description="Enable clean messages task", default=False, ) + ENABLE_WORKFLOW_RUN_CLEANUP_TASK: bool = Field( + description="Enable scheduled workflow run cleanup task", + default=False, + ) ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK: bool = Field( description="Enable mail clean document notify task", default=False, diff --git a/api/core/workflow/enums.py b/api/core/workflow/enums.py index c08b62a253..bb3b13e8c6 100644 --- a/api/core/workflow/enums.py +++ b/api/core/workflow/enums.py @@ -211,6 +211,10 @@ class WorkflowExecutionStatus(StrEnum): def is_ended(self) -> bool: return self in _END_STATE + @classmethod + def ended_values(cls) -> list[str]: + return [status.value for status in _END_STATE] + _END_STATE = frozenset( [ diff --git a/api/extensions/ext_celery.py b/api/extensions/ext_celery.py index 2fbab001d0..08cf96c1c1 100644 --- a/api/extensions/ext_celery.py +++ b/api/extensions/ext_celery.py @@ -163,6 +163,13 @@ def init_app(app: DifyApp) -> Celery: "task": "schedule.clean_workflow_runlogs_precise.clean_workflow_runlogs_precise", "schedule": crontab(minute="0", hour="2"), } + if dify_config.ENABLE_WORKFLOW_RUN_CLEANUP_TASK: + # for saas only + imports.append("schedule.clean_workflow_runs_task") + beat_schedule["clean_workflow_runs_task"] = { + "task": "schedule.clean_workflow_runs_task.clean_workflow_runs_task", + "schedule": crontab(minute="0", hour="0"), + } if dify_config.ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK: imports.append("schedule.workflow_schedule_task") beat_schedule["workflow_schedule_task"] = { diff --git a/api/extensions/ext_commands.py b/api/extensions/ext_commands.py index daa3756dba..c32130d377 100644 --- a/api/extensions/ext_commands.py +++ b/api/extensions/ext_commands.py @@ -4,6 +4,7 @@ from dify_app import DifyApp def init_app(app: DifyApp): from commands import ( add_qdrant_index, + clean_workflow_runs, cleanup_orphaned_draft_variables, clear_free_plan_tenant_expired_logs, clear_orphaned_file_records, @@ -56,6 +57,7 @@ def init_app(app: DifyApp): setup_datasource_oauth_client, transform_datasource_credentials, install_rag_pipeline_plugins, + clean_workflow_runs, ] for cmd in cmds_to_register: app.cli.add_command(cmd) diff --git a/api/migrations/versions/2026_01_09_1630-905527cc8fd3_.py b/api/migrations/versions/2026_01_09_1630-905527cc8fd3_.py new file mode 100644 index 0000000000..7e0cc8ec9d 
--- /dev/null
+++ b/api/migrations/versions/2026_01_09_1630-905527cc8fd3_.py
@@ -0,0 +1,30 @@
+"""add workflow_run_created_at_id_idx

+Revision ID: 905527cc8fd3
+Revises: 7df29de0f6be
+Create Date: 2026-01-09 16:30:02.462084
+
+"""
+from alembic import op
+import models as models
+
+# revision identifiers, used by Alembic.
+revision = '905527cc8fd3'
+down_revision = '7df29de0f6be'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('workflow_runs', schema=None) as batch_op:
+        batch_op.create_index('workflow_run_created_at_id_idx', ['created_at', 'id'], unique=False)
+
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('workflow_runs', schema=None) as batch_op:
+        batch_op.drop_index('workflow_run_created_at_id_idx')
+    # ### end Alembic commands ###
diff --git a/api/models/workflow.py b/api/models/workflow.py
index a18939523b..072c6100b5 100644
--- a/api/models/workflow.py
+++ b/api/models/workflow.py
@@ -597,6 +597,7 @@ class WorkflowRun(Base):
     __table_args__ = (
         sa.PrimaryKeyConstraint("id", name="workflow_run_pkey"),
         sa.Index("workflow_run_triggerd_from_idx", "tenant_id", "app_id", "triggered_from"),
+        sa.Index("workflow_run_created_at_id_idx", "created_at", "id"),
     )
 
     id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()))
diff --git a/api/repositories/api_workflow_run_repository.py b/api/repositories/api_workflow_run_repository.py
index fd547c78ba..1a2b84fdf9 100644
--- a/api/repositories/api_workflow_run_repository.py
+++ b/api/repositories/api_workflow_run_repository.py
@@ -34,11 +34,14 @@ Example:
 ```
 """
 
-from collections.abc import Sequence
+from collections.abc import Callable, Sequence
 from datetime import datetime
 from typing import Protocol
 
+from sqlalchemy.orm import Session
+
 from core.workflow.entities.pause_reason import PauseReason
+from core.workflow.enums import WorkflowType
 from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
 from libs.infinite_scroll_pagination import InfiniteScrollPagination
 from models.enums import WorkflowRunTriggeredFrom
@@ -253,6 +256,44 @@ class APIWorkflowRunRepository(WorkflowExecutionRepository, Protocol):
         """
         ...
 
+    def get_runs_batch_by_time_range(
+        self,
+        start_from: datetime | None,
+        end_before: datetime,
+        last_seen: tuple[datetime, str] | None,
+        batch_size: int,
+        run_types: Sequence[WorkflowType] | None = None,
+        tenant_ids: Sequence[str] | None = None,
+    ) -> Sequence[WorkflowRun]:
+        """
+        Fetch ended workflow runs in a time window for archival and cleanup batching.
+        """
+        ...
+
+    def delete_runs_with_related(
+        self,
+        runs: Sequence[WorkflowRun],
+        delete_node_executions: Callable[[Session, Sequence[WorkflowRun]], tuple[int, int]] | None = None,
+        delete_trigger_logs: Callable[[Session, Sequence[str]], int] | None = None,
+    ) -> dict[str, int]:
+        """
+        Delete workflow runs and their related records (node executions, offloads, app logs,
+        trigger logs, pauses, pause reasons).
+        """
+        ...
+ + def count_runs_with_related( + self, + runs: Sequence[WorkflowRun], + count_node_executions: Callable[[Session, Sequence[WorkflowRun]], tuple[int, int]] | None = None, + count_trigger_logs: Callable[[Session, Sequence[str]], int] | None = None, + ) -> dict[str, int]: + """ + Count workflow runs and their related records (node executions, offloads, app logs, + trigger logs, pauses, pause reasons) without deleting data. + """ + ... + def create_workflow_pause( self, workflow_run_id: str, diff --git a/api/repositories/sqlalchemy_api_workflow_node_execution_repository.py b/api/repositories/sqlalchemy_api_workflow_node_execution_repository.py index 7e2173acdd..2de3a15d65 100644 --- a/api/repositories/sqlalchemy_api_workflow_node_execution_repository.py +++ b/api/repositories/sqlalchemy_api_workflow_node_execution_repository.py @@ -7,13 +7,18 @@ using SQLAlchemy 2.0 style queries for WorkflowNodeExecutionModel operations. from collections.abc import Sequence from datetime import datetime -from typing import cast +from typing import TypedDict, cast -from sqlalchemy import asc, delete, desc, select +from sqlalchemy import asc, delete, desc, func, select, tuple_ from sqlalchemy.engine import CursorResult from sqlalchemy.orm import Session, sessionmaker -from models.workflow import WorkflowNodeExecutionModel +from models.enums import WorkflowRunTriggeredFrom +from models.workflow import ( + WorkflowNodeExecutionModel, + WorkflowNodeExecutionOffload, + WorkflowNodeExecutionTriggeredFrom, +) from repositories.api_workflow_node_execution_repository import DifyAPIWorkflowNodeExecutionRepository @@ -44,6 +49,26 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut """ self._session_maker = session_maker + @staticmethod + def _map_run_triggered_from_to_node_triggered_from(triggered_from: str) -> str: + """ + Map workflow run triggered_from values to workflow node execution triggered_from values. + """ + if triggered_from in { + WorkflowRunTriggeredFrom.APP_RUN.value, + WorkflowRunTriggeredFrom.DEBUGGING.value, + WorkflowRunTriggeredFrom.SCHEDULE.value, + WorkflowRunTriggeredFrom.PLUGIN.value, + WorkflowRunTriggeredFrom.WEBHOOK.value, + }: + return WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value + if triggered_from in { + WorkflowRunTriggeredFrom.RAG_PIPELINE_RUN.value, + WorkflowRunTriggeredFrom.RAG_PIPELINE_DEBUGGING.value, + }: + return WorkflowNodeExecutionTriggeredFrom.RAG_PIPELINE_RUN.value + return "" + def get_node_last_execution( self, tenant_id: str, @@ -290,3 +315,119 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut result = cast(CursorResult, session.execute(stmt)) session.commit() return result.rowcount + + class RunContext(TypedDict): + run_id: str + tenant_id: str + app_id: str + workflow_id: str + triggered_from: str + + @staticmethod + def delete_by_runs(session: Session, runs: Sequence[RunContext]) -> tuple[int, int]: + """ + Delete node executions (and offloads) for the given workflow runs using indexed columns. + + Uses the composite index on (tenant_id, app_id, workflow_id, triggered_from, workflow_run_id) + by filtering on those columns with tuple IN. 
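+ + Returns a tuple of (node_executions_deleted, offloads_deleted) row counts.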
+ """ + if not runs: + return 0, 0 + + tuple_values = [ + ( + run["tenant_id"], + run["app_id"], + run["workflow_id"], + DifyAPISQLAlchemyWorkflowNodeExecutionRepository._map_run_triggered_from_to_node_triggered_from( + run["triggered_from"] + ), + run["run_id"], + ) + for run in runs + ] + + node_execution_ids = session.scalars( + select(WorkflowNodeExecutionModel.id).where( + tuple_( + WorkflowNodeExecutionModel.tenant_id, + WorkflowNodeExecutionModel.app_id, + WorkflowNodeExecutionModel.workflow_id, + WorkflowNodeExecutionModel.triggered_from, + WorkflowNodeExecutionModel.workflow_run_id, + ).in_(tuple_values) + ) + ).all() + + if not node_execution_ids: + return 0, 0 + + offloads_deleted = ( + cast( + CursorResult, + session.execute( + delete(WorkflowNodeExecutionOffload).where( + WorkflowNodeExecutionOffload.node_execution_id.in_(node_execution_ids) + ) + ), + ).rowcount + or 0 + ) + + node_executions_deleted = ( + cast( + CursorResult, + session.execute( + delete(WorkflowNodeExecutionModel).where(WorkflowNodeExecutionModel.id.in_(node_execution_ids)) + ), + ).rowcount + or 0 + ) + + return node_executions_deleted, offloads_deleted + + @staticmethod + def count_by_runs(session: Session, runs: Sequence[RunContext]) -> tuple[int, int]: + """ + Count node executions (and offloads) for the given workflow runs using indexed columns. + """ + if not runs: + return 0, 0 + + tuple_values = [ + ( + run["tenant_id"], + run["app_id"], + run["workflow_id"], + DifyAPISQLAlchemyWorkflowNodeExecutionRepository._map_run_triggered_from_to_node_triggered_from( + run["triggered_from"] + ), + run["run_id"], + ) + for run in runs + ] + tuple_filter = tuple_( + WorkflowNodeExecutionModel.tenant_id, + WorkflowNodeExecutionModel.app_id, + WorkflowNodeExecutionModel.workflow_id, + WorkflowNodeExecutionModel.triggered_from, + WorkflowNodeExecutionModel.workflow_run_id, + ).in_(tuple_values) + + node_executions_count = ( + session.scalar(select(func.count()).select_from(WorkflowNodeExecutionModel).where(tuple_filter)) or 0 + ) + offloads_count = ( + session.scalar( + select(func.count()) + .select_from(WorkflowNodeExecutionOffload) + .join( + WorkflowNodeExecutionModel, + WorkflowNodeExecutionOffload.node_execution_id == WorkflowNodeExecutionModel.id, + ) + .where(tuple_filter) + ) + or 0 + ) + + return int(node_executions_count), int(offloads_count) diff --git a/api/repositories/sqlalchemy_api_workflow_run_repository.py b/api/repositories/sqlalchemy_api_workflow_run_repository.py index b172c6a3ac..9d2d06e99f 100644 --- a/api/repositories/sqlalchemy_api_workflow_run_repository.py +++ b/api/repositories/sqlalchemy_api_workflow_run_repository.py @@ -21,7 +21,7 @@ Implementation Notes: import logging import uuid -from collections.abc import Sequence +from collections.abc import Callable, Sequence from datetime import datetime from decimal import Decimal from typing import Any, cast @@ -32,7 +32,7 @@ from sqlalchemy.engine import CursorResult from sqlalchemy.orm import Session, selectinload, sessionmaker from core.workflow.entities.pause_reason import HumanInputRequired, PauseReason, SchedulingPause -from core.workflow.enums import WorkflowExecutionStatus +from core.workflow.enums import WorkflowExecutionStatus, WorkflowType from extensions.ext_storage import storage from libs.datetime_utils import naive_utc_now from libs.helper import convert_datetime_to_date @@ -40,8 +40,14 @@ from libs.infinite_scroll_pagination import InfiniteScrollPagination from libs.time_parser import get_time_threshold from libs.uuid_utils 
import uuidv7 from models.enums import WorkflowRunTriggeredFrom -from models.workflow import WorkflowPause as WorkflowPauseModel -from models.workflow import WorkflowPauseReason, WorkflowRun +from models.workflow import ( + WorkflowAppLog, + WorkflowPauseReason, + WorkflowRun, +) +from models.workflow import ( + WorkflowPause as WorkflowPauseModel, +) from repositories.api_workflow_run_repository import APIWorkflowRunRepository from repositories.entities.workflow_pause import WorkflowPauseEntity from repositories.types import ( @@ -314,6 +320,171 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository): logger.info("Total deleted %s workflow runs for app %s", total_deleted, app_id) return total_deleted + def get_runs_batch_by_time_range( + self, + start_from: datetime | None, + end_before: datetime, + last_seen: tuple[datetime, str] | None, + batch_size: int, + run_types: Sequence[WorkflowType] | None = None, + tenant_ids: Sequence[str] | None = None, + ) -> Sequence[WorkflowRun]: + """ + Fetch ended workflow runs in a time window for archival and cleanup batching. + + Query scope: + - created_at in [start_from, end_before) + - type in run_types (when provided) + - status is an ended state + - optional tenant_id filter and cursor (last_seen) for pagination + """ + with self._session_maker() as session: + stmt = ( + select(WorkflowRun) + .where( + WorkflowRun.created_at < end_before, + WorkflowRun.status.in_(WorkflowExecutionStatus.ended_values()), + ) + .order_by(WorkflowRun.created_at.asc(), WorkflowRun.id.asc()) + .limit(batch_size) + ) + if run_types is not None: + if not run_types: + return [] + stmt = stmt.where(WorkflowRun.type.in_(run_types)) + + if start_from: + stmt = stmt.where(WorkflowRun.created_at >= start_from) + + if tenant_ids: + stmt = stmt.where(WorkflowRun.tenant_id.in_(tenant_ids)) + + if last_seen: + stmt = stmt.where( + or_( + WorkflowRun.created_at > last_seen[0], + and_(WorkflowRun.created_at == last_seen[0], WorkflowRun.id > last_seen[1]), + ) + ) + + return session.scalars(stmt).all() + + def delete_runs_with_related( + self, + runs: Sequence[WorkflowRun], + delete_node_executions: Callable[[Session, Sequence[WorkflowRun]], tuple[int, int]] | None = None, + delete_trigger_logs: Callable[[Session, Sequence[str]], int] | None = None, + ) -> dict[str, int]: + if not runs: + return { + "runs": 0, + "node_executions": 0, + "offloads": 0, + "app_logs": 0, + "trigger_logs": 0, + "pauses": 0, + "pause_reasons": 0, + } + + with self._session_maker() as session: + run_ids = [run.id for run in runs] + if delete_node_executions: + node_executions_deleted, offloads_deleted = delete_node_executions(session, runs) + else: + node_executions_deleted, offloads_deleted = 0, 0 + + app_logs_result = session.execute(delete(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id.in_(run_ids))) + app_logs_deleted = cast(CursorResult, app_logs_result).rowcount or 0 + + pause_ids = session.scalars( + select(WorkflowPauseModel.id).where(WorkflowPauseModel.workflow_run_id.in_(run_ids)) + ).all() + pause_reasons_deleted = 0 + pauses_deleted = 0 + + if pause_ids: + pause_reasons_result = session.execute( + delete(WorkflowPauseReason).where(WorkflowPauseReason.pause_id.in_(pause_ids)) + ) + pause_reasons_deleted = cast(CursorResult, pause_reasons_result).rowcount or 0 + pauses_result = session.execute(delete(WorkflowPauseModel).where(WorkflowPauseModel.id.in_(pause_ids))) + pauses_deleted = cast(CursorResult, pauses_result).rowcount or 0 + + trigger_logs_deleted =
delete_trigger_logs(session, run_ids) if delete_trigger_logs else 0 + + runs_result = session.execute(delete(WorkflowRun).where(WorkflowRun.id.in_(run_ids))) + runs_deleted = cast(CursorResult, runs_result).rowcount or 0 + + session.commit() + + return { + "runs": runs_deleted, + "node_executions": node_executions_deleted, + "offloads": offloads_deleted, + "app_logs": app_logs_deleted, + "trigger_logs": trigger_logs_deleted, + "pauses": pauses_deleted, + "pause_reasons": pause_reasons_deleted, + } + + def count_runs_with_related( + self, + runs: Sequence[WorkflowRun], + count_node_executions: Callable[[Session, Sequence[WorkflowRun]], tuple[int, int]] | None = None, + count_trigger_logs: Callable[[Session, Sequence[str]], int] | None = None, + ) -> dict[str, int]: + if not runs: + return { + "runs": 0, + "node_executions": 0, + "offloads": 0, + "app_logs": 0, + "trigger_logs": 0, + "pauses": 0, + "pause_reasons": 0, + } + + with self._session_maker() as session: + run_ids = [run.id for run in runs] + if count_node_executions: + node_executions_count, offloads_count = count_node_executions(session, runs) + else: + node_executions_count, offloads_count = 0, 0 + + app_logs_count = ( + session.scalar( + select(func.count()).select_from(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id.in_(run_ids)) + ) + or 0 + ) + + pause_ids = session.scalars( + select(WorkflowPauseModel.id).where(WorkflowPauseModel.workflow_run_id.in_(run_ids)) + ).all() + pauses_count = len(pause_ids) + pause_reasons_count = 0 + if pause_ids: + pause_reasons_count = ( + session.scalar( + select(func.count()) + .select_from(WorkflowPauseReason) + .where(WorkflowPauseReason.pause_id.in_(pause_ids)) + ) + or 0 + ) + + trigger_logs_count = count_trigger_logs(session, run_ids) if count_trigger_logs else 0 + + return { + "runs": len(runs), + "node_executions": node_executions_count, + "offloads": offloads_count, + "app_logs": int(app_logs_count), + "trigger_logs": trigger_logs_count, + "pauses": pauses_count, + "pause_reasons": int(pause_reasons_count), + } + def create_workflow_pause( self, workflow_run_id: str, diff --git a/api/repositories/sqlalchemy_workflow_trigger_log_repository.py b/api/repositories/sqlalchemy_workflow_trigger_log_repository.py index 0d67e286b0..ebd3745d18 100644 --- a/api/repositories/sqlalchemy_workflow_trigger_log_repository.py +++ b/api/repositories/sqlalchemy_workflow_trigger_log_repository.py @@ -4,8 +4,10 @@ SQLAlchemy implementation of WorkflowTriggerLogRepository. from collections.abc import Sequence from datetime import UTC, datetime, timedelta +from typing import cast -from sqlalchemy import and_, select +from sqlalchemy import and_, delete, func, select +from sqlalchemy.engine import CursorResult from sqlalchemy.orm import Session from models.enums import WorkflowTriggerStatus @@ -84,3 +86,37 @@ class SQLAlchemyWorkflowTriggerLogRepository(WorkflowTriggerLogRepository): ) return list(self.session.scalars(query).all()) + + def delete_by_run_ids(self, run_ids: Sequence[str]) -> int: + """ + Delete trigger logs associated with the given workflow run ids. + + Args: + run_ids: Collection of workflow run identifiers. + + Returns: + Number of rows deleted. + """ + if not run_ids: + return 0 + + result = self.session.execute(delete(WorkflowTriggerLog).where(WorkflowTriggerLog.workflow_run_id.in_(run_ids))) + return cast(CursorResult, result).rowcount or 0 + + def count_by_run_ids(self, run_ids: Sequence[str]) -> int: + """ + Count trigger logs associated with the given workflow run ids. 
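+ Mirrors delete_by_run_ids; used by the dry-run path to report counts without deleting rows.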
+ + Args: + run_ids: Collection of workflow run identifiers. + + Returns: + Number of rows matched. + """ + if not run_ids: + return 0 + + count = self.session.scalar( + select(func.count()).select_from(WorkflowTriggerLog).where(WorkflowTriggerLog.workflow_run_id.in_(run_ids)) + ) + return int(count or 0) diff --git a/api/repositories/workflow_trigger_log_repository.py b/api/repositories/workflow_trigger_log_repository.py index 138b8779ac..b0009e398d 100644 --- a/api/repositories/workflow_trigger_log_repository.py +++ b/api/repositories/workflow_trigger_log_repository.py @@ -109,3 +109,15 @@ class WorkflowTriggerLogRepository(Protocol): A sequence of recent WorkflowTriggerLog instances """ ... + + def delete_by_run_ids(self, run_ids: Sequence[str]) -> int: + """ + Delete trigger logs for workflow run IDs. + + Args: + run_ids: Workflow run IDs to delete + + Returns: + Number of rows deleted + """ + ... diff --git a/api/schedule/clean_workflow_runs_task.py b/api/schedule/clean_workflow_runs_task.py new file mode 100644 index 0000000000..9f5bf8e150 --- /dev/null +++ b/api/schedule/clean_workflow_runs_task.py @@ -0,0 +1,43 @@ +from datetime import UTC, datetime + +import click + +import app +from configs import dify_config +from services.retention.workflow_run.clear_free_plan_expired_workflow_run_logs import WorkflowRunCleanup + + +@app.celery.task(queue="retention") +def clean_workflow_runs_task() -> None: + """ + Scheduled cleanup for workflow runs and related records (sandbox tenants only). + """ + click.echo( + click.style( + ( + "Scheduled workflow run cleanup starting: " + f"cutoff={dify_config.SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS} days, " + f"batch={dify_config.SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE}" + ), + fg="green", + ) + ) + + start_time = datetime.now(UTC) + + WorkflowRunCleanup( + days=dify_config.SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS, + batch_size=dify_config.SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE, + start_from=None, + end_before=None, + ).run() + + end_time = datetime.now(UTC) + elapsed = end_time - start_time + click.echo( + click.style( + f"Scheduled workflow run cleanup finished. 
start={start_time.isoformat()} " + f"end={end_time.isoformat()} duration={elapsed}", + fg="green", + ) + ) diff --git a/api/services/retention/__init__.py b/api/services/retention/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/services/retention/workflow_run/__init__.py b/api/services/retention/workflow_run/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/services/retention/workflow_run/clear_free_plan_expired_workflow_run_logs.py b/api/services/retention/workflow_run/clear_free_plan_expired_workflow_run_logs.py new file mode 100644 index 0000000000..2213169510 --- /dev/null +++ b/api/services/retention/workflow_run/clear_free_plan_expired_workflow_run_logs.py @@ -0,0 +1,301 @@ +import datetime +import logging +from collections.abc import Iterable, Sequence + +import click +from sqlalchemy.orm import Session, sessionmaker + +from configs import dify_config +from enums.cloud_plan import CloudPlan +from extensions.ext_database import db +from models.workflow import WorkflowRun +from repositories.api_workflow_run_repository import APIWorkflowRunRepository +from repositories.sqlalchemy_api_workflow_node_execution_repository import ( + DifyAPISQLAlchemyWorkflowNodeExecutionRepository, +) +from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository +from services.billing_service import BillingService, SubscriptionPlan + +logger = logging.getLogger(__name__) + + +class WorkflowRunCleanup: + def __init__( + self, + days: int, + batch_size: int, + start_from: datetime.datetime | None = None, + end_before: datetime.datetime | None = None, + workflow_run_repo: APIWorkflowRunRepository | None = None, + dry_run: bool = False, + ): + if (start_from is None) ^ (end_before is None): + raise ValueError("start_from and end_before must be both set or both omitted.") + + computed_cutoff = datetime.datetime.now() - datetime.timedelta(days=days) + self.window_start = start_from + self.window_end = end_before or computed_cutoff + + if self.window_start and self.window_end <= self.window_start: + raise ValueError("end_before must be greater than start_from.") + + if batch_size <= 0: + raise ValueError("batch_size must be greater than 0.") + + self.batch_size = batch_size + self._cleanup_whitelist: set[str] | None = None + self.dry_run = dry_run + self.free_plan_grace_period_days = dify_config.SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD + self.workflow_run_repo: APIWorkflowRunRepository + if workflow_run_repo: + self.workflow_run_repo = workflow_run_repo + else: + # Lazy import to avoid circular dependencies during module import + from repositories.factory import DifyAPIRepositoryFactory + + session_maker = sessionmaker(bind=db.engine, expire_on_commit=False) + self.workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker) + + def run(self) -> None: + click.echo( + click.style( + f"{'Inspecting' if self.dry_run else 'Cleaning'} workflow runs " + f"{'between ' + self.window_start.isoformat() + ' and ' if self.window_start else 'before '}" + f"{self.window_end.isoformat()} (batch={self.batch_size})", + fg="white", + ) + ) + if self.dry_run: + click.echo(click.style("Dry run mode enabled. 
No data will be deleted.", fg="yellow")) + + total_runs_deleted = 0 + total_runs_targeted = 0 + related_totals = self._empty_related_counts() if self.dry_run else None + batch_index = 0 + last_seen: tuple[datetime.datetime, str] | None = None + + while True: + run_rows = self.workflow_run_repo.get_runs_batch_by_time_range( + start_from=self.window_start, + end_before=self.window_end, + last_seen=last_seen, + batch_size=self.batch_size, + ) + if not run_rows: + break + + batch_index += 1 + last_seen = (run_rows[-1].created_at, run_rows[-1].id) + tenant_ids = {row.tenant_id for row in run_rows} + free_tenants = self._filter_free_tenants(tenant_ids) + free_runs = [row for row in run_rows if row.tenant_id in free_tenants] + paid_or_skipped = len(run_rows) - len(free_runs) + + if not free_runs: + click.echo( + click.style( + f"[batch #{batch_index}] skipped (no sandbox runs in batch, {paid_or_skipped} paid/unknown)", + fg="yellow", + ) + ) + continue + + total_runs_targeted += len(free_runs) + + if self.dry_run: + batch_counts = self.workflow_run_repo.count_runs_with_related( + free_runs, + count_node_executions=self._count_node_executions, + count_trigger_logs=self._count_trigger_logs, + ) + if related_totals is not None: + for key in related_totals: + related_totals[key] += batch_counts.get(key, 0) + sample_ids = ", ".join(run.id for run in free_runs[:5]) + click.echo( + click.style( + f"[batch #{batch_index}] would delete {len(free_runs)} runs " + f"(sample ids: {sample_ids}) and skip {paid_or_skipped} paid/unknown", + fg="yellow", + ) + ) + continue + + try: + counts = self.workflow_run_repo.delete_runs_with_related( + free_runs, + delete_node_executions=self._delete_node_executions, + delete_trigger_logs=self._delete_trigger_logs, + ) + except Exception: + logger.exception("Failed to delete workflow runs batch ending at %s", last_seen[0]) + raise + + total_runs_deleted += counts["runs"] + click.echo( + click.style( + f"[batch #{batch_index}] deleted runs: {counts['runs']} " + f"(nodes {counts['node_executions']}, offloads {counts['offloads']}, " + f"app_logs {counts['app_logs']}, trigger_logs {counts['trigger_logs']}, " + f"pauses {counts['pauses']}, pause_reasons {counts['pause_reasons']}); " + f"skipped {paid_or_skipped} paid/unknown", + fg="green", + ) + ) + + if self.dry_run: + if self.window_start: + summary_message = ( + f"Dry run complete. Would delete {total_runs_targeted} workflow runs " + f"between {self.window_start.isoformat()} and {self.window_end.isoformat()}" + ) + else: + summary_message = ( + f"Dry run complete. Would delete {total_runs_targeted} workflow runs " + f"before {self.window_end.isoformat()}" + ) + if related_totals is not None: + summary_message = f"{summary_message}; related records: {self._format_related_counts(related_totals)}" + summary_color = "yellow" + else: + if self.window_start: + summary_message = ( + f"Cleanup complete. Deleted {total_runs_deleted} workflow runs " + f"between {self.window_start.isoformat()} and {self.window_end.isoformat()}" + ) + else: + summary_message = ( + f"Cleanup complete. 
Deleted {total_runs_deleted} workflow runs before {self.window_end.isoformat()}" + ) + summary_color = "white" + + click.echo(click.style(summary_message, fg=summary_color)) + + def _filter_free_tenants(self, tenant_ids: Iterable[str]) -> set[str]: + tenant_id_list = list(tenant_ids) + + if not dify_config.BILLING_ENABLED: + return set(tenant_id_list) + + if not tenant_id_list: + return set() + + cleanup_whitelist = self._get_cleanup_whitelist() + + try: + bulk_info = BillingService.get_plan_bulk_with_cache(tenant_id_list) + except Exception: + bulk_info = {} + logger.exception("Failed to fetch billing plans in bulk for tenants: %s", tenant_id_list) + + eligible_free_tenants: set[str] = set() + for tenant_id in tenant_id_list: + if tenant_id in cleanup_whitelist: + continue + + info = bulk_info.get(tenant_id) + if info is None: + logger.warning("Missing billing info for tenant %s in bulk resp; treating as non-free", tenant_id) + continue + + if info.get("plan") != CloudPlan.SANDBOX: + continue + + if self._is_within_grace_period(tenant_id, info): + continue + + eligible_free_tenants.add(tenant_id) + + return eligible_free_tenants + + def _expiration_datetime(self, tenant_id: str, expiration_value: int) -> datetime.datetime | None: + if expiration_value < 0: + return None + + try: + return datetime.datetime.fromtimestamp(expiration_value, datetime.UTC) + except (OverflowError, OSError, ValueError): + logger.exception("Failed to parse expiration timestamp for tenant %s", tenant_id) + return None + + def _is_within_grace_period(self, tenant_id: str, info: SubscriptionPlan) -> bool: + if self.free_plan_grace_period_days <= 0: + return False + + expiration_value = info.get("expiration_date", -1) + expiration_at = self._expiration_datetime(tenant_id, expiration_value) + if expiration_at is None: + return False + + grace_deadline = expiration_at + datetime.timedelta(days=self.free_plan_grace_period_days) + return datetime.datetime.now(datetime.UTC) < grace_deadline + + def _get_cleanup_whitelist(self) -> set[str]: + if self._cleanup_whitelist is not None: + return self._cleanup_whitelist + + if not dify_config.BILLING_ENABLED: + self._cleanup_whitelist = set() + return self._cleanup_whitelist + + try: + whitelist_ids = BillingService.get_expired_subscription_cleanup_whitelist() + except Exception: + logger.exception("Failed to fetch cleanup whitelist from billing service") + whitelist_ids = [] + + self._cleanup_whitelist = set(whitelist_ids) + return self._cleanup_whitelist + + def _delete_trigger_logs(self, session: Session, run_ids: Sequence[str]) -> int: + trigger_repo = SQLAlchemyWorkflowTriggerLogRepository(session) + return trigger_repo.delete_by_run_ids(run_ids) + + def _count_trigger_logs(self, session: Session, run_ids: Sequence[str]) -> int: + trigger_repo = SQLAlchemyWorkflowTriggerLogRepository(session) + return trigger_repo.count_by_run_ids(run_ids) + + @staticmethod + def _build_run_contexts( + runs: Sequence[WorkflowRun], + ) -> list[DifyAPISQLAlchemyWorkflowNodeExecutionRepository.RunContext]: + return [ + { + "run_id": run.id, + "tenant_id": run.tenant_id, + "app_id": run.app_id, + "workflow_id": run.workflow_id, + "triggered_from": run.triggered_from, + } + for run in runs + ] + + @staticmethod + def _empty_related_counts() -> dict[str, int]: + return { + "node_executions": 0, + "offloads": 0, + "app_logs": 0, + "trigger_logs": 0, + "pauses": 0, + "pause_reasons": 0, + } + + @staticmethod + def _format_related_counts(counts: dict[str, int]) -> str: + return ( + f"node_executions 
{counts['node_executions']}, " + f"offloads {counts['offloads']}, " + f"app_logs {counts['app_logs']}, " + f"trigger_logs {counts['trigger_logs']}, " + f"pauses {counts['pauses']}, " + f"pause_reasons {counts['pause_reasons']}" + ) + + def _count_node_executions(self, session: Session, runs: Sequence[WorkflowRun]) -> tuple[int, int]: + run_contexts = self._build_run_contexts(runs) + return DifyAPISQLAlchemyWorkflowNodeExecutionRepository.count_by_runs(session, run_contexts) + + def _delete_node_executions(self, session: Session, runs: Sequence[WorkflowRun]) -> tuple[int, int]: + run_contexts = self._build_run_contexts(runs) + return DifyAPISQLAlchemyWorkflowNodeExecutionRepository.delete_by_runs(session, run_contexts) diff --git a/api/tests/unit_tests/repositories/test_sqlalchemy_api_workflow_run_repository.py b/api/tests/unit_tests/repositories/test_sqlalchemy_api_workflow_run_repository.py index 0c34676252..d443c4c9a5 100644 --- a/api/tests/unit_tests/repositories/test_sqlalchemy_api_workflow_run_repository.py +++ b/api/tests/unit_tests/repositories/test_sqlalchemy_api_workflow_run_repository.py @@ -4,6 +4,7 @@ from datetime import UTC, datetime from unittest.mock import Mock, patch import pytest +from sqlalchemy.dialects import postgresql from sqlalchemy.orm import Session, sessionmaker from core.workflow.enums import WorkflowExecutionStatus @@ -104,6 +105,42 @@ class TestDifyAPISQLAlchemyWorkflowRunRepository: return pause +class TestGetRunsBatchByTimeRange(TestDifyAPISQLAlchemyWorkflowRunRepository): + def test_get_runs_batch_by_time_range_filters_terminal_statuses( + self, repository: DifyAPISQLAlchemyWorkflowRunRepository, mock_session: Mock + ): + scalar_result = Mock() + scalar_result.all.return_value = [] + mock_session.scalars.return_value = scalar_result + + repository.get_runs_batch_by_time_range( + start_from=None, + end_before=datetime(2024, 1, 1), + last_seen=None, + batch_size=50, + ) + + stmt = mock_session.scalars.call_args[0][0] + compiled_sql = str( + stmt.compile( + dialect=postgresql.dialect(), + compile_kwargs={"literal_binds": True}, + ) + ) + + assert "workflow_runs.status" in compiled_sql + for status in ( + WorkflowExecutionStatus.SUCCEEDED, + WorkflowExecutionStatus.FAILED, + WorkflowExecutionStatus.STOPPED, + WorkflowExecutionStatus.PARTIAL_SUCCEEDED, + ): + assert f"'{status.value}'" in compiled_sql + + assert "'running'" not in compiled_sql + assert "'paused'" not in compiled_sql + + class TestCreateWorkflowPause(TestDifyAPISQLAlchemyWorkflowRunRepository): """Test create_workflow_pause method.""" @@ -181,6 +218,61 @@ class TestCreateWorkflowPause(TestDifyAPISQLAlchemyWorkflowRunRepository): ) +class TestDeleteRunsWithRelated(TestDifyAPISQLAlchemyWorkflowRunRepository): + def test_uses_trigger_log_repository(self, repository: DifyAPISQLAlchemyWorkflowRunRepository, mock_session: Mock): + node_ids_result = Mock() + node_ids_result.all.return_value = [] + pause_ids_result = Mock() + pause_ids_result.all.return_value = [] + mock_session.scalars.side_effect = [node_ids_result, pause_ids_result] + + # app_logs delete, runs delete + mock_session.execute.side_effect = [Mock(rowcount=0), Mock(rowcount=1)] + + fake_trigger_repo = Mock() + fake_trigger_repo.delete_by_run_ids.return_value = 3 + + run = Mock(id="run-1", tenant_id="t1", app_id="a1", workflow_id="w1", triggered_from="tf") + counts = repository.delete_runs_with_related( + [run], + delete_node_executions=lambda session, runs: (2, 1), + delete_trigger_logs=lambda session, run_ids: 
fake_trigger_repo.delete_by_run_ids(run_ids), + ) + + fake_trigger_repo.delete_by_run_ids.assert_called_once_with(["run-1"]) + assert counts["node_executions"] == 2 + assert counts["offloads"] == 1 + assert counts["trigger_logs"] == 3 + assert counts["runs"] == 1 + + +class TestCountRunsWithRelated(TestDifyAPISQLAlchemyWorkflowRunRepository): + def test_uses_trigger_log_repository(self, repository: DifyAPISQLAlchemyWorkflowRunRepository, mock_session: Mock): + pause_ids_result = Mock() + pause_ids_result.all.return_value = ["pause-1", "pause-2"] + mock_session.scalars.return_value = pause_ids_result + mock_session.scalar.side_effect = [5, 2] + + fake_trigger_repo = Mock() + fake_trigger_repo.count_by_run_ids.return_value = 3 + + run = Mock(id="run-1", tenant_id="t1", app_id="a1", workflow_id="w1", triggered_from="tf") + counts = repository.count_runs_with_related( + [run], + count_node_executions=lambda session, runs: (2, 1), + count_trigger_logs=lambda session, run_ids: fake_trigger_repo.count_by_run_ids(run_ids), + ) + + fake_trigger_repo.count_by_run_ids.assert_called_once_with(["run-1"]) + assert counts["node_executions"] == 2 + assert counts["offloads"] == 1 + assert counts["trigger_logs"] == 3 + assert counts["app_logs"] == 5 + assert counts["pauses"] == 2 + assert counts["pause_reasons"] == 2 + assert counts["runs"] == 1 + + class TestResumeWorkflowPause(TestDifyAPISQLAlchemyWorkflowRunRepository): """Test resume_workflow_pause method.""" diff --git a/api/tests/unit_tests/repositories/test_sqlalchemy_workflow_trigger_log_repository.py b/api/tests/unit_tests/repositories/test_sqlalchemy_workflow_trigger_log_repository.py new file mode 100644 index 0000000000..d409618211 --- /dev/null +++ b/api/tests/unit_tests/repositories/test_sqlalchemy_workflow_trigger_log_repository.py @@ -0,0 +1,31 @@ +from unittest.mock import Mock + +from sqlalchemy.dialects import postgresql +from sqlalchemy.orm import Session + +from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository + + +def test_delete_by_run_ids_executes_delete(): + session = Mock(spec=Session) + session.execute.return_value = Mock(rowcount=2) + repo = SQLAlchemyWorkflowTriggerLogRepository(session) + + deleted = repo.delete_by_run_ids(["run-1", "run-2"]) + + stmt = session.execute.call_args[0][0] + compiled_sql = str(stmt.compile(dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True})) + assert "workflow_trigger_logs" in compiled_sql + assert "'run-1'" in compiled_sql + assert "'run-2'" in compiled_sql + assert deleted == 2 + + +def test_delete_by_run_ids_empty_short_circuits(): + session = Mock(spec=Session) + repo = SQLAlchemyWorkflowTriggerLogRepository(session) + + deleted = repo.delete_by_run_ids([]) + + session.execute.assert_not_called() + assert deleted == 0 diff --git a/api/tests/unit_tests/services/test_clear_free_plan_expired_workflow_run_logs.py b/api/tests/unit_tests/services/test_clear_free_plan_expired_workflow_run_logs.py new file mode 100644 index 0000000000..8c80e2b4ad --- /dev/null +++ b/api/tests/unit_tests/services/test_clear_free_plan_expired_workflow_run_logs.py @@ -0,0 +1,327 @@ +import datetime +from typing import Any + +import pytest + +from services.billing_service import SubscriptionPlan +from services.retention.workflow_run import clear_free_plan_expired_workflow_run_logs as cleanup_module +from services.retention.workflow_run.clear_free_plan_expired_workflow_run_logs import WorkflowRunCleanup + + +class FakeRun: + def __init__( + self, + 
run_id: str, + tenant_id: str, + created_at: datetime.datetime, + app_id: str = "app-1", + workflow_id: str = "wf-1", + triggered_from: str = "workflow-run", + ) -> None: + self.id = run_id + self.tenant_id = tenant_id + self.app_id = app_id + self.workflow_id = workflow_id + self.triggered_from = triggered_from + self.created_at = created_at + + +class FakeRepo: + def __init__( + self, + batches: list[list[FakeRun]], + delete_result: dict[str, int] | None = None, + count_result: dict[str, int] | None = None, + ) -> None: + self.batches = batches + self.call_idx = 0 + self.deleted: list[list[str]] = [] + self.counted: list[list[str]] = [] + self.delete_result = delete_result or { + "runs": 0, + "node_executions": 0, + "offloads": 0, + "app_logs": 0, + "trigger_logs": 0, + "pauses": 0, + "pause_reasons": 0, + } + self.count_result = count_result or { + "runs": 0, + "node_executions": 0, + "offloads": 0, + "app_logs": 0, + "trigger_logs": 0, + "pauses": 0, + "pause_reasons": 0, + } + + def get_runs_batch_by_time_range( + self, + start_from: datetime.datetime | None, + end_before: datetime.datetime, + last_seen: tuple[datetime.datetime, str] | None, + batch_size: int, + ) -> list[FakeRun]: + if self.call_idx >= len(self.batches): + return [] + batch = self.batches[self.call_idx] + self.call_idx += 1 + return batch + + def delete_runs_with_related( + self, runs: list[FakeRun], delete_node_executions=None, delete_trigger_logs=None + ) -> dict[str, int]: + self.deleted.append([run.id for run in runs]) + result = self.delete_result.copy() + result["runs"] = len(runs) + return result + + def count_runs_with_related( + self, runs: list[FakeRun], count_node_executions=None, count_trigger_logs=None + ) -> dict[str, int]: + self.counted.append([run.id for run in runs]) + result = self.count_result.copy() + result["runs"] = len(runs) + return result + + +def plan_info(plan: str, expiration: int) -> SubscriptionPlan: + return SubscriptionPlan(plan=plan, expiration_date=expiration) + + +def create_cleanup( + monkeypatch: pytest.MonkeyPatch, + repo: FakeRepo, + *, + grace_period_days: int = 0, + whitelist: set[str] | None = None, + **kwargs: Any, +) -> WorkflowRunCleanup: + monkeypatch.setattr( + cleanup_module.dify_config, + "SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD", + grace_period_days, + ) + monkeypatch.setattr( + cleanup_module.WorkflowRunCleanup, + "_get_cleanup_whitelist", + lambda self: whitelist or set(), + ) + return WorkflowRunCleanup(workflow_run_repo=repo, **kwargs) + + +def test_filter_free_tenants_billing_disabled(monkeypatch: pytest.MonkeyPatch) -> None: + cleanup = create_cleanup(monkeypatch, repo=FakeRepo([]), days=30, batch_size=10) + + monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", False) + + def fail_bulk(_: list[str]) -> dict[str, SubscriptionPlan]: + raise RuntimeError("should not call") + + monkeypatch.setattr(cleanup_module.BillingService, "get_plan_bulk_with_cache", staticmethod(fail_bulk)) + + tenants = {"t1", "t2"} + free = cleanup._filter_free_tenants(tenants) + + assert free == tenants + + +def test_filter_free_tenants_bulk_mixed(monkeypatch: pytest.MonkeyPatch) -> None: + cleanup = create_cleanup(monkeypatch, repo=FakeRepo([]), days=30, batch_size=10) + + monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", True) + monkeypatch.setattr( + cleanup_module.BillingService, + "get_plan_bulk_with_cache", + staticmethod( + lambda tenant_ids: { + tenant_id: (plan_info("team", -1) if tenant_id == "t_paid" else plan_info("sandbox", -1)) + for 
tenant_id in tenant_ids + } + ), + ) + + free = cleanup._filter_free_tenants({"t_free", "t_paid", "t_missing"}) + + assert free == {"t_free", "t_missing"} + + +def test_filter_free_tenants_respects_grace_period(monkeypatch: pytest.MonkeyPatch) -> None: + cleanup = create_cleanup(monkeypatch, repo=FakeRepo([]), days=30, batch_size=10, grace_period_days=45) + + monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", True) + now = datetime.datetime.now(datetime.UTC) + within_grace_ts = int((now - datetime.timedelta(days=10)).timestamp()) + outside_grace_ts = int((now - datetime.timedelta(days=90)).timestamp()) + + def fake_bulk(_: list[str]) -> dict[str, SubscriptionPlan]: + return { + "recently_downgraded": plan_info("sandbox", within_grace_ts), + "long_sandbox": plan_info("sandbox", outside_grace_ts), + } + + monkeypatch.setattr(cleanup_module.BillingService, "get_plan_bulk_with_cache", staticmethod(fake_bulk)) + + free = cleanup._filter_free_tenants({"recently_downgraded", "long_sandbox"}) + + assert free == {"long_sandbox"} + + +def test_filter_free_tenants_skips_cleanup_whitelist(monkeypatch: pytest.MonkeyPatch) -> None: + cleanup = create_cleanup( + monkeypatch, + repo=FakeRepo([]), + days=30, + batch_size=10, + whitelist={"tenant_whitelist"}, + ) + + monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", True) + monkeypatch.setattr( + cleanup_module.BillingService, + "get_plan_bulk_with_cache", + staticmethod( + lambda tenant_ids: { + tenant_id: (plan_info("team", -1) if tenant_id == "t_paid" else plan_info("sandbox", -1)) + for tenant_id in tenant_ids + } + ), + ) + + tenants = {"tenant_whitelist", "tenant_regular"} + free = cleanup._filter_free_tenants(tenants) + + assert free == {"tenant_regular"} + + +def test_filter_free_tenants_bulk_failure(monkeypatch: pytest.MonkeyPatch) -> None: + cleanup = create_cleanup(monkeypatch, repo=FakeRepo([]), days=30, batch_size=10) + + monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", True) + monkeypatch.setattr( + cleanup_module.BillingService, + "get_plan_bulk_with_cache", + staticmethod(lambda tenant_ids: (_ for _ in ()).throw(RuntimeError("boom"))), + ) + + free = cleanup._filter_free_tenants({"t1", "t2"}) + + assert free == set() + + +def test_run_deletes_only_free_tenants(monkeypatch: pytest.MonkeyPatch) -> None: + cutoff = datetime.datetime.now() + repo = FakeRepo( + batches=[ + [ + FakeRun("run-free", "t_free", cutoff), + FakeRun("run-paid", "t_paid", cutoff), + ] + ] + ) + cleanup = create_cleanup(monkeypatch, repo=repo, days=30, batch_size=10) + + monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", True) + monkeypatch.setattr( + cleanup_module.BillingService, + "get_plan_bulk_with_cache", + staticmethod( + lambda tenant_ids: { + tenant_id: (plan_info("team", -1) if tenant_id == "t_paid" else plan_info("sandbox", -1)) + for tenant_id in tenant_ids + } + ), + ) + + cleanup.run() + + assert repo.deleted == [["run-free"]] + + +def test_run_skips_when_no_free_tenants(monkeypatch: pytest.MonkeyPatch) -> None: + cutoff = datetime.datetime.now() + repo = FakeRepo(batches=[[FakeRun("run-paid", "t_paid", cutoff)]]) + cleanup = create_cleanup(monkeypatch, repo=repo, days=30, batch_size=10) + + monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", True) + monkeypatch.setattr( + cleanup_module.BillingService, + "get_plan_bulk_with_cache", + staticmethod(lambda tenant_ids: {tenant_id: plan_info("team", 1893456000) for tenant_id in tenant_ids}), + ) + + cleanup.run() + + assert 
repo.deleted == [] + + +def test_run_exits_on_empty_batch(monkeypatch: pytest.MonkeyPatch) -> None: + cleanup = create_cleanup(monkeypatch, repo=FakeRepo([]), days=30, batch_size=10) + + cleanup.run() + + +def test_run_dry_run_skips_deletions(monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str]) -> None: + cutoff = datetime.datetime.now() + repo = FakeRepo( + batches=[[FakeRun("run-free", "t_free", cutoff)]], + count_result={ + "runs": 0, + "node_executions": 2, + "offloads": 1, + "app_logs": 3, + "trigger_logs": 4, + "pauses": 5, + "pause_reasons": 6, + }, + ) + cleanup = create_cleanup(monkeypatch, repo=repo, days=30, batch_size=10, dry_run=True) + + monkeypatch.setattr(cleanup_module.dify_config, "BILLING_ENABLED", False) + + cleanup.run() + + assert repo.deleted == [] + assert repo.counted == [["run-free"]] + captured = capsys.readouterr().out + assert "Dry run mode enabled" in captured + assert "would delete 1 runs" in captured + assert "related records" in captured + assert "node_executions 2" in captured + assert "offloads 1" in captured + assert "app_logs 3" in captured + assert "trigger_logs 4" in captured + assert "pauses 5" in captured + assert "pause_reasons 6" in captured + + +def test_between_sets_window_bounds(monkeypatch: pytest.MonkeyPatch) -> None: + start_from = datetime.datetime(2024, 5, 1, 0, 0, 0) + end_before = datetime.datetime(2024, 6, 1, 0, 0, 0) + cleanup = create_cleanup( + monkeypatch, repo=FakeRepo([]), days=30, batch_size=10, start_from=start_from, end_before=end_before + ) + + assert cleanup.window_start == start_from + assert cleanup.window_end == end_before + + +def test_between_requires_both_boundaries(monkeypatch: pytest.MonkeyPatch) -> None: + with pytest.raises(ValueError): + create_cleanup( + monkeypatch, repo=FakeRepo([]), days=30, batch_size=10, start_from=datetime.datetime.now(), end_before=None + ) + with pytest.raises(ValueError): + create_cleanup( + monkeypatch, repo=FakeRepo([]), days=30, batch_size=10, start_from=None, end_before=datetime.datetime.now() + ) + + +def test_between_requires_end_after_start(monkeypatch: pytest.MonkeyPatch) -> None: + start_from = datetime.datetime(2024, 6, 1, 0, 0, 0) + end_before = datetime.datetime(2024, 5, 1, 0, 0, 0) + with pytest.raises(ValueError): + create_cleanup( + monkeypatch, repo=FakeRepo([]), days=30, batch_size=10, start_from=start_from, end_before=end_before + ) diff --git a/docker/.env.example b/docker/.env.example index 09ee1060e2..e7cb8711ce 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -1478,6 +1478,7 @@ ENABLE_CLEAN_UNUSED_DATASETS_TASK=false ENABLE_CREATE_TIDB_SERVERLESS_TASK=false ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK=false ENABLE_CLEAN_MESSAGES=false +ENABLE_WORKFLOW_RUN_CLEANUP_TASK=false ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false ENABLE_DATASETS_QUEUE_MONITOR=false ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 712de84c62..041f60aaa2 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -662,6 +662,7 @@ x-shared-env: &shared-api-worker-env ENABLE_CREATE_TIDB_SERVERLESS_TASK: ${ENABLE_CREATE_TIDB_SERVERLESS_TASK:-false} ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK: ${ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK:-false} ENABLE_CLEAN_MESSAGES: ${ENABLE_CLEAN_MESSAGES:-false} + ENABLE_WORKFLOW_RUN_CLEANUP_TASK: ${ENABLE_WORKFLOW_RUN_CLEANUP_TASK:-false} ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK: ${ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK:-false} ENABLE_DATASETS_QUEUE_MONITOR: 
${ENABLE_DATASETS_QUEUE_MONITOR:-false} ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK: ${ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK:-true} From b63dfbf654ed2191aa94aa4bcf8d09c09cc90759 Mon Sep 17 00:00:00 2001 From: QuantumGhost Date: Mon, 12 Jan 2026 16:23:18 +0800 Subject: [PATCH 4/4] fix(api): defer streaming response until referenced variables are updated (#30832) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- .../nodes/variable_assigner/v1/node.py | 9 + ...ng_conversation_variables_v1_overwrite.yml | 158 ++++++++++++++++++ .../test_streaming_conversation_variables.py | 30 ++++ 3 files changed, 197 insertions(+) create mode 100644 api/tests/fixtures/workflow/test_streaming_conversation_variables_v1_overwrite.yml diff --git a/api/core/workflow/nodes/variable_assigner/v1/node.py b/api/core/workflow/nodes/variable_assigner/v1/node.py index d2ea7d94ea..ac2870aa65 100644 --- a/api/core/workflow/nodes/variable_assigner/v1/node.py +++ b/api/core/workflow/nodes/variable_assigner/v1/node.py @@ -33,6 +33,15 @@ class VariableAssignerNode(Node[VariableAssignerData]): graph_runtime_state=graph_runtime_state, ) + def blocks_variable_output(self, variable_selectors: set[tuple[str, ...]]) -> bool: + """ + Check if this Variable Assigner node blocks the output of specific variables. + + Returns True if this node updates any of the requested conversation variables. + """ + assigned_selector = tuple(self.node_data.assigned_variable_selector) + return assigned_selector in variable_selectors + @classmethod def version(cls) -> str: return "1" diff --git a/api/tests/fixtures/workflow/test_streaming_conversation_variables_v1_overwrite.yml b/api/tests/fixtures/workflow/test_streaming_conversation_variables_v1_overwrite.yml new file mode 100644 index 0000000000..5b2a6260e9 --- /dev/null +++ b/api/tests/fixtures/workflow/test_streaming_conversation_variables_v1_overwrite.yml @@ -0,0 +1,158 @@ +app: + description: Validate v1 Variable Assigner blocks streaming until conversation variable is updated. 
+ icon: 🤖 + icon_background: '#FFEAD5' + mode: advanced-chat + name: test_streaming_conversation_variables_v1_overwrite + use_icon_as_answer_icon: false +dependencies: [] +kind: app +version: 0.5.0 +workflow: + conversation_variables: + - description: '' + id: 6ddf2d7f-3d1b-4bb0-9a5e-9b0c87c7b5e6 + name: conv_var + selector: + - conversation + - conv_var + value: default + value_type: string + environment_variables: [] + features: + file_upload: + allowed_file_extensions: + - .JPG + - .JPEG + - .PNG + - .GIF + - .WEBP + - .SVG + allowed_file_types: + - image + allowed_file_upload_methods: + - local_file + - remote_url + enabled: false + fileUploadConfig: + audio_file_size_limit: 50 + batch_count_limit: 5 + file_size_limit: 15 + image_file_size_limit: 10 + video_file_size_limit: 100 + workflow_file_upload_limit: 10 + image: + enabled: false + number_limits: 3 + transfer_methods: + - local_file + - remote_url + number_limits: 3 + opening_statement: '' + retriever_resource: + enabled: true + sensitive_word_avoidance: + enabled: false + speech_to_text: + enabled: false + suggested_questions: [] + suggested_questions_after_answer: + enabled: false + text_to_speech: + enabled: false + language: '' + voice: '' + graph: + edges: + - data: + isInIteration: false + isInLoop: false + sourceType: start + targetType: assigner + id: start-source-assigner-target + source: start + sourceHandle: source + target: assigner + targetHandle: target + type: custom + zIndex: 0 + - data: + isInLoop: false + sourceType: assigner + targetType: answer + id: assigner-source-answer-target + source: assigner + sourceHandle: source + target: answer + targetHandle: target + type: custom + zIndex: 0 + nodes: + - data: + desc: '' + selected: false + title: Start + type: start + variables: [] + height: 54 + id: start + position: + x: 30 + y: 253 + positionAbsolute: + x: 30 + y: 253 + selected: false + sourcePosition: right + targetPosition: left + type: custom + width: 244 + - data: + answer: 'Current Value Of `conv_var` is:{{#conversation.conv_var#}}' + desc: '' + selected: false + title: Answer + type: answer + variables: [] + height: 106 + id: answer + position: + x: 638 + y: 253 + positionAbsolute: + x: 638 + y: 253 + selected: true + sourcePosition: right + targetPosition: left + type: custom + width: 244 + - data: + assigned_variable_selector: + - conversation + - conv_var + desc: '' + input_variable_selector: + - sys + - query + selected: false + title: Variable Assigner + type: assigner + write_mode: over-write + height: 84 + id: assigner + position: + x: 334 + y: 253 + positionAbsolute: + x: 334 + y: 253 + selected: false + sourcePosition: right + targetPosition: left + type: custom + width: 244 + viewport: + x: 0 + y: 0 + zoom: 0.7 diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_streaming_conversation_variables.py b/api/tests/unit_tests/core/workflow/graph_engine/test_streaming_conversation_variables.py index 1f4c063bf0..99157a7c3e 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_streaming_conversation_variables.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_streaming_conversation_variables.py @@ -45,3 +45,33 @@ def test_streaming_conversation_variables(): runner = TableTestRunner() result = runner.run_test_case(case) assert result.success, f"Test failed: {result.error}" + + +def test_streaming_conversation_variables_v1_overwrite_waits_for_assignment(): + fixture_name = "test_streaming_conversation_variables_v1_overwrite" + input_query = "overwrite-value" + + case = 
WorkflowTestCase( + fixture_path=fixture_name, + use_auto_mock=False, + mock_config=MockConfigBuilder().build(), + query=input_query, + inputs={}, + expected_outputs={"answer": f"Current Value Of `conv_var` is:{input_query}"}, + ) + + runner = TableTestRunner() + result = runner.run_test_case(case) + assert result.success, f"Test failed: {result.error}" + + events = result.events + conv_var_chunk_events = [ + event + for event in events + if isinstance(event, NodeRunStreamChunkEvent) and tuple(event.selector) == ("conversation", "conv_var") + ] + + assert conv_var_chunk_events, "Expected conversation variable chunk events to be emitted" + assert all(event.chunk == input_query for event in conv_var_chunk_events), ( + "Expected streamed conversation variable value to match the input query" + )
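A minimal usage sketch of the retention service added in patch 3/4 (illustration only, not part of the patches): it assumes an initialized Flask app context so the default repository can bind to db.engine, and the window values below are made up.

    import datetime

    from services.retention.workflow_run.clear_free_plan_expired_workflow_run_logs import WorkflowRunCleanup

    # Inspect an explicit one-month window without deleting anything.
    # `days` is required but ignored once start_from/end_before are both given;
    # dry_run=True routes through count_runs_with_related instead of the delete path.
    WorkflowRunCleanup(
        days=30,
        batch_size=1000,
        start_from=datetime.datetime(2025, 11, 1),
        end_before=datetime.datetime(2025, 12, 1),
        dry_run=True,
    ).run()

The scheduled clean_workflow_runs_task exercises the same path with both bounds unset, deriving the cutoff from SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS, while the clean_workflow_runs CLI command wires the same parameters to command-line options.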