+ There are {currentQueueSize} + user(s) sharing the same GPU, affecting real-time performance. Maximum queue size is {maxQueueSize}. + Duplicate and run it on your own GPU. +
+ {/if} +Loading...
+diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..fe4fa26ff55111c9a7db1c086c17591625351bd9 --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +__pycache__/ +venv/ +public/ +*.pem \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..d27e0a102d7cad10945387adf5ebce603fe785c6 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,48 @@ +FROM nvidia/cuda:12.1.1-cudnn8-devel-ubuntu22.04 + +ARG DEBIAN_FRONTEND=noninteractive + +ENV PYTHONUNBUFFERED=1 +ENV NODE_MAJOR=20 + +RUN apt-get update && apt-get install --no-install-recommends -y \ + build-essential \ + python3.9 \ + python3-pip \ + python3-dev \ + git \ + ffmpeg \ + google-perftools \ + ca-certificates curl gnupg \ + && apt-get clean && rm -rf /var/lib/apt/lists/* + +WORKDIR /code + +RUN mkdir -p /etc/apt/keyrings +RUN curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg +RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_${NODE_MAJOR}.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list > /dev/null +RUN apt-get update && apt-get install nodejs -y + +COPY ./requirements.txt /code/requirements.txt + +# Set up a new user named "user" with user ID 1000 +RUN useradd -m -u 1000 user +# Switch to the "user" user +USER user +# Set home to the user's home directory +ENV HOME=/home/user \ + PATH=/home/user/.local/bin:$PATH \ + PYTHONPATH=$HOME/app \ + PYTHONUNBUFFERED=1 \ + SYSTEM=spaces + +RUN pip3 install --no-cache-dir --upgrade --pre -r /code/requirements.txt + +# Set the working directory to the user's home directory +WORKDIR $HOME/app + +# Copy the current directory contents into the container at $HOME/app setting the owner to the user +COPY --chown=user . 
$HOME/app + +ENV LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libtcmalloc.so.4 +CMD ["./build-run.sh"] \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md index dad88f0288e7aaba5e30135b6f3d8a35f35471ec..b1303d40c09ada07fa30e5cc3db1b61c528c5e5e 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,193 @@ --- -title: Real Time Model -emoji: 🐢 -colorFrom: green -colorTo: yellow +title: Real-Time Latent Consistency Model Image-to-Image ControlNet +emoji: 🖼️🖼️ +colorFrom: gray +colorTo: indigo sdk: docker pinned: false +suggested_hardware: a10g-small +disable_embedding: true --- -Check out the configuration reference at https://huggingface.co./docs/hub/spaces-config-reference +# Real-Time Latent Consistency Model + +This demo showcases [Latent Consistency Model (LCM)](https://latent-consistency-models.github.io/) using [Diffusers](https://huggingface.co./docs/diffusers/using-diffusers/lcm) with a MJPEG stream server. 
You can read more about LCM + LoRAs with diffusers [here](https://huggingface.co./blog/lcm_lora). + +You need a webcam to run this demo. 🤗 + +See a collection with live demos [here](https://huggingface.co./collections/latent-consistency/latent-consistency-model-demos-654e90c52adb0688a0acbe6f) + +## Running Locally + +You need CUDA and Python 3.10, Node > 19, Mac with an M1/M2/M3 chip or Intel Arc GPU + + +## Install + +```bash +python -m venv venv +source venv/bin/activate +pip3 install -r server/requirements.txt +cd frontend && npm install && npm run build && cd .. +python server/main.py --reload --pipeline img2imgSDTurbo +``` + +Don't forget to build the frontend! + +```bash +cd frontend && npm install && npm run build && cd .. +``` + +# Pipelines +You can build your own pipeline following the examples [here](pipelines). + + +# LCM +### Image to Image + +```bash +python server/main.py --reload --pipeline img2img +``` + +# LCM +### Text to Image + +```bash +python server/main.py --reload --pipeline txt2img +``` + +### Image to Image ControlNet Canny + +```bash +python server/main.py --reload --pipeline controlnet +``` + + +# LCM + LoRA + +Using LCM-LoRA, giving it the super power of doing inference in as little as 4 steps. 
[Learn more here](https://huggingface.co./blog/lcm_lora) or [technical report](https://huggingface.co./papers/2311.05556) + + +### Image to Image ControlNet Canny LoRa + +```bash +python server/main.py --reload --pipeline controlnetLoraSD15 +``` +or SDXL, note that SDXL is slower than SD15 since the inference runs on 1024x1024 images + +```bash +python server/main.py --reload --pipeline controlnetLoraSDXL +``` + +### Text to Image + +```bash +python server/main.py --reload --pipeline txt2imgLora +``` + +```bash +python server/main.py --reload --pipeline txt2imgLoraSDXL +``` +# Available Pipelines + +#### [LCM](https://huggingface.co./SimianLuo/LCM_Dreamshaper_v7) + +`img2img` +`txt2img` +`controlnet` +`txt2imgLora` +`controlnetLoraSD15` + +#### [SD15](https://huggingface.co./stabilityai/stable-diffusion-xl-base-1.0) +`controlnetLoraSDXL` +`txt2imgLoraSDXL` + +#### [SDXL Turbo](https://huggingface.co./stabilityai/sd-xl-turbo) + +`img2imgSDXLTurbo` +`controlnetSDXLTurbo` + + +#### [SDTurbo](https://huggingface.co./stabilityai/sd-turbo) +`img2imgSDTurbo` +`controlnetSDTurbo` + +#### [Segmind-Vega](https://huggingface.co./segmind/Segmind-Vega) +`controlnetSegmindVegaRT` +`img2imgSegmindVegaRT` + + +### Setting environment variables + + +* `--host`: Host address (default: 0.0.0.0) +* `--port`: Port number (default: 7860) +* `--reload`: Reload code on change +* `--max-queue-size`: Maximum queue size (optional) +* `--timeout`: Timeout period (optional) +* `--safety-checker`: Enable Safety Checker (optional) +* `--torch-compile`: Use Torch Compile +* `--use-taesd` / `--no-taesd`: Use Tiny Autoencoder +* `--pipeline`: Pipeline to use (default: "txt2img") +* `--ssl-certfile`: SSL Certificate File (optional) +* `--ssl-keyfile`: SSL Key File (optional) +* `--debug`: Print Inference time +* `--compel`: Compel option +* `--sfast`: Enable Stable Fast +* `--onediff`: Enable OneDiff + +If you run using `bash build-run.sh` you can set `PIPELINE` variables to choose the pipeline you 
want to run + +```bash +PIPELINE=txt2imgLoraSDXL bash build-run.sh +``` + +and setting environment variables + +```bash +TIMEOUT=120 SAFETY_CHECKER=True MAX_QUEUE_SIZE=4 python server/main.py --reload --pipeline txt2imgLoraSDXL +``` + +If you're running locally and want to test it on Mobile Safari, the webserver needs to be served over HTTPS, or follow this instruction on my [comment](https://github.com/radames/Real-Time-Latent-Consistency-Model/issues/17#issuecomment-1811957196) + +```bash +openssl req -newkey rsa:4096 -nodes -keyout key.pem -x509 -days 365 -out certificate.pem +python server/main.py --reload --ssl-certfile=certificate.pem --ssl-keyfile=key.pem +``` + +## Docker + +You need NVIDIA Container Toolkit for Docker, defaults to `controlnet` + +```bash +docker build -t lcm-live . +docker run -ti -p 7860:7860 --gpus all lcm-live +``` + +reuse models data from host to avoid downloading them again, you can change `~/.cache/huggingface` to any other directory, but if you use huggingface-cli locally, you can share the same cache + +```bash +docker run -ti -p 7860:7860 -e HF_HOME=/data -v ~/.cache/huggingface:/data --gpus all lcm-live +``` + + +or with environment variables + +```bash +docker run -ti -e PIPELINE=txt2imgLoraSDXL -p 7860:7860 --gpus all lcm-live +``` + + +# Demo on Hugging Face + + +* [radames/Real-Time-Latent-Consistency-Model](https://huggingface.co./spaces/radames/Real-Time-Latent-Consistency-Model) +* [radames/Real-Time-SD-Turbo](https://huggingface.co./spaces/radames/Real-Time-SD-Turbo) +* [latent-consistency/Real-Time-LCM-ControlNet-Lora-SD1.5](https://huggingface.co./spaces/latent-consistency/Real-Time-LCM-ControlNet-Lora-SD1.5) +* [latent-consistency/Real-Time-LCM-Text-to-Image-Lora-SD1.5](https://huggingface.co./spaces/latent-consistency/Real-Time-LCM-Text-to-Image-Lora-SD1.5) +* [radames/Real-Time-Latent-Consistency-Model-Text-To-Image](https://huggingface.co./spaces/radames/Real-Time-Latent-Consistency-Model-Text-To-Image) + + + + 
+https://github.com/radames/Real-Time-Latent-Consistency-Model/assets/102277/c4003ac5-e7ff-44c0-97d3-464bb659de70 diff --git a/build-run.sh b/build-run.sh new file mode 100755 index 0000000000000000000000000000000000000000..c40dc4735d9fd1ce1c9ed2821685e764fe568a24 --- /dev/null +++ b/build-run.sh @@ -0,0 +1,20 @@ +#!/bin/bash +cd frontend +npm install +npm run build +if [ $? -eq 0 ]; then + echo -e "\033[1;32m\nfrontend build success \033[0m" +else + echo -e "\033[1;31m\nfrontend build failed\n\033[0m" >&2 exit 1 +fi +cd ../ +#check if var PIPELINE is set otherwise get default +if [ -z ${PIPELINE+x} ]; then + PIPELINE="controlnet" +fi +if [ -z ${COMPILE+x} ]; then + COMPILE="--sfast" +fi +echo -e "\033[1;32m\npipeline: $PIPELINE \033[0m" +echo -e "\033[1;32m\ncompile: $COMPILE \033[0m" +python3 ./server/main.py --port 7860 --host 0.0.0.0 --pipeline $PIPELINE $COMPILE \ No newline at end of file diff --git a/frontend/.eslintignore b/frontend/.eslintignore new file mode 100644 index 0000000000000000000000000000000000000000..38972655faff07d2cc0383044bbf9f43b22c2248 --- /dev/null +++ b/frontend/.eslintignore @@ -0,0 +1,13 @@ +.DS_Store +node_modules +/build +/.svelte-kit +/package +.env +.env.* +!.env.example + +# Ignore files for PNPM, NPM and YARN +pnpm-lock.yaml +package-lock.json +yarn.lock diff --git a/frontend/.eslintrc.cjs b/frontend/.eslintrc.cjs new file mode 100644 index 0000000000000000000000000000000000000000..4b29a0161622dc51bb2241de737698e3ad4a917f --- /dev/null +++ b/frontend/.eslintrc.cjs @@ -0,0 +1,30 @@ +module.exports = { + root: true, + extends: [ + 'eslint:recommended', + 'plugin:@typescript-eslint/recommended', + 'plugin:svelte/recommended', + 'prettier' + ], + parser: '@typescript-eslint/parser', + plugins: ['@typescript-eslint'], + parserOptions: { + sourceType: 'module', + ecmaVersion: 2020, + extraFileExtensions: ['.svelte'] + }, + env: { + browser: true, + es2017: true, + node: true + }, + overrides: [ + { + files: ['*.svelte'], + parser: 
'svelte-eslint-parser', + parserOptions: { + parser: '@typescript-eslint/parser' + } + } + ] +}; diff --git a/frontend/.gitignore b/frontend/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..690b0a749f7144f3dd4ff59b4759799f1d38364e --- /dev/null +++ b/frontend/.gitignore @@ -0,0 +1,11 @@ +.DS_Store +node_modules +/build +/.svelte-kit +/package +.env +.env.* +!.env.example +vite.config.js.timestamp-* +vite.config.ts.timestamp-* +public diff --git a/frontend/.npmrc b/frontend/.npmrc new file mode 100644 index 0000000000000000000000000000000000000000..b6f27f135954640c8cc5bfd7b8c9922ca6eb2aad --- /dev/null +++ b/frontend/.npmrc @@ -0,0 +1 @@ +engine-strict=true diff --git a/frontend/.nvmrc b/frontend/.nvmrc new file mode 100644 index 0000000000000000000000000000000000000000..42e31a00cf222fb338fdb8a2fb66adcfdfd785c2 --- /dev/null +++ b/frontend/.nvmrc @@ -0,0 +1 @@ +v20.14.0 diff --git a/frontend/.prettierignore b/frontend/.prettierignore new file mode 100644 index 0000000000000000000000000000000000000000..38972655faff07d2cc0383044bbf9f43b22c2248 --- /dev/null +++ b/frontend/.prettierignore @@ -0,0 +1,13 @@ +.DS_Store +node_modules +/build +/.svelte-kit +/package +.env +.env.* +!.env.example + +# Ignore files for PNPM, NPM and YARN +pnpm-lock.yaml +package-lock.json +yarn.lock diff --git a/frontend/.prettierrc b/frontend/.prettierrc new file mode 100644 index 0000000000000000000000000000000000000000..34a470beb1a69badb3632a52621afdf960238001 --- /dev/null +++ b/frontend/.prettierrc @@ -0,0 +1,19 @@ +{ + "useTabs": false, + "singleQuote": true, + "trailingComma": "none", + "printWidth": 100, + "plugins": [ + "prettier-plugin-svelte", + "prettier-plugin-organize-imports", + "prettier-plugin-tailwindcss" + ], + "overrides": [ + { + "files": "*.svelte", + "options": { + "parser": "svelte" + } + } + ] +} diff --git a/frontend/README.md b/frontend/README.md new file mode 100644 index 
0000000000000000000000000000000000000000..5c91169b0ca6508bb24301c957a9edea5abf2b01 --- /dev/null +++ b/frontend/README.md @@ -0,0 +1,38 @@ +# create-svelte + +Everything you need to build a Svelte project, powered by [`create-svelte`](https://github.com/sveltejs/kit/tree/master/packages/create-svelte). + +## Creating a project + +If you're seeing this, you've probably already done this step. Congrats! + +```bash +# create a new project in the current directory +npm create svelte@latest + +# create a new project in my-app +npm create svelte@latest my-app +``` + +## Developing + +Once you've created a project and installed dependencies with `npm install` (or `pnpm install` or `yarn`), start a development server: + +```bash +npm run dev + +# or start the server and open the app in a new browser tab +npm run dev -- --open +``` + +## Building + +To create a production version of your app: + +```bash +npm run build +``` + +You can preview the production build with `npm run preview`. + +> To deploy your app, you may need to install an [adapter](https://kit.svelte.dev/docs/adapters) for your target environment. 
diff --git a/frontend/package-lock.json b/frontend/package-lock.json new file mode 100644 index 0000000000000000000000000000000000000000..d49c7e5a14ddbbf804e3ea90859e7c31e8a8c0a5 --- /dev/null +++ b/frontend/package-lock.json @@ -0,0 +1,4446 @@ +{ + "name": "frontend", + "version": "0.0.1", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "frontend", + "version": "0.0.1", + "dependencies": { + "piexifjs": "^1.0.6", + "rvfc-polyfill": "^1.0.7" + }, + "devDependencies": { + "@sveltejs/adapter-auto": "^3.0.0", + "@sveltejs/adapter-static": "^3", + "@sveltejs/kit": "^2.0.0", + "@sveltejs/vite-plugin-svelte": "^3.1.1", + "@types/eslint": "^8.56.0", + "@typescript-eslint/eslint-plugin": "^7.12.0", + "@typescript-eslint/parser": "^7.12.0", + "autoprefixer": "^10.4.16", + "eslint": "^8.56.0", + "eslint-config-prettier": "^9.1.0", + "eslint-plugin-svelte": "^2.35.1", + "postcss": "^8.4.31", + "prettier": "^3.3.1", + "prettier-plugin-organize-imports": "^3.2.4", + "prettier-plugin-svelte": "^3.2.4", + "prettier-plugin-tailwindcss": "^0.6.1", + "svelte": "^4.2.18", + "svelte-check": "^3.8.0", + "tailwindcss": "^3.4.4", + "tslib": "^2.6.3", + "typescript": "^5.0.0", + "vite": "^5.0.3" + } + }, + "node_modules/@alloc/quick-lru": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", + "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@ampproject/remapping": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "dev": true, + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, 
+ "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.20.2.tgz", + "integrity": "sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.20.2.tgz", + "integrity": "sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.20.2.tgz", + "integrity": "sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.20.2.tgz", + "integrity": "sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.20.2.tgz", + "integrity": "sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + 
"node": ">=12" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.20.2.tgz", + "integrity": "sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.20.2.tgz", + "integrity": "sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.20.2.tgz", + "integrity": "sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.20.2.tgz", + "integrity": "sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.20.2.tgz", + "integrity": "sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + 
"node_modules/@esbuild/linux-ia32": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.20.2.tgz", + "integrity": "sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.20.2.tgz", + "integrity": "sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==", + "cpu": [ + "loong64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.20.2.tgz", + "integrity": "sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==", + "cpu": [ + "mips64el" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.20.2.tgz", + "integrity": "sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.20.2.tgz", + "integrity": "sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + 
"node_modules/@esbuild/linux-s390x": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.20.2.tgz", + "integrity": "sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.20.2.tgz", + "integrity": "sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.20.2.tgz", + "integrity": "sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.20.2.tgz", + "integrity": "sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.20.2.tgz", + "integrity": "sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-arm64": { + 
"version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.20.2.tgz", + "integrity": "sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.20.2.tgz", + "integrity": "sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.20.2.tgz", + "integrity": "sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", + "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.10.0", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.10.0.tgz", + "integrity": "sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA==", + "dev": true, + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + 
"node_modules/@eslint/eslintrc": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", + "dev": true, + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.6.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@eslint/eslintrc/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@eslint/js": { + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz", + "integrity": "sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.11.14", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz", + "integrity": 
"sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==", + "dev": true, + "dependencies": { + "@humanwhocodes/object-schema": "^2.0.2", + "debug": "^4.3.1", + "minimatch": "^3.0.5" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", + "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", + "dev": true + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": 
"sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz", + "integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==", + "dev": true, + "dependencies": { + "@jridgewell/set-array": "^1.2.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": 
"sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.4.15", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", + "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==", + "dev": true + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "dev": true, + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": 
"https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@polka/url": { + "version": "1.0.0-next.25", + "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.25.tgz", + "integrity": "sha512-j7P6Rgr3mmtdkeDGTe0E/aYyWEWVtc5yFXtHCRHs28/jptDEWfaVOc5T7cblqy1XKPPfCxJc/8DwQ5YgLOZOVQ==", + "dev": true + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.18.0.tgz", + "integrity": "sha512-Tya6xypR10giZV1XzxmH5wr25VcZSncG0pZIjfePT0OVBvqNEurzValetGNarVrGiq66EBVAFn15iYX4w6FKgQ==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.18.0.tgz", + "integrity": "sha512-avCea0RAP03lTsDhEyfy+hpfr85KfyTctMADqHVhLAF3MlIkq83CP8UfAHUssgXTYd+6er6PaAhx/QGv4L1EiA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.18.0.tgz", + "integrity": "sha512-IWfdwU7KDSm07Ty0PuA/W2JYoZ4iTj3TUQjkVsO/6U+4I1jN5lcR71ZEvRh52sDOERdnNhhHU57UITXz5jC1/w==", + "cpu": 
[ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.18.0.tgz", + "integrity": "sha512-n2LMsUz7Ynu7DoQrSQkBf8iNrjOGyPLrdSg802vk6XT3FtsgX6JbE8IHRvposskFm9SNxzkLYGSq9QdpLYpRNA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.18.0.tgz", + "integrity": "sha512-C/zbRYRXFjWvz9Z4haRxcTdnkPt1BtCkz+7RtBSuNmKzMzp3ZxdM28Mpccn6pt28/UWUCTXa+b0Mx1k3g6NOMA==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.18.0.tgz", + "integrity": "sha512-l3m9ewPgjQSXrUMHg93vt0hYCGnrMOcUpTz6FLtbwljo2HluS4zTXFy2571YQbisTnfTKPZ01u/ukJdQTLGh9A==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.18.0.tgz", + "integrity": "sha512-rJ5D47d8WD7J+7STKdCUAgmQk49xuFrRi9pZkWoRD1UeSMakbcepWXPF8ycChBoAqs1pb2wzvbY6Q33WmN2ftw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.18.0.tgz", + "integrity": "sha512-be6Yx37b24ZwxQ+wOQXXLZqpq4jTckJhtGlWGZs68TgdKXJgw54lUUoFYrg6Zs/kjzAQwEwYbp8JxZVzZLRepQ==", + "cpu": [ + "arm64" + ], + "dev": true, + 
"optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.18.0.tgz", + "integrity": "sha512-hNVMQK+qrA9Todu9+wqrXOHxFiD5YmdEi3paj6vP02Kx1hjd2LLYR2eaN7DsEshg09+9uzWi2W18MJDlG0cxJA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.18.0.tgz", + "integrity": "sha512-ROCM7i+m1NfdrsmvwSzoxp9HFtmKGHEqu5NNDiZWQtXLA8S5HBCkVvKAxJ8U+CVctHwV2Gb5VUaK7UAkzhDjlg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.18.0.tgz", + "integrity": "sha512-0UyyRHyDN42QL+NbqevXIIUnKA47A+45WyasO+y2bGJ1mhQrfrtXUpTxCOrfxCR4esV3/RLYyucGVPiUsO8xjg==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.18.0.tgz", + "integrity": "sha512-xuglR2rBVHA5UsI8h8UbX4VJ470PtGCf5Vpswh7p2ukaqBGFTnsfzxUBetoWBWymHMxbIG0Cmx7Y9qDZzr648w==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.18.0.tgz", + "integrity": "sha512-LKaqQL9osY/ir2geuLVvRRs+utWUNilzdE90TpyoX0eNqPzWjRm14oMEE+YLve4k/NAqCdPkGYDaDF5Sw+xBfg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] 
+ }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.18.0.tgz", + "integrity": "sha512-7J6TkZQFGo9qBKH0pk2cEVSRhJbL6MtfWxth7Y5YmZs57Pi+4x6c2dStAUvaQkHQLnEQv1jzBUW43GvZW8OFqA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.18.0.tgz", + "integrity": "sha512-Txjh+IxBPbkUB9+SXZMpv+b/vnTEtFyfWZgJ6iyCmt2tdx0OF5WhFowLmnh8ENGNpfUlUZkdI//4IEmhwPieNg==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.18.0.tgz", + "integrity": "sha512-UOo5FdvOL0+eIVTgS4tIdbW+TtnBLWg1YBCcU2KWM7nuNwRz9bksDX1bekJJCpu25N1DVWaCwnT39dVQxzqS8g==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@sveltejs/adapter-auto": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@sveltejs/adapter-auto/-/adapter-auto-3.2.1.tgz", + "integrity": "sha512-/3xx8ZFCD5UBc/7AbyXkFF3HNCzWAp2xncH8HA4doGjoGQEN7PmwiRx4Y9nOzi4mqDqYYUic0gaIAE2khWWU4Q==", + "dev": true, + "dependencies": { + "import-meta-resolve": "^4.1.0" + }, + "peerDependencies": { + "@sveltejs/kit": "^2.0.0" + } + }, + "node_modules/@sveltejs/adapter-static": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@sveltejs/adapter-static/-/adapter-static-3.0.1.tgz", + "integrity": "sha512-6lMvf7xYEJ+oGeR5L8DFJJrowkefTK6ZgA4JiMqoClMkKq0s6yvsd3FZfCFvX1fQ0tpCD7fkuRVHsnUVgsHyNg==", + "dev": true, + "peerDependencies": { + "@sveltejs/kit": "^2.0.0" + } + }, + "node_modules/@sveltejs/kit": { + "version": "2.5.10", + "resolved": 
"https://registry.npmjs.org/@sveltejs/kit/-/kit-2.5.10.tgz", + "integrity": "sha512-OqoyTmFG2cYmCFAdBfW+Qxbg8m23H4dv6KqwEt7ofr/ROcfcIl3Z/VT56L22H9f0uNZyr+9Bs1eh2gedOCK9kA==", + "dev": true, + "hasInstallScript": true, + "dependencies": { + "@types/cookie": "^0.6.0", + "cookie": "^0.6.0", + "devalue": "^5.0.0", + "esm-env": "^1.0.0", + "import-meta-resolve": "^4.1.0", + "kleur": "^4.1.5", + "magic-string": "^0.30.5", + "mrmime": "^2.0.0", + "sade": "^1.8.1", + "set-cookie-parser": "^2.6.0", + "sirv": "^2.0.4", + "tiny-glob": "^0.2.9" + }, + "bin": { + "svelte-kit": "svelte-kit.js" + }, + "engines": { + "node": ">=18.13" + }, + "peerDependencies": { + "@sveltejs/vite-plugin-svelte": "^3.0.0", + "svelte": "^4.0.0 || ^5.0.0-next.0", + "vite": "^5.0.3" + } + }, + "node_modules/@sveltejs/vite-plugin-svelte": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte/-/vite-plugin-svelte-3.1.1.tgz", + "integrity": "sha512-rimpFEAboBBHIlzISibg94iP09k/KYdHgVhJlcsTfn7KMBhc70jFX/GRWkRdFCc2fdnk+4+Bdfej23cMDnJS6A==", + "dev": true, + "dependencies": { + "@sveltejs/vite-plugin-svelte-inspector": "^2.1.0", + "debug": "^4.3.4", + "deepmerge": "^4.3.1", + "kleur": "^4.1.5", + "magic-string": "^0.30.10", + "svelte-hmr": "^0.16.0", + "vitefu": "^0.2.5" + }, + "engines": { + "node": "^18.0.0 || >=20" + }, + "peerDependencies": { + "svelte": "^4.0.0 || ^5.0.0-next.0", + "vite": "^5.0.0" + } + }, + "node_modules/@sveltejs/vite-plugin-svelte-inspector": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte-inspector/-/vite-plugin-svelte-inspector-2.1.0.tgz", + "integrity": "sha512-9QX28IymvBlSCqsCll5t0kQVxipsfhFFL+L2t3nTWfXnddYwxBuAEtTtlaVQpRz9c37BhJjltSeY4AJSC03SSg==", + "dev": true, + "dependencies": { + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.0.0 || >=20" + }, + "peerDependencies": { + "@sveltejs/vite-plugin-svelte": "^3.0.0", + "svelte": "^4.0.0 || ^5.0.0-next.0", + "vite": "^5.0.0" + } + }, + 
"node_modules/@types/cookie": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==", + "dev": true + }, + "node_modules/@types/eslint": { + "version": "8.56.10", + "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.56.10.tgz", + "integrity": "sha512-Shavhk87gCtY2fhXDctcfS3e6FdxWkCx1iUZ9eEUbh7rTqlZT0/IzOkCOVt0fCjcFuZ9FPYfuezTBImfHCDBGQ==", + "dev": true, + "dependencies": { + "@types/estree": "*", + "@types/json-schema": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz", + "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==", + "dev": true + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true + }, + "node_modules/@types/pug": { + "version": "2.0.10", + "resolved": "https://registry.npmjs.org/@types/pug/-/pug-2.0.10.tgz", + "integrity": "sha512-Sk/uYFOBAB7mb74XcpizmH0KOR2Pv3D2Hmrh1Dmy5BmK3MpdSa5kqZcg6EKBdklU0bFXX9gCfzvpnyUehrPIuA==", + "dev": true + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "7.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.12.0.tgz", + "integrity": "sha512-7F91fcbuDf/d3S8o21+r3ZncGIke/+eWk0EpO21LXhDfLahriZF9CGj4fbAetEjlaBdjdSm9a6VeXbpbT6Z40Q==", + "dev": true, + "dependencies": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "7.12.0", + "@typescript-eslint/type-utils": "7.12.0", + "@typescript-eslint/utils": "7.12.0", + "@typescript-eslint/visitor-keys": "7.12.0", + "graphemer": "^1.4.0", + 
"ignore": "^5.3.1", + "natural-compare": "^1.4.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^7.0.0", + "eslint": "^8.56.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "7.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.12.0.tgz", + "integrity": "sha512-dm/J2UDY3oV3TKius2OUZIFHsomQmpHtsV0FTh1WO8EKgHLQ1QCADUqscPgTpU+ih1e21FQSRjXckHn3txn6kQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/scope-manager": "7.12.0", + "@typescript-eslint/types": "7.12.0", + "@typescript-eslint/typescript-estree": "7.12.0", + "@typescript-eslint/visitor-keys": "7.12.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.56.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "7.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.12.0.tgz", + "integrity": "sha512-itF1pTnN6F3unPak+kutH9raIkL3lhH1YRPGgt7QQOh43DQKVJXmWkpb+vpc/TiDHs6RSd9CTbDsc/Y+Ygq7kg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "7.12.0", + "@typescript-eslint/visitor-keys": "7.12.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "7.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.12.0.tgz", + "integrity": 
"sha512-lib96tyRtMhLxwauDWUp/uW3FMhLA6D0rJ8T7HmH7x23Gk1Gwwu8UZ94NMXBvOELn6flSPiBrCKlehkiXyaqwA==", + "dev": true, + "dependencies": { + "@typescript-eslint/typescript-estree": "7.12.0", + "@typescript-eslint/utils": "7.12.0", + "debug": "^4.3.4", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.56.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/types": { + "version": "7.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.12.0.tgz", + "integrity": "sha512-o+0Te6eWp2ppKY3mLCU+YA9pVJxhUJE15FV7kxuD9jgwIAa+w/ycGJBMrYDTpVGUM/tgpa9SeMOugSabWFq7bg==", + "dev": true, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "7.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.12.0.tgz", + "integrity": "sha512-5bwqLsWBULv1h6pn7cMW5dXX/Y2amRqLaKqsASVwbBHMZSnHqE/HN4vT4fE0aFsiwxYvr98kqOWh1a8ZKXalCQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "7.12.0", + "@typescript-eslint/visitor-keys": "7.12.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "7.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.12.0.tgz", + "integrity": 
"sha512-Y6hhwxwDx41HNpjuYswYp6gDbkiZ8Hin9Bf5aJQn1bpTs3afYY4GX+MPYxma8jtoIV2GRwTM/UJm/2uGCVv+DQ==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "@typescript-eslint/scope-manager": "7.12.0", + "@typescript-eslint/types": "7.12.0", + "@typescript-eslint/typescript-estree": "7.12.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.56.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "7.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.12.0.tgz", + "integrity": "sha512-uZk7DevrQLL3vSnfFl5bj4sL75qC9D6EdjemIdbtkuUmIheWpuiiylSY01JxJE7+zGrOWDZrp1WxOuDntvKrHQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "7.12.0", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@ungap/structured-clone": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", + "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", + "dev": true + }, + "node_modules/acorn": { + "version": "8.11.3", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz", + "integrity": "sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + 
"peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/any-promise": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", + "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==", + "dev": true + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/arg": { + "version": "5.0.2", + "resolved": 
"https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", + "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==", + "dev": true + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/aria-query": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.0.tgz", + "integrity": "sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==", + "dev": true, + "dependencies": { + "dequal": "^2.0.3" + } + }, + "node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/autoprefixer": { + "version": "10.4.19", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.19.tgz", + "integrity": "sha512-BaENR2+zBZ8xXhM4pUaKUxlVdxZ0EZhjvbopwnXmxRUfqDmwSpC2lAi/QXvx7NRdPCo1WKEcEF6mV64si1z4Ew==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/autoprefixer" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "browserslist": "^4.23.0", + "caniuse-lite": "^1.0.30001599", + "fraction.js": "^4.3.7", + "normalize-range": "^0.1.2", + "picocolors": "^1.0.0", + "postcss-value-parser": "^4.2.0" + }, + "bin": { + "autoprefixer": "bin/autoprefixer" + }, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/axobject-query": { + 
"version": "4.0.0", + "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-4.0.0.tgz", + "integrity": "sha512-+60uv1hiVFhHZeO+Lz0RYzsVHy5Wr1ayX0mwda9KPDVLNJgZ1T9Ny7VmFbLDzxsH0D87I86vgj3gFrjTJUYznw==", + "dev": true, + "dependencies": { + "dequal": "^2.0.3" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.23.0", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.0.tgz", + "integrity": "sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { 
+ "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "caniuse-lite": "^1.0.30001587", + "electron-to-chromium": "^1.4.668", + "node-releases": "^2.0.14", + "update-browserslist-db": "^1.0.13" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/buffer-crc32": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", + "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase-css": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", + "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001624", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001624.tgz", + "integrity": "sha512-0dWnQG87UevOCPYaOR49CBcLBwoZLpws+k6W37nLjWUhumP1Isusj0p2u+3KhjNloRWK9OKMgjBBzPujQHw4nA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ] + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": 
"https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "dev": true, + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/code-red": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/code-red/-/code-red-1.0.4.tgz", + "integrity": "sha512-7qJWqItLA8/VPVlKJlFXU+NBlo/qyfs39aJcuMT/2ere32ZqvF5OSxgdM5xOfJJ7O429gg2HM47y8v9P+9wrNw==", + "dev": true, + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.4.15", + "@types/estree": "^1.0.1", + "acorn": "^8.10.0", + "estree-walker": "^3.0.3", + "periscopic": "^3.1.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": 
"sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "node_modules/cookie": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/css-tree": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.3.1.tgz", + "integrity": "sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==", + "dev": true, + 
"dependencies": { + "mdn-data": "2.0.30", + "source-map-js": "^1.0.1" + }, + "engines": { + "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0" + } + }, + "node_modules/cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "dev": true, + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/detect-indent": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-6.1.0.tgz", + "integrity": "sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==", + "dev": true, 
+ "engines": { + "node": ">=8" + } + }, + "node_modules/devalue": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/devalue/-/devalue-5.0.0.tgz", + "integrity": "sha512-gO+/OMXF7488D+u3ue+G7Y4AA3ZmUnB3eHJXmBTgNHvr4ZNzl36A0ZtG+XCRNYCkYx/bFmw4qtkoFLa+wSrwAA==", + "dev": true + }, + "node_modules/didyoumean": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", + "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==", + "dev": true + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/dlv": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", + "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==", + "dev": true + }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true + }, + "node_modules/electron-to-chromium": { + "version": "1.4.783", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.783.tgz", + "integrity": 
"sha512-bT0jEz/Xz1fahQpbZ1D7LgmPYZ3iHVY39NcWWro1+hA2IvjiPeaXtfSqrQ+nXjApMvQRE2ASt1itSLRrebHMRQ==", + "dev": true + }, + "node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true + }, + "node_modules/es6-promise": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.3.1.tgz", + "integrity": "sha512-SOp9Phqvqn7jtEUxPWdWfWoLmyt2VaJ6MpvP9Comy1MceMXqE6bxvaTu4iaxpYYPzhny28Lc+M87/c2cPK6lDg==", + "dev": true + }, + "node_modules/esbuild": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.20.2.tgz", + "integrity": "sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g==", + "dev": true, + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.20.2", + "@esbuild/android-arm": "0.20.2", + "@esbuild/android-arm64": "0.20.2", + "@esbuild/android-x64": "0.20.2", + "@esbuild/darwin-arm64": "0.20.2", + "@esbuild/darwin-x64": "0.20.2", + "@esbuild/freebsd-arm64": "0.20.2", + "@esbuild/freebsd-x64": "0.20.2", + "@esbuild/linux-arm": "0.20.2", + "@esbuild/linux-arm64": "0.20.2", + "@esbuild/linux-ia32": "0.20.2", + "@esbuild/linux-loong64": "0.20.2", + "@esbuild/linux-mips64el": "0.20.2", + "@esbuild/linux-ppc64": "0.20.2", + "@esbuild/linux-riscv64": "0.20.2", + "@esbuild/linux-s390x": "0.20.2", + "@esbuild/linux-x64": "0.20.2", + "@esbuild/netbsd-x64": "0.20.2", + "@esbuild/openbsd-x64": "0.20.2", + "@esbuild/sunos-x64": "0.20.2", + "@esbuild/win32-arm64": "0.20.2", + "@esbuild/win32-ia32": "0.20.2", + "@esbuild/win32-x64": "0.20.2" + } + }, + "node_modules/escalade": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz", 
+ "integrity": "sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz", + "integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.0", + "@humanwhocodes/config-array": "^0.11.14", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "graphemer": "^1.4.0", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3", + "strip-ansi": "^6.0.1", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, 
+ "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-compat-utils": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/eslint-compat-utils/-/eslint-compat-utils-0.5.0.tgz", + "integrity": "sha512-dc6Y8tzEcSYZMHa+CMPLi/hyo1FzNeonbhJL7Ol0ccuKQkwopJcJBA9YL/xmMTLU1eKigXo9vj9nALElWYSowg==", + "dev": true, + "dependencies": { + "semver": "^7.5.4" + }, + "engines": { + "node": ">=12" + }, + "peerDependencies": { + "eslint": ">=6.0.0" + } + }, + "node_modules/eslint-config-prettier": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.1.0.tgz", + "integrity": "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==", + "dev": true, + "bin": { + "eslint-config-prettier": "bin/cli.js" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-plugin-svelte": { + "version": "2.39.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-svelte/-/eslint-plugin-svelte-2.39.0.tgz", + "integrity": "sha512-FXktBLXsrxbA+6ZvJK2z/sQOrUKyzSg3fNWK5h0reSCjr2fjAsc9ai/s/JvSl4Hgvz3nYVtTIMwarZH5RcB7BA==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "@jridgewell/sourcemap-codec": "^1.4.15", + "debug": "^4.3.4", + "eslint-compat-utils": "^0.5.0", + "esutils": "^2.0.3", + "known-css-properties": "^0.31.0", + "postcss": "^8.4.38", + "postcss-load-config": "^3.1.4", + "postcss-safe-parser": "^6.0.0", + "postcss-selector-parser": "^6.0.16", + "semver": "^7.6.0", + "svelte-eslint-parser": ">=0.36.0 <1.0.0" + }, + "engines": { + "node": "^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ota-meshi" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0-0 || ^9.0.0-0", + "svelte": "^3.37.0 || ^4.0.0 || ^5.0.0-next.112" + }, + "peerDependenciesMeta": { + "svelte": { + "optional": true 
+ } + } + }, + "node_modules/eslint-scope": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/eslint/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/esm-env": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/esm-env/-/esm-env-1.0.0.tgz", + "integrity": "sha512-Cf6VksWPsTuW01vU9Mk/3vRue91Zevka5SjyNf3nEpokFRuqt/KjUQoGAwq9qMmhpLTHmXzSIrFRw8zxWzmFBA==", + "dev": true + }, + "node_modules/espree": { + "version": "9.6.1", + "resolved": 
"https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", + "dev": true, + "dependencies": { + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", + "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", + "dev": true, + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + 
"engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "node_modules/fast-glob": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", + "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true + }, + "node_modules/fastq": { + "version": "1.17.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", + "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", + "dev": 
true, + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", + "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", + "dev": true, + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.3", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flatted": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz", + "integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==", + "dev": true + }, + "node_modules/foreground-child": { + "version": "3.1.1", + "resolved": 
"https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz", + "integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/fraction.js": { + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz", + "integrity": "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==", + "dev": true, + "engines": { + "node": "*" + }, + "funding": { + "type": "patreon", + "url": "https://github.com/sponsors/rawify" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": 
"sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/globals": { + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", + "dev": true, + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/globalyzer": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/globalyzer/-/globalyzer-0.1.0.tgz", + "integrity": "sha512-40oNTM9UfG6aBmuKxk/giHn5nQ8RVz/SS4Ir6zgzOv9/qC3kKZ9v4etGTcJbEl/NyVQH7FGU7d+X1egr57Md2Q==", + "dev": true + }, + "node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globrex": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/globrex/-/globrex-0.1.2.tgz", + "integrity": "sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg==", + "dev": true + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true + }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + 
"integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ignore": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.1.tgz", + "integrity": "sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/import-meta-resolve": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/import-meta-resolve/-/import-meta-resolve-4.1.0.tgz", + "integrity": "sha512-I6fiaX09Xivtk+THaMfAwnA3MVA5Big1WHF1Dfx9hFuvNIWpXnorlkzhcQf6ehrqQiiZECRt1poOAkPmer3ruw==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-core-module": { + "version": "2.13.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz", + "integrity": "sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==", + "dev": true, + "dependencies": { + "hasown": "^2.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": 
"https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-reference": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-3.0.2.tgz", + "integrity": "sha512-v3rht/LgVcsdZa3O2Nqs+NMowLOxeOm7Ay9+/ARQ2F+qEoANRcqrjAZKGN0v8ymUetZGgkp26LTnGT7H0Qo9Pg==", + "dev": true, + "dependencies": { + "@types/estree": "*" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/jackspeak": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.1.2.tgz", + "integrity": "sha512-kWmLKn2tRtfYMF/BakihVVRzBKOxz4gJMiL2Rj91WnAB5TPZumSH99R/Yf1qE1u4uRimvCSJfm6hnxohXeEXjQ==", + "dev": true, + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/jiti": { + "version": "1.21.0", + 
"resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.0.tgz", + "integrity": "sha512-gFqAIbuKyyso/3G2qhiO2OM6shY6EPP/R0+mkDbyspxKazh8BXDC5FiFsUjlczgdNz/vfra0da2y+aHrusLG/Q==", + "dev": true, + "bin": { + "jiti": "bin/jiti.js" + } + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/kleur": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", + "integrity": 
"sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/known-css-properties": { + "version": "0.31.0", + "resolved": "https://registry.npmjs.org/known-css-properties/-/known-css-properties-0.31.0.tgz", + "integrity": "sha512-sBPIUGTNF0czz0mwGGUoKKJC8Q7On1GPbCSFPfyEsfHb2DyBG0Y4QtV+EVWpINSaiGKZblDNuF5AezxSgOhesQ==", + "dev": true + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lilconfig": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz", + "integrity": "sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true + }, + "node_modules/locate-character": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-character/-/locate-character-3.0.0.tgz", + "integrity": "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA==", + "dev": true + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + 
"engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true + }, + "node_modules/lru-cache": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", + "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==", + "dev": true, + "engines": { + "node": "14 || >=16.14" + } + }, + "node_modules/magic-string": { + "version": "0.30.10", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.10.tgz", + "integrity": "sha512-iIRwTIf0QKV3UAnYK4PU8uiEc4SRh5jX0mwpIwETPpHdhVM4f53RSwS/vXvN1JhGX+Cs7B8qIq3d6AH49O5fAQ==", + "dev": true, + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.4.15" + } + }, + "node_modules/mdn-data": { + "version": "2.0.30", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz", + "integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==", + "dev": true + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz", + "integrity": "sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==", + "dev": true, + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/min-indent": { + "version": 
"1.0.1", + "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", + "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/minimatch": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", + "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "dev": true, + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/mri": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", + "integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/mrmime": { + "version": "2.0.0", + 
"resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.0.tgz", + "integrity": "sha512-eu38+hdgojoyq63s+yTpN4XMBdt5l8HhMhc4VKLO9KM5caLIBvUm4thi7fFaxyTmCKeNnXZ5pAlBwCUnhA09uw==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/mz": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", + "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", + "dev": true, + "dependencies": { + "any-promise": "^1.0.0", + "object-assign": "^4.0.1", + "thenify-all": "^1.0.0" + } + }, + "node_modules/nanoid": { + "version": "3.3.7", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz", + "integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true + }, + "node_modules/node-releases": { + "version": "2.0.14", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz", + "integrity": "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==", + "dev": true + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": 
"sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/normalize-range": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", + "integrity": "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", + "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + 
"integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dev": true, + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/periscopic": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/periscopic/-/periscopic-3.1.0.tgz", + "integrity": "sha512-vKiQ8RRtkl9P+r/+oefh25C3fhybptkHKCZSPlcXiJux2tJF55GnEj3BVn4A5gKfq9NWWXXrxkHBwVPUfH0opw==", + "dev": true, + "dependencies": { + "@types/estree": "^1.0.0", + "estree-walker": "^3.0.0", + "is-reference": "^3.0.0" + } + }, + "node_modules/picocolors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", + "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==", + "dev": true + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": 
"https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/piexifjs": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/piexifjs/-/piexifjs-1.0.6.tgz", + "integrity": "sha512-0wVyH0cKohzBQ5Gi2V1BuxYpxWfxF3cSqfFXfPIpl5tl9XLS5z4ogqhUCD20AbHi0h9aJkqXNJnkVev6gwh2ag==" + }, + "node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pirates": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz", + "integrity": "sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/postcss": { + "version": "8.4.38", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz", + "integrity": "sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "nanoid": "^3.3.7", + "picocolors": "^1.0.0", + "source-map-js": "^1.2.0" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-import": { + "version": "15.1.0", + "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-15.1.0.tgz", + "integrity": "sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.0.0", + "read-cache": "^1.0.0", + "resolve": "^1.1.7" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + 
"postcss": "^8.0.0" + } + }, + "node_modules/postcss-js": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.0.1.tgz", + "integrity": "sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw==", + "dev": true, + "dependencies": { + "camelcase-css": "^2.0.1" + }, + "engines": { + "node": "^12 || ^14 || >= 16" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + "peerDependencies": { + "postcss": "^8.4.21" + } + }, + "node_modules/postcss-load-config": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-3.1.4.tgz", + "integrity": "sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg==", + "dev": true, + "dependencies": { + "lilconfig": "^2.0.5", + "yaml": "^1.10.2" + }, + "engines": { + "node": ">= 10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + "peerDependencies": { + "postcss": ">=8.0.9", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "postcss": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + "node_modules/postcss-nested": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.0.1.tgz", + "integrity": "sha512-mEp4xPMi5bSWiMbsgoPfcP74lsWLHkQbZc3sY+jWYd65CUwXrUaTp0fmNpa01ZcETKlIgUdFN/MpS2xZtqL9dQ==", + "dev": true, + "dependencies": { + "postcss-selector-parser": "^6.0.11" + }, + "engines": { + "node": ">=12.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + "peerDependencies": { + "postcss": "^8.2.14" + } + }, + "node_modules/postcss-safe-parser": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-safe-parser/-/postcss-safe-parser-6.0.0.tgz", + "integrity": 
"sha512-FARHN8pwH+WiS2OPCxJI8FuRJpTVnn6ZNFiqAM2aeW2LwTHWWmWgIyKC6cUo0L8aeKiF/14MNvnpls6R2PBeMQ==", + "dev": true, + "engines": { + "node": ">=12.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + "peerDependencies": { + "postcss": "^8.3.3" + } + }, + "node_modules/postcss-scss": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/postcss-scss/-/postcss-scss-4.0.9.tgz", + "integrity": "sha512-AjKOeiwAitL/MXxQW2DliT28EKukvvbEWx3LBmJIRN8KfBGZbRTxNYW0kSqi1COiTZ57nZ9NW06S6ux//N1c9A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss-scss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "engines": { + "node": ">=12.0" + }, + "peerDependencies": { + "postcss": "^8.4.29" + } + }, + "node_modules/postcss-selector-parser": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.0.tgz", + "integrity": "sha512-UMz42UD0UY0EApS0ZL9o1XnLhSTtvvvLe5Dc2H2O56fvRZi+KulDyf5ctDhhtYJBGKStV2FL1fy6253cmLgqVQ==", + "dev": true, + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-value-parser": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", + "dev": true + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "3.3.1", + 
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.1.tgz", + "integrity": "sha512-7CAwy5dRsxs8PHXT3twixW9/OEll8MLE0VRPCJyl7CkS6VHGPSlsVaWTiASPTyGyYRyApxlaWTzwUxVNrhcwDg==", + "dev": true, + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/prettier-plugin-organize-imports": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/prettier-plugin-organize-imports/-/prettier-plugin-organize-imports-3.2.4.tgz", + "integrity": "sha512-6m8WBhIp0dfwu0SkgfOxJqh+HpdyfqSSLfKKRZSFbDuEQXDDndb8fTpRWkUrX/uBenkex3MgnVk0J3b3Y5byog==", + "dev": true, + "peerDependencies": { + "@volar/vue-language-plugin-pug": "^1.0.4", + "@volar/vue-typescript": "^1.0.4", + "prettier": ">=2.0", + "typescript": ">=2.9" + }, + "peerDependenciesMeta": { + "@volar/vue-language-plugin-pug": { + "optional": true + }, + "@volar/vue-typescript": { + "optional": true + } + } + }, + "node_modules/prettier-plugin-svelte": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/prettier-plugin-svelte/-/prettier-plugin-svelte-3.2.4.tgz", + "integrity": "sha512-tZv+ADfeOWFNQkXkRh6zUXE16w3Vla8x2Ug0B/EnSmjR4EnwdwZbGgL/liSwR1kcEALU5mAAyua98HBxheCxgg==", + "dev": true, + "peerDependencies": { + "prettier": "^3.0.0", + "svelte": "^3.2.0 || ^4.0.0-next.0 || ^5.0.0-next.0" + } + }, + "node_modules/prettier-plugin-tailwindcss": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/prettier-plugin-tailwindcss/-/prettier-plugin-tailwindcss-0.6.1.tgz", + "integrity": "sha512-AnbeYZu0WGj+QgKciUgdMnRxrqcxltleZPgdwfA5104BHM3siBLONN/HLW1yS2HvzSNkzpQ/JAj+LN0jcJO+0w==", + "dev": true, + "engines": { + "node": ">=14.21.3" + }, + "peerDependencies": { + "@ianvs/prettier-plugin-sort-imports": "*", + "@prettier/plugin-pug": "*", + "@shopify/prettier-plugin-liquid": "*", + "@trivago/prettier-plugin-sort-imports": "*", + "@zackad/prettier-plugin-twig-melody": 
"*", + "prettier": "^3.0", + "prettier-plugin-astro": "*", + "prettier-plugin-css-order": "*", + "prettier-plugin-import-sort": "*", + "prettier-plugin-jsdoc": "*", + "prettier-plugin-marko": "*", + "prettier-plugin-organize-attributes": "*", + "prettier-plugin-organize-imports": "*", + "prettier-plugin-sort-imports": "*", + "prettier-plugin-style-order": "*", + "prettier-plugin-svelte": "*" + }, + "peerDependenciesMeta": { + "@ianvs/prettier-plugin-sort-imports": { + "optional": true + }, + "@prettier/plugin-pug": { + "optional": true + }, + "@shopify/prettier-plugin-liquid": { + "optional": true + }, + "@trivago/prettier-plugin-sort-imports": { + "optional": true + }, + "@zackad/prettier-plugin-twig-melody": { + "optional": true + }, + "prettier-plugin-astro": { + "optional": true + }, + "prettier-plugin-css-order": { + "optional": true + }, + "prettier-plugin-import-sort": { + "optional": true + }, + "prettier-plugin-jsdoc": { + "optional": true + }, + "prettier-plugin-marko": { + "optional": true + }, + "prettier-plugin-organize-attributes": { + "optional": true + }, + "prettier-plugin-organize-imports": { + "optional": true + }, + "prettier-plugin-sort-imports": { + "optional": true + }, + "prettier-plugin-style-order": { + "optional": true + }, + "prettier-plugin-svelte": { + "optional": true + } + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + 
"type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/read-cache": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", + "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==", + "dev": true, + "dependencies": { + "pify": "^2.3.0" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.8", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", + "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", + "dev": true, + "dependencies": { + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": 
"https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/rollup": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.18.0.tgz", + "integrity": "sha512-QmJz14PX3rzbJCN1SG4Xe/bAAX2a6NpCP8ab2vfu2GiUr8AQcr2nCV/oEO3yneFarB67zk8ShlIyWb2LGTb3Sg==", + "dev": true, + "dependencies": { + "@types/estree": "1.0.5" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.18.0", + "@rollup/rollup-android-arm64": "4.18.0", + "@rollup/rollup-darwin-arm64": "4.18.0", + "@rollup/rollup-darwin-x64": "4.18.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.18.0", + "@rollup/rollup-linux-arm-musleabihf": "4.18.0", + "@rollup/rollup-linux-arm64-gnu": "4.18.0", + "@rollup/rollup-linux-arm64-musl": "4.18.0", + "@rollup/rollup-linux-powerpc64le-gnu": "4.18.0", + "@rollup/rollup-linux-riscv64-gnu": "4.18.0", + "@rollup/rollup-linux-s390x-gnu": "4.18.0", + "@rollup/rollup-linux-x64-gnu": "4.18.0", + "@rollup/rollup-linux-x64-musl": "4.18.0", + "@rollup/rollup-win32-arm64-msvc": "4.18.0", + "@rollup/rollup-win32-ia32-msvc": "4.18.0", + "@rollup/rollup-win32-x64-msvc": "4.18.0", + "fsevents": "~2.3.2" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", 
+ "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/rvfc-polyfill": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/rvfc-polyfill/-/rvfc-polyfill-1.0.7.tgz", + "integrity": "sha512-seBl7J1J3/k0LuzW2T9fG6JIOpni5AbU+/87LA+zTYKgTVhsfShmS8K/yOo1eeEjGJHnAdkVAUUM+PEjN9Mpkw==" + }, + "node_modules/sade": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/sade/-/sade-1.8.1.tgz", + "integrity": "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==", + "dev": true, + "dependencies": { + "mri": "^1.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/sander": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/sander/-/sander-0.5.1.tgz", + "integrity": "sha512-3lVqBir7WuKDHGrKRDn/1Ye3kwpXaDOMsiRP1wd6wpZW56gJhsbp5RqQpA6JG/P+pkXizygnr1dKR8vzWaVsfA==", + "dev": true, + "dependencies": { + "es6-promise": "^3.1.2", + "graceful-fs": "^4.1.3", + "mkdirp": "^0.5.1", + "rimraf": "^2.5.2" + } + }, + "node_modules/sander/node_modules/rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, + "node_modules/semver": { + "version": "7.6.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", + "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/set-cookie-parser": { + "version": "2.6.0", + "resolved": 
"https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.6.0.tgz", + "integrity": "sha512-RVnVQxTXuerk653XfuliOxBP81Sf0+qfQE73LIYKcyMYHG94AuH0kgrQpRDuTZnSmjpysHmzxJXKNfa6PjFhyQ==", + "dev": true + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/sirv": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/sirv/-/sirv-2.0.4.tgz", + "integrity": "sha512-94Bdh3cC2PKrbgSOUqTiGPWVZeSiXfKOVZNJniWoqrWrRkB1CJzBU3NEbiTsPcYy1lDsANA/THzS+9WBiy5nfQ==", + "dev": true, + "dependencies": { + "@polka/url": "^1.0.0-next.24", + "mrmime": "^2.0.0", + "totalist": "^3.0.0" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/sorcery": { + "version": "0.11.0", + "resolved": 
"https://registry.npmjs.org/sorcery/-/sorcery-0.11.0.tgz", + "integrity": "sha512-J69LQ22xrQB1cIFJhPfgtLuI6BpWRiWu1Y3vSsIwK/eAScqJxd/+CJlUuHQRdX2C9NGFamq+KqNywGgaThwfHw==", + "dev": true, + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.4.14", + "buffer-crc32": "^0.2.5", + "minimist": "^1.2.0", + "sander": "^0.5.0" + }, + "bin": { + "sorcery": "bin/sorcery" + } + }, + "node_modules/source-map-js": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", + "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/string-width/node_modules/ansi-regex": { + "version": 
"6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/string-width/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-indent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", + "dev": true, + "dependencies": { + "min-indent": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": 
"https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/sucrase": { + "version": "3.35.0", + "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.35.0.tgz", + "integrity": "sha512-8EbVDiu9iN/nESwxeSxDKe0dunta1GOlHufmSSXxMD2z2/tMZpDMpvXQGsc+ajGo8y2uYUmixaSRUc/QPoQ0GA==", + "dev": true, + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.2", + "commander": "^4.0.0", + "glob": "^10.3.10", + "lines-and-columns": "^1.1.6", + "mz": "^2.7.0", + "pirates": "^4.0.1", + "ts-interface-checker": "^0.1.9" + }, + "bin": { + "sucrase": "bin/sucrase", + "sucrase-node": "bin/sucrase-node" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/sucrase/node_modules/glob": { + "version": "10.4.1", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.1.tgz", + "integrity": "sha512-2jelhlq3E4ho74ZyVLN03oKdAZVUa6UDZzFLVH1H7dnoax+y9qyaq8zBkfDIggjniU19z0wU18y16jMB2eyVIw==", + "dev": true, + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/svelte": { + "version": "4.2.18", + "resolved": "https://registry.npmjs.org/svelte/-/svelte-4.2.18.tgz", + "integrity": "sha512-d0FdzYIiAePqRJEb90WlJDkjUEx42xhivxN8muUBmfZnP+tzUgz12DJ2hRJi8sIHCME7jeK1PTMgKPSfTd8JrA==", + "dev": true, + "dependencies": { + "@ampproject/remapping": "^2.2.1", + "@jridgewell/sourcemap-codec": "^1.4.15", + "@jridgewell/trace-mapping": "^0.3.18", + "@types/estree": "^1.0.1", + "acorn": "^8.9.0", + "aria-query": "^5.3.0", + "axobject-query": "^4.0.0", + "code-red": "^1.0.3", + "css-tree": "^2.3.1", + "estree-walker": "^3.0.3", + "is-reference": "^3.0.1", + "locate-character": "^3.0.0", + "magic-string": "^0.30.4", + "periscopic": "^3.1.0" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/svelte-check": { + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/svelte-check/-/svelte-check-3.8.0.tgz", + "integrity": "sha512-7Nxn+3X97oIvMzYJ7t27w00qUf1Y52irE2RU2dQAd5PyvfGp4E7NLhFKVhb6PV2fx7dCRMpNKDIuazmGthjpSQ==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.17", + "chokidar": "^3.4.1", + "fast-glob": "^3.2.7", + "import-fresh": "^3.2.1", + "picocolors": "^1.0.0", + "sade": "^1.7.4", + "svelte-preprocess": "^5.1.3", + "typescript": "^5.0.3" + }, + "bin": { + "svelte-check": "bin/svelte-check" + }, + "peerDependencies": { + "svelte": "^3.55.0 || ^4.0.0-next.0 || ^4.0.0 || ^5.0.0-next.0" + } + }, + "node_modules/svelte-eslint-parser": { + "version": "0.36.0", + "resolved": "https://registry.npmjs.org/svelte-eslint-parser/-/svelte-eslint-parser-0.36.0.tgz", + "integrity": 
"sha512-/6YmUSr0FAVxW8dXNdIMydBnddPMHzaHirAZ7RrT21XYdgGGZMh0LQG6CZsvAFS4r2Y4ItUuCQc8TQ3urB30mQ==", + "dev": true, + "dependencies": { + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "postcss": "^8.4.38", + "postcss-scss": "^4.0.9" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ota-meshi" + }, + "peerDependencies": { + "svelte": "^3.37.0 || ^4.0.0 || ^5.0.0-next.115" + }, + "peerDependenciesMeta": { + "svelte": { + "optional": true + } + } + }, + "node_modules/svelte-hmr": { + "version": "0.16.0", + "resolved": "https://registry.npmjs.org/svelte-hmr/-/svelte-hmr-0.16.0.tgz", + "integrity": "sha512-Gyc7cOS3VJzLlfj7wKS0ZnzDVdv3Pn2IuVeJPk9m2skfhcu5bq3wtIZyQGggr7/Iim5rH5cncyQft/kRLupcnA==", + "dev": true, + "engines": { + "node": "^12.20 || ^14.13.1 || >= 16" + }, + "peerDependencies": { + "svelte": "^3.19.0 || ^4.0.0" + } + }, + "node_modules/svelte-preprocess": { + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/svelte-preprocess/-/svelte-preprocess-5.1.4.tgz", + "integrity": "sha512-IvnbQ6D6Ao3Gg6ftiM5tdbR6aAETwjhHV+UKGf5bHGYR69RQvF1ho0JKPcbUON4vy4R7zom13jPjgdOWCQ5hDA==", + "dev": true, + "hasInstallScript": true, + "dependencies": { + "@types/pug": "^2.0.6", + "detect-indent": "^6.1.0", + "magic-string": "^0.30.5", + "sorcery": "^0.11.0", + "strip-indent": "^3.0.0" + }, + "engines": { + "node": ">= 16.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.10.2", + "coffeescript": "^2.5.1", + "less": "^3.11.3 || ^4.0.0", + "postcss": "^7 || ^8", + "postcss-load-config": "^2.1.0 || ^3.0.0 || ^4.0.0 || ^5.0.0", + "pug": "^3.0.0", + "sass": "^1.26.8", + "stylus": "^0.55.0", + "sugarss": "^2.0.0 || ^3.0.0 || ^4.0.0", + "svelte": "^3.23.0 || ^4.0.0-next.0 || ^4.0.0 || ^5.0.0-next.0", + "typescript": ">=3.9.5 || ^4.0.0 || ^5.0.0" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "coffeescript": { + "optional": true + 
}, + "less": { + "optional": true + }, + "postcss": { + "optional": true + }, + "postcss-load-config": { + "optional": true + }, + "pug": { + "optional": true + }, + "sass": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "typescript": { + "optional": true + } + } + }, + "node_modules/tailwindcss": { + "version": "3.4.4", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.4.tgz", + "integrity": "sha512-ZoyXOdJjISB7/BcLTR6SEsLgKtDStYyYZVLsUtWChO4Ps20CBad7lfJKVDiejocV4ME1hLmyY0WJE3hSDcmQ2A==", + "dev": true, + "dependencies": { + "@alloc/quick-lru": "^5.2.0", + "arg": "^5.0.2", + "chokidar": "^3.5.3", + "didyoumean": "^1.2.2", + "dlv": "^1.1.3", + "fast-glob": "^3.3.0", + "glob-parent": "^6.0.2", + "is-glob": "^4.0.3", + "jiti": "^1.21.0", + "lilconfig": "^2.1.0", + "micromatch": "^4.0.5", + "normalize-path": "^3.0.0", + "object-hash": "^3.0.0", + "picocolors": "^1.0.0", + "postcss": "^8.4.23", + "postcss-import": "^15.1.0", + "postcss-js": "^4.0.1", + "postcss-load-config": "^4.0.1", + "postcss-nested": "^6.0.1", + "postcss-selector-parser": "^6.0.11", + "resolve": "^1.22.2", + "sucrase": "^3.32.0" + }, + "bin": { + "tailwind": "lib/cli.js", + "tailwindcss": "lib/cli.js" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tailwindcss/node_modules/postcss-load-config": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-4.0.2.tgz", + "integrity": "sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "lilconfig": "^3.0.0", + "yaml": "^2.3.4" + }, + "engines": { + "node": ">= 14" + }, + "peerDependencies": { + "postcss": ">=8.0.9", + "ts-node": ">=9.0.0" + }, + 
"peerDependenciesMeta": { + "postcss": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + "node_modules/tailwindcss/node_modules/postcss-load-config/node_modules/lilconfig": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.1.tgz", + "integrity": "sha512-O18pf7nyvHTckunPWCV1XUNXU1piu01y2b7ATJ0ppkUkk8ocqVWBrYjJBCwHDjD/ZWcfyrA0P4gKhzWGi5EINQ==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" + } + }, + "node_modules/tailwindcss/node_modules/yaml": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.2.tgz", + "integrity": "sha512-B3VqDZ+JAg1nZpaEmWtTXUlBneoGx6CPM9b0TENK6aoSu5t73dItudwdgmi6tHlIZZId4dZ9skcAQ2UbcyAeVA==", + "dev": true, + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", + "dev": true + }, + "node_modules/thenify": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", + "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", + "dev": true, + "dependencies": { + "any-promise": "^1.0.0" + } + }, + "node_modules/thenify-all": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", + "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==", + "dev": true, + "dependencies": { + "thenify": ">= 3.1.0 < 4" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/tiny-glob": { + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/tiny-glob/-/tiny-glob-0.2.9.tgz", + "integrity": 
"sha512-g/55ssRPUjShh+xkfx9UPDXqhckHEsHr4Vd9zX55oSdGZc/MD0m3sferOkwWtp98bv+kcVfEHtRJgBVJzelrzg==", + "dev": true, + "dependencies": { + "globalyzer": "0.1.0", + "globrex": "^0.1.2" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/totalist": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/totalist/-/totalist-3.0.1.tgz", + "integrity": "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/ts-api-utils": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.3.0.tgz", + "integrity": "sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ==", + "dev": true, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "typescript": ">=4.2.0" + } + }, + "node_modules/ts-interface-checker": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", + "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==", + "dev": true + }, + "node_modules/tslib": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", + "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==", + "dev": true + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", 
+ "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typescript": { + "version": "5.4.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz", + "integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.0.16", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.16.tgz", + "integrity": "sha512-KVbTxlBYlckhF5wgfyZXTWnMn7MMZjMu9XG8bPlliUOP9ThaF4QnhP8qrjrH7DRzHfSk0oQv1wToW+iA5GajEQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "escalade": "^3.1.2", + "picocolors": "^1.0.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true + }, + "node_modules/vite": { + "version": "5.2.12", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.2.12.tgz", + "integrity": "sha512-/gC8GxzxMK5ntBwb48pR32GGhENnjtY30G4A0jemunsBkiEZFw60s8InGpN8gkhHEkjnRK1aSAxeQgwvFhUHAA==", + "dev": true, + "dependencies": { + "esbuild": "^0.20.1", + "postcss": "^8.4.38", + "rollup": "^4.13.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } + } + }, + "node_modules/vitefu": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/vitefu/-/vitefu-0.2.5.tgz", + "integrity": "sha512-SgHtMLoqaeeGnd2evZ849ZbACbnwQCIwRH57t18FxcXoZop0uQu0uzlIhJBlF/eWVzuce0sHeqPcDo+evVcg8Q==", + "dev": true, + "peerDependencies": { + "vite": "^3.0.0 || ^4.0.0 || ^5.0.0" + }, + "peerDependenciesMeta": { + "vite": { + "optional": true + } + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { 
+ "node": ">= 8" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/wrap-ansi-cjs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + 
"strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "node_modules/yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + 
"integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000000000000000000000000000000000000..0b64feebcc38e074cf33cc2eae59c7cf18426934 --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,43 @@ +{ + "name": "frontend", + "version": "0.0.1", + "private": true, + "scripts": { + "dev": "vite dev", + "build": "vite build", + "preview": "vite preview", + "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json", + "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch", + "lint": "prettier --check . && eslint .", + "format": "prettier --write ." + }, + "devDependencies": { + "@sveltejs/adapter-auto": "^3.0.0", + "@sveltejs/adapter-static": "^3", + "@sveltejs/kit": "^2.0.0", + "@sveltejs/vite-plugin-svelte": "^3.1.1", + "@types/eslint": "^8.56.0", + "@typescript-eslint/eslint-plugin": "^7.12.0", + "@typescript-eslint/parser": "^7.12.0", + "autoprefixer": "^10.4.16", + "eslint": "^8.56.0", + "eslint-config-prettier": "^9.1.0", + "eslint-plugin-svelte": "^2.35.1", + "postcss": "^8.4.31", + "prettier": "^3.3.1", + "prettier-plugin-organize-imports": "^3.2.4", + "prettier-plugin-svelte": "^3.2.4", + "prettier-plugin-tailwindcss": "^0.6.1", + "svelte": "^4.2.18", + "svelte-check": "^3.8.0", + "tailwindcss": "^3.4.4", + "tslib": "^2.6.3", + "typescript": "^5.0.0", + "vite": "^5.0.3" + }, + "type": "module", + "dependencies": { + "piexifjs": "^1.0.6", + "rvfc-polyfill": "^1.0.7" + } +} diff --git a/frontend/postcss.config.js b/frontend/postcss.config.js new file mode 100644 index 0000000000000000000000000000000000000000..ba80730477d1dd22b1484282d594f2e5dda4785a --- /dev/null +++ b/frontend/postcss.config.js @@ -0,0 +1,6 @@ +export default { + plugins: { + 
tailwindcss: {}, + autoprefixer: {} + } +}; diff --git a/frontend/src/app.css b/frontend/src/app.css new file mode 100644 index 0000000000000000000000000000000000000000..b5c61c956711f981a41e95f7fcf0038436cfbb22 --- /dev/null +++ b/frontend/src/app.css @@ -0,0 +1,3 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; diff --git a/frontend/src/app.d.ts b/frontend/src/app.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..899c7e8fcaa1cc213388dc711f798ae20b622420 --- /dev/null +++ b/frontend/src/app.d.ts @@ -0,0 +1,12 @@ +// See https://kit.svelte.dev/docs/types#app +// for information about these interfaces +declare global { + namespace App { + // interface Error {} + // interface Locals {} + // interface PageData {} + // interface Platform {} + } +} + +export {}; diff --git a/frontend/src/app.html b/frontend/src/app.html new file mode 100644 index 0000000000000000000000000000000000000000..84ffad1665e662132d61ef780fd03b6a36df0b91 --- /dev/null +++ b/frontend/src/app.html @@ -0,0 +1,12 @@ + + +
+ + + + %sveltekit.head% + + ++ There are {currentQueueSize} + user(s) sharing the same GPU, affecting real-time performance. Maximum queue size is {maxQueueSize}. + Duplicate and run it on your own GPU. +
+ {/if} +Loading...
+<p class="text-sm"> + This demo showcases + LCM LoRA + ControlNet + Image to Image pipeline using + Diffusers with an MJPEG stream server. +</p>
+<p class="text-sm text-gray-500"> + Change the prompt to generate different images; it accepts Compel syntax. +</p>
+""" + + +class Pipeline: + class Info(BaseModel): + name: str = "controlnet" + title: str = "LCM + Controlnet" + description: str = "Generates an image from a text prompt" + input_mode: str = "image" + page_content: str = page_content + + class InputParams(BaseModel): + prompt: str = Field( + default_prompt, + title="Prompt", + field="textarea", + id="prompt", + ) + seed: int = Field( + 2159232, min=0, title="Seed", field="seed", hide=True, id="seed" + ) + steps: int = Field( + 2, min=1, max=6, title="Steps", field="range", hide=True, id="steps" + ) + width: int = Field( + 512, min=2, max=15, title="Width", disabled=True, hide=True, id="width" + ) + height: int = Field( + 512, min=2, max=15, title="Height", disabled=True, hide=True, id="height" + ) + guidance_scale: float = Field( + 0.0, + min=0, + max=2, + step=0.001, + title="Guidance Scale", + field="range", + hide=True, + id="guidance_scale", + ) + strength: float = Field( + 0.5, + min=0.25, + max=1.0, + step=0.001, + title="Strength", + field="range", + hide=True, + id="strength", + ) + controlnet_scale: float = Field( + 0.8, + min=0, + max=1.0, + step=0.001, + title="Controlnet Scale", + field="range", + hide=True, + id="controlnet_scale", + ) + controlnet_start: float = Field( + 0.0, + min=0, + max=1.0, + step=0.001, + title="Controlnet Start", + field="range", + hide=True, + id="controlnet_start", + ) + controlnet_end: float = Field( + 1.0, + min=0, + max=1.0, + step=0.001, + title="Controlnet End", + field="range", + hide=True, + id="controlnet_end", + ) + canny_low_threshold: float = Field( + 0.31, + min=0, + max=1.0, + step=0.001, + title="Canny Low Threshold", + field="range", + hide=True, + id="canny_low_threshold", + ) + canny_high_threshold: float = Field( + 0.125, + min=0, + max=1.0, + step=0.001, + title="Canny High Threshold", + field="range", + hide=True, + id="canny_high_threshold", + ) + debug_canny: bool = Field( + False, + title="Debug Canny", + field="checkbox", + hide=True, + 
id="debug_canny", + ) + + def __init__(self, args: Args, device: torch.device, torch_dtype: torch.dtype): + controlnet_canny = ControlNetModel.from_pretrained( + controlnet_model, torch_dtype=torch_dtype + ).to(device) + + self.pipe = StableDiffusionControlNetImg2ImgPipeline.from_pretrained( + base_model, + safety_checker=None, + controlnet=controlnet_canny, + ) + if args.taesd: + self.pipe.vae = AutoencoderTiny.from_pretrained( + taesd_model, torch_dtype=torch_dtype, use_safetensors=True + ).to(device) + + if args.sfast: + print("\nRunning sfast compile\n") + from sfast.compilers.stable_diffusion_pipeline_compiler import ( + compile, + CompilationConfig, + ) + + config = CompilationConfig.Default() + config.enable_xformers = True + config.enable_triton = True + config.enable_cuda_graph = True + self.pipe = compile(self.pipe, config=config) + + if args.onediff: + print("\nRunning onediff compile\n") + from onediff.infer_compiler import oneflow_compile + + self.pipe.unet = oneflow_compile(self.pipe.unet) + self.pipe.vae.encoder = oneflow_compile(self.pipe.vae.encoder) + self.pipe.vae.decoder = oneflow_compile(self.pipe.vae.decoder) + self.pipe.controlnet = oneflow_compile(self.pipe.controlnet) + + self.canny_torch = SobelOperator(device=device) + self.pipe.set_progress_bar_config(disable=True) + self.pipe.to(device=device, dtype=torch_dtype) + if device.type != "mps": + self.pipe.unet.to(memory_format=torch.channels_last) + + if args.torch_compile: + self.pipe.unet = torch.compile( + self.pipe.unet, mode="reduce-overhead", fullgraph=True + ) + self.pipe.vae = torch.compile( + self.pipe.vae, mode="reduce-overhead", fullgraph=True + ) + + self.pipe( + prompt="warmup", + image=[Image.new("RGB", (768, 768))], + control_image=[Image.new("RGB", (768, 768))], + ) + if args.compel: + self.compel_proc = Compel( + tokenizer=self.pipe.tokenizer, + text_encoder=self.pipe.text_encoder, + truncate_long_prompts=False, + ) + + def predict(self, params: "Pipeline.InputParams") -> 
Image.Image: + generator = torch.manual_seed(params.seed) + prompt_embeds = None + prompt = params.prompt + if hasattr(self, "compel_proc"): + prompt_embeds = self.compel_proc(params.prompt) + prompt = None + + control_image = self.canny_torch( + params.image, params.canny_low_threshold, params.canny_high_threshold + ) + steps = params.steps + strength = params.strength + if int(steps * strength) < 1: + steps = math.ceil(1 / max(0.10, strength)) + + results = self.pipe( + image=params.image, + control_image=control_image, + prompt_embeds=prompt_embeds, + prompt=prompt, + generator=generator, + strength=strength, + num_inference_steps=steps, + guidance_scale=params.guidance_scale, + width=params.width, + height=params.height, + output_type="pil", + controlnet_conditioning_scale=params.controlnet_scale, + control_guidance_start=params.controlnet_start, + control_guidance_end=params.controlnet_end, + ) + result_image = results.images[0] + if params.debug_canny: + # paste control_image on top of result_image + w0, h0 = (200, 200) + control_image = control_image.resize((w0, h0)) + w1, h1 = result_image.size + result_image.paste(control_image, (w1 - w0, h1 - h0)) + + return result_image diff --git a/server/pipelines/controlnetDepthFlashSD.py b/server/pipelines/controlnetDepthFlashSD.py new file mode 100644 index 0000000000000000000000000000000000000000..9a63705533411c7bd4ba99c2bec515e813317708 --- /dev/null +++ b/server/pipelines/controlnetDepthFlashSD.py @@ -0,0 +1,264 @@ +from diffusers import ( + StableDiffusionControlNetImg2ImgPipeline, + ControlNetModel, + AutoencoderTiny, + LCMScheduler, +) +from compel import Compel, ReturnedEmbeddingsType +import torch +from transformers import pipeline + +try: + import intel_extension_for_pytorch as ipex # type: ignore +except: + pass + +from config import Args +from pydantic import BaseModel, Field +from PIL import Image +import math + +controlnet_model = "lllyasviel/control_v11f1p_sd15_depth" +model_id = 
"runwayml/stable-diffusion-v1-5" +taesd_model = "madebyollin/taesd" + +default_prompt = "Portrait of The Terminator with , glare pose, detailed, intricate, full of colour, cinematic lighting, trending on artstation, 8k, hyperrealistic, focused, extreme details, unreal engine 5 cinematic, masterpiece" +default_negative_prompt = "blurry, low quality, render, 3D, oversaturated" +page_content = """ ++ This demo showcases + LCM LoRA ++ ControlNet + Image to Imasge pipeline using + Diffusers with a MJPEG stream server. +
++ Change the prompt to generate different images; accepts Compel syntax. +
+""" + + +class Pipeline: + class Info(BaseModel): + name: str = "controlnet+loras+sd15" + title: str = "LCM + LoRA + Controlnet" + description: str = "Generates an image from a text prompt" + input_mode: str = "image" + page_content: str = page_content + + class InputParams(BaseModel): + prompt: str = Field( + default_prompt, + title="Prompt", + field="textarea", + id="prompt", + ) + base_model_id: str = Field( + "plasmo/woolitize", + title="Base Model", + values=list(base_models.keys()), + field="select", + id="base_model_id", + ) + seed: int = Field( + 2159232, min=0, title="Seed", field="seed", hide=True, id="seed" + ) + steps: int = Field( + 1, min=1, max=15, title="Steps", field="range", hide=True, id="steps" + ) + width: int = Field( + 768, min=2, max=15, title="Width", disabled=True, hide=True, id="width" + ) + height: int = Field( + 768, min=2, max=15, title="Height", disabled=True, hide=True, id="height" + ) + guidance_scale: float = Field( + 1.0, + min=0, + max=2, + step=0.001, + title="Guidance Scale", + field="range", + hide=True, + id="guidance_scale", + ) + strength: float = Field( + 0.5, + min=0.25, + max=1.0, + step=0.001, + title="Strength", + field="range", + hide=True, + id="strength", + ) + controlnet_scale: float = Field( + 0.8, + min=0, + max=1.0, + step=0.001, + title="Controlnet Scale", + field="range", + hide=True, + id="controlnet_scale", + ) + controlnet_start: float = Field( + 0.0, + min=0, + max=1.0, + step=0.001, + title="Controlnet Start", + field="range", + hide=True, + id="controlnet_start", + ) + controlnet_end: float = Field( + 1.0, + min=0, + max=1.0, + step=0.001, + title="Controlnet End", + field="range", + hide=True, + id="controlnet_end", + ) + canny_low_threshold: float = Field( + 0.31, + min=0, + max=1.0, + step=0.001, + title="Canny Low Threshold", + field="range", + hide=True, + id="canny_low_threshold", + ) + canny_high_threshold: float = Field( + 0.125, + min=0, + max=1.0, + step=0.001, + title="Canny High Threshold", 
+ field="range", + hide=True, + id="canny_high_threshold", + ) + debug_canny: bool = Field( + False, + title="Debug Canny", + field="checkbox", + hide=True, + id="debug_canny", + ) + + def __init__(self, args: Args, device: torch.device, torch_dtype: torch.dtype): + controlnet_canny = ControlNetModel.from_pretrained( + controlnet_model, torch_dtype=torch_dtype + ).to(device) + + self.pipes = {} + + for base_model_id in base_models.keys(): + pipe = StableDiffusionControlNetImg2ImgPipeline.from_pretrained( + base_model_id, + safety_checker=None, + controlnet=controlnet_canny, + ) + self.pipes[base_model_id] = pipe + + self.canny_torch = SobelOperator(device=device) + + for pipe in self.pipes.values(): + pipe.scheduler = LCMScheduler.from_config(pipe.scheduler.config) + pipe.set_progress_bar_config(disable=True) + if device.type != "mps": + pipe.unet.to(memory_format=torch.channels_last) + + if args.taesd: + pipe.vae = AutoencoderTiny.from_pretrained( + taesd_model, torch_dtype=torch_dtype, use_safetensors=True + ).to(device) + + # Load LCM LoRA + pipe.load_lora_weights(lcm_lora_id, adapter_name="lcm") + pipe.to(device=device, dtype=torch_dtype).to(device) + if args.compel: + self.compel_proc = Compel( + tokenizer=pipe.tokenizer, + text_encoder=pipe.text_encoder, + truncate_long_prompts=False, + ) + if args.torch_compile: + pipe.unet = torch.compile( + pipe.unet, mode="reduce-overhead", fullgraph=True + ) + pipe.vae = torch.compile( + pipe.vae, mode="reduce-overhead", fullgraph=True + ) + pipe( + prompt="warmup", + image=[Image.new("RGB", (768, 768))], + control_image=[Image.new("RGB", (768, 768))], + ) + + def predict(self, params: "Pipeline.InputParams") -> Image.Image: + generator = torch.manual_seed(params.seed) + pipe = self.pipes[params.base_model_id] + + activation_token = base_models[params.base_model_id] + prompt = f"{activation_token} {params.prompt}" + prompt_embeds = None + prompt = params.prompt + if hasattr(self, "compel_proc"): + prompt_embeds = 
self.compel_proc(prompt) + prompt = None + + control_image = self.canny_torch( + params.image, params.canny_low_threshold, params.canny_high_threshold + ) + steps = params.steps + strength = params.strength + if int(steps * strength) < 1: + steps = math.ceil(1 / max(0.10, strength)) + + results = pipe( + image=params.image, + control_image=control_image, + prompt=prompt, + prompt_embeds=prompt_embeds, + generator=generator, + strength=strength, + num_inference_steps=steps, + guidance_scale=params.guidance_scale, + width=params.width, + height=params.height, + output_type="pil", + controlnet_conditioning_scale=params.controlnet_scale, + control_guidance_start=params.controlnet_start, + control_guidance_end=params.controlnet_end, + ) + + result_image = results.images[0] + if params.debug_canny: + # paste control_image on top of result_image + w0, h0 = (200, 200) + control_image = control_image.resize((w0, h0)) + w1, h1 = result_image.size + result_image.paste(control_image, (w1 - w0, h1 - h0)) + + return result_image diff --git a/server/pipelines/controlnetLoraSD15QRCode.py b/server/pipelines/controlnetLoraSD15QRCode.py new file mode 100644 index 0000000000000000000000000000000000000000..0c4f56840c04ffe966c4b191439894bf31a6eb69 --- /dev/null +++ b/server/pipelines/controlnetLoraSD15QRCode.py @@ -0,0 +1,226 @@ +from diffusers import ( + StableDiffusionControlNetImg2ImgPipeline, + ControlNetModel, + LCMScheduler, + AutoencoderTiny, +) +from compel import Compel +import torch + +try: + import intel_extension_for_pytorch as ipex # type: ignore +except: + pass + +import psutil +from config import Args +from pydantic import BaseModel, Field +from PIL import Image +import math + +taesd_model = "madebyollin/taesd" +controlnet_model = "monster-labs/control_v1p_sd15_qrcode_monster" +base_model = "nitrosocke/mo-di-diffusion" +lcm_lora_id = "latent-consistency/lcm-lora-sdv1-5" +default_prompt = "abstract art of a men with curly hair by Pablo Picasso" +page_content = """ ++ This 
demo showcases + LCM LoRA ++ ControlNet + Image to Image pipeline using + Diffusers with a MJPEG stream server. +
++ Change the prompt to generate different images; accepts Compel syntax. +
+""" + + +class Pipeline: + class Info(BaseModel): + name: str = "controlnet+loras+sd15" + title: str = "LCM + LoRA + Controlnet" + description: str = "Generates an image from a text prompt" + input_mode: str = "image" + page_content: str = page_content + + class InputParams(BaseModel): + prompt: str = Field( + default_prompt, + title="Prompt", + field="textarea", + id="prompt", + ) + seed: int = Field( + 2159232, min=0, title="Seed", field="seed", hide=True, id="seed" + ) + steps: int = Field( + 5, min=1, max=15, title="Steps", field="range", hide=True, id="steps" + ) + width: int = Field( + 512, min=2, max=15, title="Width", disabled=True, hide=True, id="width" + ) + height: int = Field( + 512, min=2, max=15, title="Height", disabled=True, hide=True, id="height" + ) + guidance_scale: float = Field( + 1.0, + min=0, + max=2, + step=0.001, + title="Guidance Scale", + field="range", + hide=True, + id="guidance_scale", + ) + strength: float = Field( + 0.6, + min=0.25, + max=1.0, + step=0.001, + title="Strength", + field="range", + hide=True, + id="strength", + ) + controlnet_scale: float = Field( + 1.0, + min=0, + max=1.0, + step=0.001, + title="Controlnet Scale", + field="range", + hide=True, + id="controlnet_scale", + ) + controlnet_start: float = Field( + 0.0, + min=0, + max=1.0, + step=0.001, + title="Controlnet Start", + field="range", + hide=True, + id="controlnet_start", + ) + controlnet_end: float = Field( + 1.0, + min=0, + max=1.0, + step=0.001, + title="Controlnet End", + field="range", + hide=True, + id="controlnet_end", + ) + blend: float = Field( + 0.1, + min=0.0, + max=1.0, + step=0.001, + title="Blend", + field="range", + hide=True, + id="blend", + ) + + def __init__(self, args: Args, device: torch.device, torch_dtype: torch.dtype): + controlnet_qrcode = ControlNetModel.from_pretrained( + controlnet_model, torch_dtype=torch_dtype, subfolder="v2" + ).to(device) + + if args.safety_checker: + self.pipe = 
StableDiffusionControlNetImg2ImgPipeline.from_pretrained( + base_model, + controlnet=controlnet_qrcode, + ) + else: + self.pipe = StableDiffusionControlNetImg2ImgPipeline.from_pretrained( + base_model, + safety_checker=None, + controlnet=controlnet_qrcode, + ) + + self.control_image = Image.open("qr-code.png").convert("RGB").resize((512, 512)) + + self.pipe.scheduler = LCMScheduler.from_config(self.pipe.scheduler.config) + self.pipe.set_progress_bar_config(disable=True) + if device.type != "mps": + self.pipe.unet.to(memory_format=torch.channels_last) + + if args.taesd: + self.pipe.vae = AutoencoderTiny.from_pretrained( + taesd_model, torch_dtype=torch_dtype, use_safetensors=True + ).to(device) + + # Load LCM LoRA + self.pipe.load_lora_weights(lcm_lora_id, adapter_name="lcm") + self.pipe.to(device=device, dtype=torch_dtype).to(device) + if args.compel: + self.compel_proc = Compel( + tokenizer=self.pipe.tokenizer, + text_encoder=self.pipe.text_encoder, + truncate_long_prompts=False, + ) + if args.torch_compile: + self.pipe.unet = torch.compile( + self.pipe.unet, mode="reduce-overhead", fullgraph=True + ) + self.pipe.vae = torch.compile( + self.pipe.vae, mode="reduce-overhead", fullgraph=True + ) + self.pipe( + prompt="warmup", + image=[Image.new("RGB", (512, 512))], + control_image=[Image.new("RGB", (512, 512))], + ) + + def predict(self, params: "Pipeline.InputParams") -> Image.Image: + generator = torch.manual_seed(params.seed) + + prompt = f"modern disney style {params.prompt}" + prompt_embeds = None + prompt = params.prompt + if hasattr(self, "compel_proc"): + prompt_embeds = self.compel_proc(prompt) + prompt = None + + steps = params.steps + strength = params.strength + if int(steps * strength) < 1: + steps = math.ceil(1 / max(0.10, strength)) + + blend_qr_image = Image.blend( + params.image, self.control_image, alpha=params.blend + ) + results = self.pipe( + image=blend_qr_image, + control_image=self.control_image, + prompt=prompt, + 
prompt_embeds=prompt_embeds, + generator=generator, + strength=strength, + num_inference_steps=steps, + guidance_scale=params.guidance_scale, + width=params.width, + height=params.height, + output_type="pil", + controlnet_conditioning_scale=params.controlnet_scale, + control_guidance_start=params.controlnet_start, + control_guidance_end=params.controlnet_end, + ) + + return results.images[0] diff --git a/server/pipelines/controlnetLoraSDXL-Lightning.py b/server/pipelines/controlnetLoraSDXL-Lightning.py new file mode 100644 index 0000000000000000000000000000000000000000..c19cbdd221dd2365582ba5069c8d4b97cd0ecbcb --- /dev/null +++ b/server/pipelines/controlnetLoraSDXL-Lightning.py @@ -0,0 +1,296 @@ +from diffusers import ( + UNet2DConditionModel, + StableDiffusionXLControlNetImg2ImgPipeline, + ControlNetModel, + AutoencoderKL, + AutoencoderTiny, + EulerDiscreteScheduler, +) +from compel import Compel, ReturnedEmbeddingsType +import torch +from pipelines.utils.canny_gpu import SobelOperator +from huggingface_hub import hf_hub_download +from safetensors.torch import load_file + +try: + import intel_extension_for_pytorch as ipex # type: ignore +except: + pass + +from config import Args +from pydantic import BaseModel, Field +from PIL import Image +import math + +controlnet_model = "diffusers/controlnet-canny-sdxl-1.0-small" +base = "stabilityai/stable-diffusion-xl-base-1.0" +repo = "ByteDance/SDXL-Lightning" +ckpt = "sdxl_lightning_2step_unet.safetensors" +taesd_model = "madebyollin/taesdxl" +NUM_STEPS = 2 + + +default_prompt = "Portrait of The Terminator with , glare pose, detailed, intricate, full of colour, cinematic lighting, trending on artstation, 8k, hyperrealistic, focused, extreme details, unreal engine 5 cinematic, masterpiece" +default_negative_prompt = "blurry, low quality, render, 3D, oversaturated" +page_content = """ ++ This demo showcases + LCM LoRA ++ SDXL + Controlnet + Image to Image pipeline using + Diffusers with a MJPEG stream server. +
++ Change the prompt to generate different images; accepts Compel syntax. +
+""" + + +class Pipeline: + class Info(BaseModel): + name: str = "controlnet+loras+sdxl+lightning" + title: str = "SDXL + LCM + LoRA + Controlnet" + description: str = "Generates an image from a text prompt" + input_mode: str = "image" + page_content: str = page_content + + class InputParams(BaseModel): + prompt: str = Field( + default_prompt, + title="Prompt", + field="textarea", + id="prompt", + ) + negative_prompt: str = Field( + default_negative_prompt, + title="Negative Prompt", + field="textarea", + id="negative_prompt", + hide=True, + ) + seed: int = Field( + 2159232, min=0, title="Seed", field="seed", hide=True, id="seed" + ) + width: int = Field( + 1024, min=2, max=15, title="Width", disabled=True, hide=True, id="width" + ) + height: int = Field( + 1024, min=2, max=15, title="Height", disabled=True, hide=True, id="height" + ) + guidance_scale: float = Field( + 0.0, + min=0, + max=2.0, + step=0.001, + title="Guidance Scale", + field="range", + hide=True, + id="guidance_scale", + ) + strength: float = Field( + 1, + min=0.25, + max=1.0, + step=0.0001, + title="Strength", + field="range", + hide=True, + id="strength", + ) + controlnet_scale: float = Field( + 0.5, + min=0, + max=1.0, + step=0.001, + title="Controlnet Scale", + field="range", + hide=True, + id="controlnet_scale", + ) + controlnet_start: float = Field( + 0.0, + min=0, + max=1.0, + step=0.001, + title="Controlnet Start", + field="range", + hide=True, + id="controlnet_start", + ) + controlnet_end: float = Field( + 1.0, + min=0, + max=1.0, + step=0.001, + title="Controlnet End", + field="range", + hide=True, + id="controlnet_end", + ) + canny_low_threshold: float = Field( + 0.31, + min=0, + max=1.0, + step=0.001, + title="Canny Low Threshold", + field="range", + hide=True, + id="canny_low_threshold", + ) + canny_high_threshold: float = Field( + 0.125, + min=0, + max=1.0, + step=0.001, + title="Canny High Threshold", + field="range", + hide=True, + id="canny_high_threshold", + ) + debug_canny: bool = 
Field( + False, + title="Debug Canny", + field="checkbox", + hide=True, + id="debug_canny", + ) + + def __init__(self, args: Args, device: torch.device, torch_dtype: torch.dtype): + if args.taesd: + vae = AutoencoderTiny.from_pretrained( + taesd_model, torch_dtype=torch_dtype, use_safetensors=True + ) + else: + vae = AutoencoderKL.from_pretrained( + "madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch_dtype + ) + + controlnet_canny = ControlNetModel.from_pretrained( + controlnet_model, torch_dtype=torch_dtype + ) + + unet = UNet2DConditionModel.from_config(base, subfolder="unet") + unet.load_state_dict(load_file(hf_hub_download(repo, ckpt), device=device.type)) + self.pipe = StableDiffusionXLControlNetImg2ImgPipeline.from_pretrained( + base, + unet=unet, + torch_dtype=torch_dtype, + variant="fp16", + controlnet=controlnet_canny, + vae=vae, + ) + + # Ensure sampler uses "trailing" timesteps. + self.pipe.scheduler = EulerDiscreteScheduler.from_config( + self.pipe.scheduler.config, timestep_spacing="trailing" + ) + + self.canny_torch = SobelOperator(device=device) + self.pipe.set_progress_bar_config(disable=True) + self.pipe.to(device=device, dtype=torch_dtype) + + if args.sfast: + from sfast.compilers.stable_diffusion_pipeline_compiler import ( + compile, + CompilationConfig, + ) + + config = CompilationConfig.Default() + config.enable_xformers = True + config.enable_triton = True + config.enable_cuda_graph = True + self.pipe = compile(self.pipe, config=config) + + if device.type != "mps": + self.pipe.unet.to(memory_format=torch.channels_last) + + if args.compel: + self.pipe.compel_proc = Compel( + tokenizer=[self.pipe.tokenizer, self.pipe.tokenizer_2], + text_encoder=[self.pipe.text_encoder, self.pipe.text_encoder_2], + returned_embeddings_type=ReturnedEmbeddingsType.PENULTIMATE_HIDDEN_STATES_NON_NORMALIZED, + requires_pooled=[False, True], + ) + + if args.torch_compile: + self.pipe.unet = torch.compile( + self.pipe.unet, mode="reduce-overhead", fullgraph=True + ) + 
self.pipe.vae = torch.compile( + self.pipe.vae, mode="reduce-overhead", fullgraph=True + ) + self.pipe( + prompt="warmup", + image=[Image.new("RGB", (768, 768))], + control_image=[Image.new("RGB", (768, 768))], + ) + + def predict(self, params: "Pipeline.InputParams") -> Image.Image | None: + generator = torch.manual_seed(params.seed) + + prompt = params.prompt + negative_prompt = params.negative_prompt + prompt_embeds = None + pooled_prompt_embeds = None + negative_prompt_embeds = None + negative_pooled_prompt_embeds = None + if hasattr(self.pipe, "compel_proc"): + _prompt_embeds, pooled_prompt_embeds = self.pipe.compel_proc( + [params.prompt, params.negative_prompt] + ) + prompt = None + negative_prompt = None + prompt_embeds = _prompt_embeds[0:1] + pooled_prompt_embeds = pooled_prompt_embeds[0:1] + negative_prompt_embeds = _prompt_embeds[1:2] + negative_pooled_prompt_embeds = pooled_prompt_embeds[1:2] + + control_image = self.canny_torch( + params.image, params.canny_low_threshold, params.canny_high_threshold + ) + steps = NUM_STEPS + strength = params.strength + if int(steps * strength) < 1: + steps = math.ceil(1 / max(0.10, strength)) + + results = self.pipe( + image=params.image, + control_image=control_image, + prompt=prompt, + negative_prompt=negative_prompt, + prompt_embeds=prompt_embeds, + pooled_prompt_embeds=pooled_prompt_embeds, + negative_prompt_embeds=negative_prompt_embeds, + negative_pooled_prompt_embeds=negative_pooled_prompt_embeds, + generator=generator, + strength=strength, + num_inference_steps=NUM_STEPS, + guidance_scale=params.guidance_scale, + width=params.width, + height=params.height, + output_type="pil", + controlnet_conditioning_scale=params.controlnet_scale, + control_guidance_start=params.controlnet_start, + control_guidance_end=params.controlnet_end, + ) + result_image = results.images[0] + if params.debug_canny: + # paste control_image on top of result_image + w0, h0 = (200, 200) + control_image = control_image.resize((w0, h0)) + 
w1, h1 = result_image.size + result_image.paste(control_image, (w1 - w0, h1 - h0)) + + return result_image diff --git a/server/pipelines/controlnetLoraSDXL.py b/server/pipelines/controlnetLoraSDXL.py new file mode 100644 index 0000000000000000000000000000000000000000..44356542debff813412ef9ec4c6428ceca679e1f --- /dev/null +++ b/server/pipelines/controlnetLoraSDXL.py @@ -0,0 +1,294 @@ +from diffusers import ( + StableDiffusionXLControlNetImg2ImgPipeline, + ControlNetModel, + LCMScheduler, + AutoencoderKL, + AutoencoderTiny, +) +from compel import Compel, ReturnedEmbeddingsType +import torch +from pipelines.utils.canny_gpu import SobelOperator + +try: + import intel_extension_for_pytorch as ipex # type: ignore +except: + pass + +import psutil +from config import Args +from pydantic import BaseModel, Field +from PIL import Image +import math + +controlnet_model = "diffusers/controlnet-canny-sdxl-1.0" +model_id = "stabilityai/stable-diffusion-xl-base-1.0" +lcm_lora_id = "latent-consistency/lcm-lora-sdxl" +taesd_model = "madebyollin/taesdxl" + + +default_prompt = "Portrait of The Terminator with , glare pose, detailed, intricate, full of colour, cinematic lighting, trending on artstation, 8k, hyperrealistic, focused, extreme details, unreal engine 5 cinematic, masterpiece" +default_negative_prompt = "blurry, low quality, render, 3D, oversaturated" +page_content = """ ++ This demo showcases + LCM LoRA ++ SDXL + Controlnet + Image to Image pipeline using + Diffusers with a MJPEG stream server. +
++ Change the prompt to generate different images; accepts Compel syntax. +
+""" + + +class Pipeline: + class Info(BaseModel): + name: str = "controlnet+loras+sdxl" + title: str = "SDXL + LCM + LoRA + Controlnet" + description: str = "Generates an image from a text prompt" + input_mode: str = "image" + page_content: str = page_content + + class InputParams(BaseModel): + prompt: str = Field( + default_prompt, + title="Prompt", + field="textarea", + id="prompt", + ) + negative_prompt: str = Field( + default_negative_prompt, + title="Negative Prompt", + field="textarea", + id="negative_prompt", + hide=True, + ) + seed: int = Field( + 2159232, min=0, title="Seed", field="seed", hide=True, id="seed" + ) + steps: int = Field( + 1, min=1, max=10, title="Steps", field="range", hide=True, id="steps" + ) + width: int = Field( + 512, min=2, max=15, title="Width", disabled=True, hide=True, id="width" + ) + height: int = Field( + 512, min=2, max=15, title="Height", disabled=True, hide=True, id="height" + ) + guidance_scale: float = Field( + 1.0, + min=0, + max=2.0, + step=0.001, + title="Guidance Scale", + field="range", + hide=True, + id="guidance_scale", + ) + strength: float = Field( + 1, + min=0.25, + max=1.0, + step=0.0001, + title="Strength", + field="range", + hide=True, + id="strength", + ) + controlnet_scale: float = Field( + 0.5, + min=0, + max=1.0, + step=0.001, + title="Controlnet Scale", + field="range", + hide=True, + id="controlnet_scale", + ) + controlnet_start: float = Field( + 0.0, + min=0, + max=1.0, + step=0.001, + title="Controlnet Start", + field="range", + hide=True, + id="controlnet_start", + ) + controlnet_end: float = Field( + 1.0, + min=0, + max=1.0, + step=0.001, + title="Controlnet End", + field="range", + hide=True, + id="controlnet_end", + ) + canny_low_threshold: float = Field( + 0.31, + min=0, + max=1.0, + step=0.001, + title="Canny Low Threshold", + field="range", + hide=True, + id="canny_low_threshold", + ) + canny_high_threshold: float = Field( + 0.125, + min=0, + max=1.0, + step=0.001, + title="Canny High 
Threshold", + field="range", + hide=True, + id="canny_high_threshold", + ) + debug_canny: bool = Field( + False, + title="Debug Canny", + field="checkbox", + hide=True, + id="debug_canny", + ) + + def __init__(self, args: Args, device: torch.device, torch_dtype: torch.dtype): + controlnet_canny = ControlNetModel.from_pretrained( + controlnet_model, torch_dtype=torch_dtype + ).to(device) + vae = AutoencoderKL.from_pretrained( + "madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch_dtype + ) + self.pipe = StableDiffusionXLControlNetImg2ImgPipeline.from_pretrained( + model_id, + safety_checker=None, + controlnet=controlnet_canny, + vae=vae, + ) + self.canny_torch = SobelOperator(device=device) + # Load LCM LoRA + self.pipe.load_lora_weights(lcm_lora_id, adapter_name="lcm") + self.pipe.load_lora_weights( + "CiroN2022/toy-face", + weight_name="toy_face_sdxl.safetensors", + adapter_name="toy", + ) + self.pipe.set_adapters(["lcm", "toy"], adapter_weights=[1.0, 0.8]) + + self.pipe.scheduler = LCMScheduler.from_config(self.pipe.scheduler.config) + self.pipe.set_progress_bar_config(disable=True) + self.pipe.to(device=device, dtype=torch_dtype).to(device) + + if args.sfast: + from sfast.compilers.stable_diffusion_pipeline_compiler import ( + compile, + CompilationConfig, + ) + + config = CompilationConfig.Default() + config.enable_xformers = True + config.enable_triton = True + config.enable_cuda_graph = True + self.pipe = compile(self.pipe, config=config) + + if device.type != "mps": + self.pipe.unet.to(memory_format=torch.channels_last) + + if args.compel: + self.pipe.compel_proc = Compel( + tokenizer=[self.pipe.tokenizer, self.pipe.tokenizer_2], + text_encoder=[self.pipe.text_encoder, self.pipe.text_encoder_2], + returned_embeddings_type=ReturnedEmbeddingsType.PENULTIMATE_HIDDEN_STATES_NON_NORMALIZED, + requires_pooled=[False, True], + ) + + if args.taesd: + self.pipe.vae = AutoencoderTiny.from_pretrained( + taesd_model, torch_dtype=torch_dtype, use_safetensors=True + 
).to(device) + + if args.torch_compile: + self.pipe.unet = torch.compile( + self.pipe.unet, mode="reduce-overhead", fullgraph=True + ) + self.pipe.vae = torch.compile( + self.pipe.vae, mode="reduce-overhead", fullgraph=True + ) + self.pipe( + prompt="warmup", + image=[Image.new("RGB", (768, 768))], + control_image=[Image.new("RGB", (768, 768))], + ) + + def predict(self, params: "Pipeline.InputParams") -> Image.Image: + generator = torch.manual_seed(params.seed) + + prompt = params.prompt + negative_prompt = params.negative_prompt + prompt_embeds = None + pooled_prompt_embeds = None + negative_prompt_embeds = None + negative_pooled_prompt_embeds = None + if hasattr(self.pipe, "compel_proc"): + _prompt_embeds, pooled_prompt_embeds = self.pipe.compel_proc( + [params.prompt, params.negative_prompt] + ) + prompt = None + negative_prompt = None + prompt_embeds = _prompt_embeds[0:1] + pooled_prompt_embeds = pooled_prompt_embeds[0:1] + negative_prompt_embeds = _prompt_embeds[1:2] + negative_pooled_prompt_embeds = pooled_prompt_embeds[1:2] + + control_image = self.canny_torch( + params.image, params.canny_low_threshold, params.canny_high_threshold + ) + steps = params.steps + strength = params.strength + if int(steps * strength) < 1: + steps = math.ceil(1 / max(0.10, strength)) + + results = self.pipe( + image=params.image, + control_image=control_image, + prompt=prompt, + negative_prompt=negative_prompt, + prompt_embeds=prompt_embeds, + pooled_prompt_embeds=pooled_prompt_embeds, + negative_prompt_embeds=negative_prompt_embeds, + negative_pooled_prompt_embeds=negative_pooled_prompt_embeds, + generator=generator, + strength=strength, + num_inference_steps=steps, + guidance_scale=params.guidance_scale, + width=params.width, + height=params.height, + output_type="pil", + controlnet_conditioning_scale=params.controlnet_scale, + control_guidance_start=params.controlnet_start, + control_guidance_end=params.controlnet_end, + ) + + result_image = results.images[0] + if 
params.debug_canny: + # paste control_image on top of result_image + w0, h0 = (200, 200) + control_image = control_image.resize((w0, h0)) + w1, h1 = result_image.size + result_image.paste(control_image, (w1 - w0, h1 - h0)) + + return result_image diff --git a/server/pipelines/controlnetMistoLineHyperSDXL.py b/server/pipelines/controlnetMistoLineHyperSDXL.py new file mode 100644 index 0000000000000000000000000000000000000000..098829f29180d0f584ffc7d6e8294fdb7237a1c3 --- /dev/null +++ b/server/pipelines/controlnetMistoLineHyperSDXL.py @@ -0,0 +1,288 @@ +from diffusers import ( + StableDiffusionXLControlNetImg2ImgPipeline, + ControlNetModel, + AutoencoderKL, + TCDScheduler, +) +from compel import Compel, ReturnedEmbeddingsType +import torch +from controlnet_aux import AnylineDetector +from huggingface_hub import hf_hub_download + +try: + import intel_extension_for_pytorch as ipex # type: ignore +except: + pass + +import psutil +from config import Args +from pydantic import BaseModel, Field +from PIL import Image +import math + +# controlnet_model = "diffusers/controlnet-canny-sdxl-1.0" +controlnet_model = "TheMistoAI/MistoLine" +model_id = "stabilityai/stable-diffusion-xl-base-1.0" +taesd_model = "madebyollin/taesdxl" + +default_prompt = "Portrait of The Terminator with , glare pose, detailed, intricate, full of colour, cinematic lighting, trending on artstation, 8k, hyperrealistic, focused, extreme details, unreal engine 5 cinematic, masterpiece" +default_negative_prompt = "blurry, low quality, render, 3D, oversaturated" +page_content = """ ++ This demo showcases + SD Turbo +Image to Image pipeline using + Diffusers with a MJPEG stream server. +
++ Change the prompt to generate different images; accepts Compel syntax. +
+""" + + +class Pipeline: + class Info(BaseModel): + name: str = "controlnet+sd15Turbo" + title: str = "SDv1.5 Turbo + Controlnet" + description: str = "Generates an image from a text prompt" + input_mode: str = "image" + page_content: str = page_content + + class InputParams(BaseModel): + prompt: str = Field( + default_prompt, + title="Prompt", + field="textarea", + id="prompt", + ) + seed: int = Field( + 4402026899276587, min=0, title="Seed", field="seed", hide=True, id="seed" + ) + steps: int = Field( + 1, min=1, max=15, title="Steps", field="range", hide=True, id="steps" + ) + width: int = Field( + 512, min=2, max=15, title="Width", disabled=True, hide=True, id="width" + ) + height: int = Field( + 512, min=2, max=15, title="Height", disabled=True, hide=True, id="height" + ) + guidance_scale: float = Field( + 1.21, + min=0, + max=10, + step=0.001, + title="Guidance Scale", + field="range", + hide=True, + id="guidance_scale", + ) + strength: float = Field( + 0.8, + min=0.10, + max=1.0, + step=0.001, + title="Strength", + field="range", + hide=True, + id="strength", + ) + controlnet_scale: float = Field( + 0.325, + min=0, + max=1.0, + step=0.001, + title="Controlnet Scale", + field="range", + hide=True, + id="controlnet_scale", + ) + controlnet_start: float = Field( + 0.0, + min=0, + max=1.0, + step=0.001, + title="Controlnet Start", + field="range", + hide=True, + id="controlnet_start", + ) + controlnet_end: float = Field( + 1.0, + min=0, + max=1.0, + step=0.001, + title="Controlnet End", + field="range", + hide=True, + id="controlnet_end", + ) + canny_low_threshold: float = Field( + 0.31, + min=0, + max=1.0, + step=0.001, + title="Canny Low Threshold", + field="range", + hide=True, + id="canny_low_threshold", + ) + canny_high_threshold: float = Field( + 0.125, + min=0, + max=1.0, + step=0.001, + title="Canny High Threshold", + field="range", + hide=True, + id="canny_high_threshold", + ) + debug_canny: bool = Field( + False, + title="Debug Canny", + 
field="checkbox", + hide=True, + id="debug_canny", + ) + + def __init__(self, args: Args, device: torch.device, torch_dtype: torch.dtype): + controlnet_canny = ControlNetModel.from_pretrained( + controlnet_model, torch_dtype=torch_dtype + ) + self.pipes = {} + + self.pipe = StableDiffusionControlNetImg2ImgPipeline.from_pretrained( + base_model, + controlnet=controlnet_canny, + safety_checker=None, + torch_dtype=torch_dtype, + ) + + if args.taesd: + self.pipe.vae = AutoencoderTiny.from_pretrained( + taesd_model, torch_dtype=torch_dtype, use_safetensors=True + ).to(device) + + if args.sfast: + print("\nRunning sfast compile\n") + from sfast.compilers.stable_diffusion_pipeline_compiler import ( + compile, + CompilationConfig, + ) + + config = CompilationConfig.Default() + config.enable_xformers = True + config.enable_triton = True + config.enable_cuda_graph = True + self.pipe = compile(self.pipe, config=config) + + if args.onediff: + print("\nRunning onediff compile\n") + from onediff.infer_compiler import oneflow_compile + + self.pipe.unet = oneflow_compile(self.pipe.unet) + self.pipe.vae.encoder = oneflow_compile(self.pipe.vae.encoder) + self.pipe.vae.decoder = oneflow_compile(self.pipe.vae.decoder) + self.pipe.controlnet = oneflow_compile(self.pipe.controlnet) + + self.canny_torch = SobelOperator(device=device) + + self.pipe.scheduler = LCMScheduler.from_config(self.pipe.scheduler.config) + self.pipe.set_progress_bar_config(disable=True) + self.pipe.to(device=device, dtype=torch_dtype) + if device.type != "mps": + self.pipe.unet.to(memory_format=torch.channels_last) + + if args.compel: + from compel import Compel + + self.pipe.compel_proc = Compel( + tokenizer=self.pipe.tokenizer, + text_encoder=self.pipe.text_encoder, + truncate_long_prompts=True, + ) + + if args.taesd: + self.pipe.vae = AutoencoderTiny.from_pretrained( + taesd_model, torch_dtype=torch_dtype, use_safetensors=True + ).to(device) + + if args.torch_compile: + self.pipe.unet = torch.compile( + 
self.pipe.unet, mode="reduce-overhead", fullgraph=True + ) + self.pipe.vae = torch.compile( + self.pipe.vae, mode="reduce-overhead", fullgraph=True + ) + self.pipe( + prompt="warmup", + image=[Image.new("RGB", (768, 768))], + control_image=[Image.new("RGB", (768, 768))], + ) + + def predict(self, params: "Pipeline.InputParams") -> Image.Image: + generator = torch.manual_seed(params.seed) + prompt = params.prompt + prompt_embeds = None + if hasattr(self.pipe, "compel_proc"): + prompt_embeds = self.pipe.compel_proc( + [params.prompt, params.negative_prompt] + ) + prompt = None + control_image = self.canny_torch( + params.image, params.canny_low_threshold, params.canny_high_threshold + ) + steps = params.steps + strength = params.strength + if int(steps * strength) < 1: + steps = math.ceil(1 / max(0.10, strength)) + results = self.pipe( + image=params.image, + control_image=control_image, + prompt=prompt, + prompt_embeds=prompt_embeds, + generator=generator, + strength=strength, + num_inference_steps=steps, + guidance_scale=params.guidance_scale, + width=params.width, + height=params.height, + output_type="pil", + controlnet_conditioning_scale=params.controlnet_scale, + control_guidance_start=params.controlnet_start, + control_guidance_end=params.controlnet_end, + ) + result_image = results.images[0] + if params.debug_canny: + # paste control_image on top of result_image + w0, h0 = (200, 200) + control_image = control_image.resize((w0, h0)) + w1, h1 = result_image.size + result_image.paste(control_image, (w1 - w0, h1 - h0)) + + return result_image diff --git a/server/pipelines/controlnetSDXLTurbo.py b/server/pipelines/controlnetSDXLTurbo.py new file mode 100644 index 0000000000000000000000000000000000000000..60adafcd72de8f4b6cb652c8af388281e4ebb04f --- /dev/null +++ b/server/pipelines/controlnetSDXLTurbo.py @@ -0,0 +1,281 @@ +from diffusers import ( + StableDiffusionXLControlNetImg2ImgPipeline, + ControlNetModel, + AutoencoderKL, + AutoencoderTiny, +) +from compel 
import Compel, ReturnedEmbeddingsType +import torch +from pipelines.utils.canny_gpu import SobelOperator + +try: + import intel_extension_for_pytorch as ipex # type: ignore +except: + pass + +import psutil +from config import Args +from pydantic import BaseModel, Field +from PIL import Image +import math + +controlnet_model = "diffusers/controlnet-canny-sdxl-1.0" +model_id = "stabilityai/sdxl-turbo" +taesd_model = "madebyollin/taesdxl" + +default_prompt = "Portrait of The Terminator with , glare pose, detailed, intricate, full of colour, cinematic lighting, trending on artstation, 8k, hyperrealistic, focused, extreme details, unreal engine 5 cinematic, masterpiece" +default_negative_prompt = "blurry, low quality, render, 3D, oversaturated" +page_content = """ ++ This demo showcases + SDXL Turbo +Image to Image pipeline using + Diffusers with a MJPEG stream server. +
++ Change the prompt to generate different images, accepts Compel syntax. +
+""" + + +class Pipeline: + class Info(BaseModel): + name: str = "controlnet+SDXL+Turbo" + title: str = "SDXL Turbo + Controlnet" + description: str = "Generates an image from a text prompt" + input_mode: str = "image" + page_content: str = page_content + + class InputParams(BaseModel): + prompt: str = Field( + default_prompt, + title="Prompt", + field="textarea", + id="prompt", + ) + negative_prompt: str = Field( + default_negative_prompt, + title="Negative Prompt", + field="textarea", + id="negative_prompt", + hide=True, + ) + seed: int = Field( + 2159232, min=0, title="Seed", field="seed", hide=True, id="seed" + ) + steps: int = Field( + 2, min=1, max=15, title="Steps", field="range", hide=True, id="steps" + ) + width: int = Field( + 1024, min=2, max=15, title="Width", disabled=True, hide=True, id="width" + ) + height: int = Field( + 1024, min=2, max=15, title="Height", disabled=True, hide=True, id="height" + ) + guidance_scale: float = Field( + 1.0, + min=0, + max=10, + step=0.001, + title="Guidance Scale", + field="range", + hide=True, + id="guidance_scale", + ) + strength: float = Field( + 0.5, + min=0.25, + max=1.0, + step=0.001, + title="Strength", + field="range", + hide=True, + id="strength", + ) + controlnet_scale: float = Field( + 0.5, + min=0, + max=1.0, + step=0.001, + title="Controlnet Scale", + field="range", + hide=True, + id="controlnet_scale", + ) + controlnet_start: float = Field( + 0.0, + min=0, + max=1.0, + step=0.001, + title="Controlnet Start", + field="range", + hide=True, + id="controlnet_start", + ) + controlnet_end: float = Field( + 1.0, + min=0, + max=1.0, + step=0.001, + title="Controlnet End", + field="range", + hide=True, + id="controlnet_end", + ) + canny_low_threshold: float = Field( + 0.31, + min=0, + max=1.0, + step=0.001, + title="Canny Low Threshold", + field="range", + hide=True, + id="canny_low_threshold", + ) + canny_high_threshold: float = Field( + 0.125, + min=0, + max=1.0, + step=0.001, + title="Canny High Threshold", + 
field="range", + hide=True, + id="canny_high_threshold", + ) + debug_canny: bool = Field( + False, + title="Debug Canny", + field="checkbox", + hide=True, + id="debug_canny", + ) + + def __init__(self, args: Args, device: torch.device, torch_dtype: torch.dtype): + controlnet_canny = ControlNetModel.from_pretrained( + controlnet_model, torch_dtype=torch_dtype + ).to(device) + vae = AutoencoderKL.from_pretrained( + "madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch_dtype + ) + + self.pipe = StableDiffusionXLControlNetImg2ImgPipeline.from_pretrained( + model_id, + safety_checker=None, + controlnet=controlnet_canny, + vae=vae, + ) + self.canny_torch = SobelOperator(device=device) + + if args.sfast: + from sfast.compilers.stable_diffusion_pipeline_compiler import ( + compile, + CompilationConfig, + ) + + config = CompilationConfig.Default() + config.enable_xformers = True + config.enable_triton = True + config.enable_cuda_graph = True + self.pipe = compile(self.pipe, config=config) + + self.pipe.set_progress_bar_config(disable=True) + self.pipe.to(device=device, dtype=torch_dtype).to(device) + if device.type != "mps": + self.pipe.unet.to(memory_format=torch.channels_last) + + if args.compel: + self.pipe.compel_proc = Compel( + tokenizer=[self.pipe.tokenizer, self.pipe.tokenizer_2], + text_encoder=[self.pipe.text_encoder, self.pipe.text_encoder_2], + returned_embeddings_type=ReturnedEmbeddingsType.PENULTIMATE_HIDDEN_STATES_NON_NORMALIZED, + requires_pooled=[False, True], + ) + + if args.taesd: + self.pipe.vae = AutoencoderTiny.from_pretrained( + taesd_model, torch_dtype=torch_dtype, use_safetensors=True + ).to(device) + + if args.torch_compile: + self.pipe.unet = torch.compile( + self.pipe.unet, mode="reduce-overhead", fullgraph=True + ) + self.pipe.vae = torch.compile( + self.pipe.vae, mode="reduce-overhead", fullgraph=True + ) + self.pipe( + prompt="warmup", + image=[Image.new("RGB", (768, 768))], + control_image=[Image.new("RGB", (768, 768))], + ) + + def 
predict(self, params: "Pipeline.InputParams") -> Image.Image: + generator = torch.manual_seed(params.seed) + + prompt = params.prompt + negative_prompt = params.negative_prompt + prompt_embeds = None + pooled_prompt_embeds = None + negative_prompt_embeds = None + negative_pooled_prompt_embeds = None + if hasattr(self.pipe, "compel_proc"): + _prompt_embeds, pooled_prompt_embeds = self.pipe.compel_proc( + [params.prompt, params.negative_prompt] + ) + prompt = None + negative_prompt = None + prompt_embeds = _prompt_embeds[0:1] + pooled_prompt_embeds = pooled_prompt_embeds[0:1] + negative_prompt_embeds = _prompt_embeds[1:2] + negative_pooled_prompt_embeds = pooled_prompt_embeds[1:2] + + control_image = self.canny_torch( + params.image, params.canny_low_threshold, params.canny_high_threshold + ) + steps = params.steps + strength = params.strength + if int(steps * strength) < 1: + steps = math.ceil(1 / max(0.10, strength)) + + results = self.pipe( + image=params.image, + control_image=control_image, + prompt=prompt, + negative_prompt=negative_prompt, + prompt_embeds=prompt_embeds, + pooled_prompt_embeds=pooled_prompt_embeds, + negative_prompt_embeds=negative_prompt_embeds, + negative_pooled_prompt_embeds=negative_pooled_prompt_embeds, + generator=generator, + strength=strength, + num_inference_steps=steps, + guidance_scale=params.guidance_scale, + width=params.width, + height=params.height, + output_type="pil", + controlnet_conditioning_scale=params.controlnet_scale, + control_guidance_start=params.controlnet_start, + control_guidance_end=params.controlnet_end, + ) + result_image = results.images[0] + if params.debug_canny: + # paste control_image on top of result_image + w0, h0 = (200, 200) + control_image = control_image.resize((w0, h0)) + w1, h1 = result_image.size + result_image.paste(control_image, (w1 - w0, h1 - h0)) + + return result_image diff --git a/server/pipelines/controlnetSegmindVegaRT.py b/server/pipelines/controlnetSegmindVegaRT.py new file mode 100644 
index 0000000000000000000000000000000000000000..559485d626cb7a0ed692468d80ef18c0bb2021ca --- /dev/null +++ b/server/pipelines/controlnetSegmindVegaRT.py @@ -0,0 +1,289 @@ +from diffusers import ( + StableDiffusionXLControlNetImg2ImgPipeline, + ControlNetModel, + AutoencoderKL, + AutoencoderTiny, + LCMScheduler, +) +from compel import Compel, ReturnedEmbeddingsType +import torch +from pipelines.utils.canny_gpu import SobelOperator + +try: + import intel_extension_for_pytorch as ipex # type: ignore +except: + pass + +import psutil +from config import Args +from pydantic import BaseModel, Field +from PIL import Image +import math + +controlnet_model = "diffusers/controlnet-canny-sdxl-1.0" +base_model = "segmind/Segmind-Vega" +lora_model = "segmind/Segmind-VegaRT" +taesd_model = "madebyollin/taesdxl" + +default_prompt = "Portrait of The Terminator with , glare pose, detailed, intricate, full of colour, cinematic lighting, trending on artstation, 8k, hyperrealistic, focused, extreme details, unreal engine 5 cinematic, masterpiece" +default_negative_prompt = "blurry, low quality, render, 3D, oversaturated" +page_content = """ ++ This demo showcases + Segmind-VegaRT +Image to Image pipeline using + Diffusers with a MJPEG stream server. +
++ Change the prompt to generate different images, accepts Compel syntax. +
+""" + + +class Pipeline: + class Info(BaseModel): + name: str = "controlnet+SegmindVegaRT" + title: str = "SegmindVegaRT + Controlnet" + description: str = "Generates an image from a text prompt" + input_mode: str = "image" + page_content: str = page_content + + class InputParams(BaseModel): + prompt: str = Field( + default_prompt, + title="Prompt", + field="textarea", + id="prompt", + ) + negative_prompt: str = Field( + default_negative_prompt, + title="Negative Prompt", + field="textarea", + id="negative_prompt", + hide=True, + ) + seed: int = Field( + 2159232, min=0, title="Seed", field="seed", hide=True, id="seed" + ) + steps: int = Field( + 2, min=1, max=15, title="Steps", field="range", hide=True, id="steps" + ) + width: int = Field( + 1024, min=2, max=15, title="Width", disabled=True, hide=True, id="width" + ) + height: int = Field( + 1024, min=2, max=15, title="Height", disabled=True, hide=True, id="height" + ) + guidance_scale: float = Field( + 0.0, + min=0, + max=1, + step=0.001, + title="Guidance Scale", + field="range", + hide=True, + id="guidance_scale", + ) + strength: float = Field( + 0.5, + min=0.25, + max=1.0, + step=0.001, + title="Strength", + field="range", + hide=True, + id="strength", + ) + controlnet_scale: float = Field( + 0.5, + min=0, + max=1.0, + step=0.001, + title="Controlnet Scale", + field="range", + hide=True, + id="controlnet_scale", + ) + controlnet_start: float = Field( + 0.0, + min=0, + max=1.0, + step=0.001, + title="Controlnet Start", + field="range", + hide=True, + id="controlnet_start", + ) + controlnet_end: float = Field( + 1.0, + min=0, + max=1.0, + step=0.001, + title="Controlnet End", + field="range", + hide=True, + id="controlnet_end", + ) + canny_low_threshold: float = Field( + 0.31, + min=0, + max=1.0, + step=0.001, + title="Canny Low Threshold", + field="range", + hide=True, + id="canny_low_threshold", + ) + canny_high_threshold: float = Field( + 0.125, + min=0, + max=1.0, + step=0.001, + title="Canny High 
Threshold", + field="range", + hide=True, + id="canny_high_threshold", + ) + debug_canny: bool = Field( + False, + title="Debug Canny", + field="checkbox", + hide=True, + id="debug_canny", + ) + + def __init__(self, args: Args, device: torch.device, torch_dtype: torch.dtype): + controlnet_canny = ControlNetModel.from_pretrained( + controlnet_model, + torch_dtype=torch_dtype, + ).to(device) + vae = AutoencoderKL.from_pretrained( + "madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch_dtype + ) + self.pipe = StableDiffusionXLControlNetImg2ImgPipeline.from_pretrained( + base_model, + safety_checker=None, + controlnet=controlnet_canny, + vae=vae, + ) + self.canny_torch = SobelOperator(device=device) + + self.pipe.load_lora_weights(lora_model) + self.pipe.fuse_lora() + self.pipe.scheduler = LCMScheduler.from_pretrained( + base_model, subfolder="scheduler" + ) + + if args.sfast: + from sfast.compilers.stable_diffusion_pipeline_compiler import ( + compile, + CompilationConfig, + ) + + config = CompilationConfig.Default() + config.enable_xformers = True + config.enable_triton = True + config.enable_cuda_graph = True + self.pipe = compile(self.pipe, config=config) + + self.pipe.set_progress_bar_config(disable=True) + self.pipe.to(device=device, dtype=torch_dtype).to(device) + if device.type != "mps": + self.pipe.unet.to(memory_format=torch.channels_last) + + if args.compel: + self.pipe.compel_proc = Compel( + tokenizer=[self.pipe.tokenizer, self.pipe.tokenizer_2], + text_encoder=[self.pipe.text_encoder, self.pipe.text_encoder_2], + returned_embeddings_type=ReturnedEmbeddingsType.PENULTIMATE_HIDDEN_STATES_NON_NORMALIZED, + requires_pooled=[False, True], + ) + if args.taesd: + self.pipe.vae = AutoencoderTiny.from_pretrained( + taesd_model, torch_dtype=torch_dtype, use_safetensors=True + ).to(device) + + if args.torch_compile: + self.pipe.unet = torch.compile( + self.pipe.unet, mode="reduce-overhead", fullgraph=True + ) + self.pipe.vae = torch.compile( + self.pipe.vae, 
mode="reduce-overhead", fullgraph=True + ) + self.pipe( + prompt="warmup", + image=[Image.new("RGB", (768, 768))], + control_image=[Image.new("RGB", (768, 768))], + ) + + def predict(self, params: "Pipeline.InputParams") -> Image.Image: + generator = torch.manual_seed(params.seed) + + prompt = params.prompt + negative_prompt = params.negative_prompt + prompt_embeds = None + pooled_prompt_embeds = None + negative_prompt_embeds = None + negative_pooled_prompt_embeds = None + if hasattr(self.pipe, "compel_proc"): + _prompt_embeds, pooled_prompt_embeds = self.pipe.compel_proc( + [params.prompt, params.negative_prompt] + ) + prompt = None + negative_prompt = None + prompt_embeds = _prompt_embeds[0:1] + pooled_prompt_embeds = pooled_prompt_embeds[0:1] + negative_prompt_embeds = _prompt_embeds[1:2] + negative_pooled_prompt_embeds = pooled_prompt_embeds[1:2] + + control_image = self.canny_torch( + params.image, params.canny_low_threshold, params.canny_high_threshold + ) + steps = params.steps + strength = params.strength + if int(steps * strength) < 1: + steps = math.ceil(1 / max(0.10, strength)) + + results = self.pipe( + image=params.image, + control_image=control_image, + prompt=prompt, + negative_prompt=negative_prompt, + prompt_embeds=prompt_embeds, + pooled_prompt_embeds=pooled_prompt_embeds, + negative_prompt_embeds=negative_prompt_embeds, + negative_pooled_prompt_embeds=negative_pooled_prompt_embeds, + generator=generator, + strength=strength, + num_inference_steps=steps, + guidance_scale=params.guidance_scale, + width=params.width, + height=params.height, + output_type="pil", + controlnet_conditioning_scale=params.controlnet_scale, + control_guidance_start=params.controlnet_start, + control_guidance_end=params.controlnet_end, + ) + + result_image = results.images[0] + if params.debug_canny: + # paste control_image on top of result_image + w0, h0 = (200, 200) + control_image = control_image.resize((w0, h0)) + w1, h1 = result_image.size + 
result_image.paste(control_image, (w1 - w0, h1 - h0)) + + return result_image diff --git a/server/pipelines/img2img.py b/server/pipelines/img2img.py new file mode 100644 index 0000000000000000000000000000000000000000..679bd198e7f630b71b5ee5fbcd460ff1edb4c2f0 --- /dev/null +++ b/server/pipelines/img2img.py @@ -0,0 +1,171 @@ +from diffusers import ( + AutoPipelineForImage2Image, + AutoencoderTiny, +) +from compel import Compel +import torch + +try: + import intel_extension_for_pytorch as ipex # type: ignore +except: + pass + +import psutil +from config import Args +from pydantic import BaseModel, Field +from PIL import Image +import math + +base_model = "SimianLuo/LCM_Dreamshaper_v7" +taesd_model = "madebyollin/taesd" + +default_prompt = "Portrait of The Terminator with , glare pose, detailed, intricate, full of colour, cinematic lighting, trending on artstation, 8k, hyperrealistic, focused, extreme details, unreal engine 5 cinematic, masterpiece" +page_content = """ ++ This demo showcases + LCM +Image to Image pipeline using + Diffusers with a MJPEG stream server. +
++ Change the prompt to generate different images, accepts Compel syntax. +
+""" + + +class Pipeline: + class Info(BaseModel): + name: str = "img2img" + title: str = "Image-to-Image LCM" + description: str = "Generates an image from a text prompt" + input_mode: str = "image" + page_content: str = page_content + + class InputParams(BaseModel): + prompt: str = Field( + default_prompt, + title="Prompt", + field="textarea", + id="prompt", + ) + seed: int = Field( + 2159232, min=0, title="Seed", field="seed", hide=True, id="seed" + ) + steps: int = Field( + 4, min=1, max=15, title="Steps", field="range", hide=True, id="steps" + ) + width: int = Field( + 768, min=2, max=15, title="Width", disabled=True, hide=True, id="width" + ) + height: int = Field( + 768, min=2, max=15, title="Height", disabled=True, hide=True, id="height" + ) + guidance_scale: float = Field( + 0.2, + min=0, + max=20, + step=0.001, + title="Guidance Scale", + field="range", + hide=True, + id="guidance_scale", + ) + strength: float = Field( + 0.5, + min=0.25, + max=1.0, + step=0.001, + title="Strength", + field="range", + hide=True, + id="strength", + ) + + def __init__(self, args: Args, device: torch.device, torch_dtype: torch.dtype): + self.pipe = AutoPipelineForImage2Image.from_pretrained( + base_model, + safety_checker=None, + ) + if args.taesd: + self.pipe.vae = AutoencoderTiny.from_pretrained( + taesd_model, torch_dtype=torch_dtype, use_safetensors=True + ).to(device) + + if args.sfast: + from sfast.compilers.stable_diffusion_pipeline_compiler import ( + compile, + CompilationConfig, + ) + + config = CompilationConfig.Default() + config.enable_xformers = True + config.enable_triton = True + config.enable_cuda_graph = True + self.pipe = compile(self.pipe, config=config) + + self.pipe.set_progress_bar_config(disable=True) + self.pipe.to(device=device, dtype=torch_dtype) + if device.type != "mps": + self.pipe.unet.to(memory_format=torch.channels_last) + + if args.torch_compile: + print("Running torch compile") + self.pipe.unet = torch.compile( + self.pipe.unet, 
mode="reduce-overhead", fullgraph=True + ) + self.pipe.vae = torch.compile( + self.pipe.vae, mode="reduce-overhead", fullgraph=True + ) + + self.pipe( + prompt="warmup", + image=[Image.new("RGB", (768, 768))], + ) + + if args.compel: + self.compel_proc = Compel( + tokenizer=self.pipe.tokenizer, + text_encoder=self.pipe.text_encoder, + truncate_long_prompts=False, + ) + + def predict(self, params: "Pipeline.InputParams") -> Image.Image: + generator = torch.manual_seed(params.seed) + prompt_embeds = None + prompt = params.prompt + if hasattr(self, "compel_proc"): + prompt_embeds = self.compel_proc(params.prompt) + prompt = None + + steps = params.steps + strength = params.strength + if int(steps * strength) < 1: + steps = math.ceil(1 / max(0.10, strength)) + + results = self.pipe( + image=params.image, + prompt=prompt, + prompt_embeds=prompt_embeds, + generator=generator, + strength=strength, + num_inference_steps=steps, + guidance_scale=params.guidance_scale, + width=params.width, + height=params.height, + output_type="pil", + ) + + return results.images[0] diff --git a/server/pipelines/img2imgSDTurbo.py b/server/pipelines/img2imgSDTurbo.py new file mode 100644 index 0000000000000000000000000000000000000000..d3f935128d8ffbe8943ad7ae9af5a7d753076fab --- /dev/null +++ b/server/pipelines/img2imgSDTurbo.py @@ -0,0 +1,187 @@ +from diffusers import ( + AutoPipelineForImage2Image, + AutoencoderTiny, +) +import torch + +try: + import intel_extension_for_pytorch as ipex # type: ignore +except: + pass + +import psutil +from config import Args +from pydantic import BaseModel, Field +from PIL import Image +import math + + +base_model = "stabilityai/sd-turbo" +taesd_model = "madebyollin/taesd" + +default_prompt = "close-up photography of old man standing in the rain at night, in a street lit by lamps, leica 35mm summilux" +default_negative_prompt = "blurry, low quality, render, 3D, oversaturated" +page_content = """ ++ This demo showcases + SDXL Turbo +Image to Image pipeline 
using + Diffusers with a MJPEG stream server. +
++ Change the prompt to generate different images, accepts Compel syntax. +
+""" + + +class Pipeline: + class Info(BaseModel): + name: str = "img2img" + title: str = "Image-to-Image SDXL" + description: str = "Generates an image from a text prompt" + input_mode: str = "image" + page_content: str = page_content + + class InputParams(BaseModel): + prompt: str = Field( + default_prompt, + title="Prompt", + field="textarea", + id="prompt", + ) + negative_prompt: str = Field( + default_negative_prompt, + title="Negative Prompt", + field="textarea", + id="negative_prompt", + hide=True, + ) + seed: int = Field( + 2159232, min=0, title="Seed", field="seed", hide=True, id="seed" + ) + steps: int = Field( + 1, min=1, max=15, title="Steps", field="range", hide=True, id="steps" + ) + width: int = Field( + 512, min=2, max=15, title="Width", disabled=True, hide=True, id="width" + ) + height: int = Field( + 512, min=2, max=15, title="Height", disabled=True, hide=True, id="height" + ) + strength: float = Field( + 0.5, + min=0.25, + max=1.0, + step=0.001, + title="Strength", + field="range", + hide=True, + id="strength", + ) + + def __init__(self, args: Args, device: torch.device, torch_dtype: torch.dtype): + self.pipe = AutoPipelineForImage2Image.from_pretrained( + base_model, + safety_checker=None, + ) + if args.taesd: + self.pipe.vae = AutoencoderTiny.from_pretrained( + taesd_model, torch_dtype=torch_dtype, use_safetensors=True + ).to(device) + + if args.sfast: + from sfast.compilers.stable_diffusion_pipeline_compiler import ( + compile, + CompilationConfig, + ) + + print("\nRunning sfast compile\n") + from sfast.compilers.stable_diffusion_pipeline_compiler import ( + compile, + CompilationConfig, + ) + + config = CompilationConfig.Default() + config.enable_xformers = True + config.enable_triton = True + config.enable_cuda_graph = True + self.pipe = compile(self.pipe, config=config) + + if args.onediff: + print("\nRunning onediff compile\n") + from onediff.infer_compiler import oneflow_compile + + self.pipe.unet = oneflow_compile(self.pipe.unet) + 
self.pipe.vae.encoder = oneflow_compile(self.pipe.vae.encoder) + self.pipe.vae.decoder = oneflow_compile(self.pipe.vae.decoder) + + self.pipe.set_progress_bar_config(disable=True) + self.pipe.to(device=device, dtype=torch_dtype) + if device.type != "mps": + self.pipe.unet.to(memory_format=torch.channels_last) + + if args.torch_compile: + print("Running torch compile") + self.pipe.unet = torch.compile( + self.pipe.unet, mode="reduce-overhead", fullgraph=True + ) + self.pipe.vae = torch.compile( + self.pipe.vae, mode="reduce-overhead", fullgraph=True + ) + + self.pipe( + prompt="warmup", + image=[Image.new("RGB", (768, 768))], + ) + if args.compel: + from compel import Compel + + self.pipe.compel_proc = Compel( + tokenizer=self.pipe.tokenizer, + text_encoder=self.pipe.text_encoder, + truncate_long_prompts=True, + ) + + def predict(self, params: "Pipeline.InputParams") -> Image.Image: + generator = torch.manual_seed(params.seed) + steps = params.steps + strength = params.strength + if int(steps * strength) < 1: + steps = math.ceil(1 / max(0.10, strength)) + + prompt = params.prompt + prompt_embeds = None + if hasattr(self.pipe, "compel_proc"): + prompt_embeds = self.pipe.compel_proc( + [params.prompt, params.negative_prompt] + ) + prompt = None + + results = self.pipe( + image=params.image, + prompt_embeds=prompt_embeds, + prompt=prompt, + negative_prompt=params.negative_prompt, + generator=generator, + strength=strength, + num_inference_steps=steps, + guidance_scale=1.1, + width=params.width, + height=params.height, + output_type="pil", + ) + + return results.images[0] diff --git a/server/pipelines/img2imgSDXL-Lightning.py b/server/pipelines/img2imgSDXL-Lightning.py new file mode 100644 index 0000000000000000000000000000000000000000..1a0a9613972366f86f2df3dbf766159dce26cf1f --- /dev/null +++ b/server/pipelines/img2imgSDXL-Lightning.py @@ -0,0 +1,216 @@ +from diffusers import ( + AutoPipelineForImage2Image, + AutoencoderTiny, + AutoencoderKL, + UNet2DConditionModel, + 
EulerDiscreteScheduler, +) +from compel import Compel, ReturnedEmbeddingsType +import torch + +try: + import intel_extension_for_pytorch as ipex # type: ignore +except: + pass + +from safetensors.torch import load_file +from huggingface_hub import hf_hub_download +from config import Args +from pydantic import BaseModel, Field +from PIL import Image +import math + +base = "stabilityai/stable-diffusion-xl-base-1.0" +repo = "ByteDance/SDXL-Lightning" +ckpt = "sdxl_lightning_2step_unet.safetensors" +taesd_model = "madebyollin/taesdxl" +NUM_STEPS = 2 + +default_prompt = "close-up photography of old man standing in the rain at night, in a street lit by lamps, leica 35mm summilux" +default_negative_prompt = "blurry, low quality, render, 3D, oversaturated" +page_content = """ ++ This demo showcases + SDXL Turbo +Image to Image pipeline using + Diffusers with a MJPEG stream server. +
++ Change the prompt to generate different images, accepts Compel syntax. +
+""" + + +class Pipeline: + class Info(BaseModel): + name: str = "img2img" + title: str = "Image-to-Image SDXL-Lightning" + description: str = "Generates an image from a text prompt" + input_mode: str = "image" + page_content: str = page_content + + class InputParams(BaseModel): + prompt: str = Field( + default_prompt, + title="Prompt", + field="textarea", + id="prompt", + ) + negative_prompt: str = Field( + default_negative_prompt, + title="Negative Prompt", + field="textarea", + id="negative_prompt", + hide=True, + ) + seed: int = Field( + 2159232, min=0, title="Seed", field="seed", hide=True, id="seed" + ) + steps: int = Field( + 1, min=1, max=10, title="Steps", field="range", hide=True, id="steps" + ) + width: int = Field( + 1024, min=2, max=15, title="Width", disabled=True, hide=True, id="width" + ) + height: int = Field( + 1024, min=2, max=15, title="Height", disabled=True, hide=True, id="height" + ) + guidance_scale: float = Field( + 0.0, + min=0, + max=1, + step=0.001, + title="Guidance Scale", + field="range", + hide=True, + id="guidance_scale", + ) + strength: float = Field( + 0.5, + min=0.25, + max=1.0, + step=0.001, + title="Strength", + field="range", + hide=True, + id="strength", + ) + + def __init__(self, args: Args, device: torch.device, torch_dtype: torch.dtype): + if args.taesd: + vae = AutoencoderTiny.from_pretrained( + taesd_model, torch_dtype=torch_dtype, use_safetensors=True + ) + else: + vae = AutoencoderKL.from_pretrained( + "madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch_dtype + ) + + unet = UNet2DConditionModel.from_config(base, subfolder="unet") + unet.load_state_dict(load_file(hf_hub_download(repo, ckpt), device=device.type)) + self.pipe = AutoPipelineForImage2Image.from_pretrained( + base, + unet=unet, + torch_dtype=torch_dtype, + variant="fp16", + safety_checker=False, + vae=vae, + ) + # Ensure sampler uses "trailing" timesteps. 
+ self.pipe.scheduler = EulerDiscreteScheduler.from_config( + self.pipe.scheduler.config, timestep_spacing="trailing" + ) + + if args.sfast: + from sfast.compilers.stable_diffusion_pipeline_compiler import ( + compile, + CompilationConfig, + ) + + config = CompilationConfig.Default() + config.enable_xformers = True + config.enable_triton = True + config.enable_cuda_graph = True + self.pipe = compile(self.pipe, config=config) + + self.pipe.set_progress_bar_config(disable=True) + self.pipe.to(device=device, dtype=torch_dtype) + if device.type != "mps": + self.pipe.unet.to(memory_format=torch.channels_last) + + if args.torch_compile: + print("Running torch compile") + self.pipe.unet = torch.compile( + self.pipe.unet, mode="reduce-overhead", fullgraph=True + ) + self.pipe.vae = torch.compile( + self.pipe.vae, mode="reduce-overhead", fullgraph=True + ) + self.pipe( + prompt="warmup", + image=[Image.new("RGB", (768, 768))], + ) + + if args.compel: + self.pipe.compel_proc = Compel( + tokenizer=[self.pipe.tokenizer, self.pipe.tokenizer_2], + text_encoder=[self.pipe.text_encoder, self.pipe.text_encoder_2], + returned_embeddings_type=ReturnedEmbeddingsType.PENULTIMATE_HIDDEN_STATES_NON_NORMALIZED, + requires_pooled=[False, True], + ) + + def predict(self, params: "Pipeline.InputParams") -> Image.Image: + generator = torch.manual_seed(params.seed) + prompt = params.prompt + negative_prompt = params.negative_prompt + prompt_embeds = None + pooled_prompt_embeds = None + negative_prompt_embeds = None + negative_pooled_prompt_embeds = None + if hasattr(self.pipe, "compel_proc"): + _prompt_embeds, pooled_prompt_embeds = self.pipe.compel_proc( + [params.prompt, params.negative_prompt] + ) + prompt = None + negative_prompt = None + prompt_embeds = _prompt_embeds[0:1] + pooled_prompt_embeds = pooled_prompt_embeds[0:1] + negative_prompt_embeds = _prompt_embeds[1:2] + negative_pooled_prompt_embeds = pooled_prompt_embeds[1:2] + + steps = params.steps + strength = params.strength + if 
int(steps * strength) < 1: + steps = math.ceil(1 / max(0.10, strength)) + + results = self.pipe( + image=params.image, + prompt=prompt, + negative_prompt=negative_prompt, + prompt_embeds=prompt_embeds, + pooled_prompt_embeds=pooled_prompt_embeds, + negative_prompt_embeds=negative_prompt_embeds, + negative_pooled_prompt_embeds=negative_pooled_prompt_embeds, + generator=generator, + strength=strength, + num_inference_steps=steps, + guidance_scale=params.guidance_scale, + width=params.width, + height=params.height, + output_type="pil", + ) + + return results.images[0] diff --git a/server/pipelines/img2imgSDXLTurbo.py b/server/pipelines/img2imgSDXLTurbo.py new file mode 100644 index 0000000000000000000000000000000000000000..13815b5913e6f2527d789e2725eac197783540d9 --- /dev/null +++ b/server/pipelines/img2imgSDXLTurbo.py @@ -0,0 +1,194 @@ +from diffusers import ( + AutoPipelineForImage2Image, + AutoencoderTiny, +) +from compel import Compel, ReturnedEmbeddingsType +import torch + +try: + import intel_extension_for_pytorch as ipex # type: ignore +except: + pass + +import psutil +from config import Args +from pydantic import BaseModel, Field +from PIL import Image +import math + +base_model = "stabilityai/sdxl-turbo" +taesd_model = "madebyollin/taesdxl" + +default_prompt = "close-up photography of old man standing in the rain at night, in a street lit by lamps, leica 35mm summilux" +default_negative_prompt = "blurry, low quality, render, 3D, oversaturated" +page_content = """ ++ This demo showcases + SDXL Turbo +Image to Image pipeline using + Diffusers with a MJPEG stream server. +
++ Change the prompt to generate different images, accepts Compel syntax. +
+""" + + +class Pipeline: + class Info(BaseModel): + name: str = "img2img" + title: str = "Image-to-Image SDXL" + description: str = "Generates an image from a text prompt" + input_mode: str = "image" + page_content: str = page_content + + class InputParams(BaseModel): + prompt: str = Field( + default_prompt, + title="Prompt", + field="textarea", + id="prompt", + ) + negative_prompt: str = Field( + default_negative_prompt, + title="Negative Prompt", + field="textarea", + id="negative_prompt", + hide=True, + ) + seed: int = Field( + 2159232, min=0, title="Seed", field="seed", hide=True, id="seed" + ) + steps: int = Field( + 1, min=1, max=10, title="Steps", field="range", hide=True, id="steps" + ) + width: int = Field( + 768, min=2, max=15, title="Width", disabled=True, hide=True, id="width" + ) + height: int = Field( + 768, min=2, max=15, title="Height", disabled=True, hide=True, id="height" + ) + guidance_scale: float = Field( + 1.0, + min=0, + max=1, + step=0.001, + title="Guidance Scale", + field="range", + hide=True, + id="guidance_scale", + ) + strength: float = Field( + 0.5, + min=0.25, + max=1.0, + step=0.001, + title="Strength", + field="range", + hide=True, + id="strength", + ) + + def __init__(self, args: Args, device: torch.device, torch_dtype: torch.dtype): + self.pipe = AutoPipelineForImage2Image.from_pretrained( + base_model, + safety_checker=None, + ) + if args.taesd: + self.pipe.vae = AutoencoderTiny.from_pretrained( + taesd_model, torch_dtype=torch_dtype, use_safetensors=True + ).to(device) + + if args.sfast: + from sfast.compilers.stable_diffusion_pipeline_compiler import ( + compile, + CompilationConfig, + ) + + config = CompilationConfig.Default() + config.enable_xformers = True + config.enable_triton = True + config.enable_cuda_graph = True + self.pipe = compile(self.pipe, config=config) + + self.pipe.set_progress_bar_config(disable=True) + self.pipe.to(device=device, dtype=torch_dtype) + if device.type != "mps": + 
self.pipe.unet.to(memory_format=torch.channels_last) + + if args.torch_compile: + print("Running torch compile") + self.pipe.unet = torch.compile( + self.pipe.unet, mode="reduce-overhead", fullgraph=True + ) + self.pipe.vae = torch.compile( + self.pipe.vae, mode="reduce-overhead", fullgraph=True + ) + self.pipe( + prompt="warmup", + image=[Image.new("RGB", (768, 768))], + ) + + if args.compel: + self.pipe.compel_proc = Compel( + tokenizer=[self.pipe.tokenizer, self.pipe.tokenizer_2], + text_encoder=[self.pipe.text_encoder, self.pipe.text_encoder_2], + returned_embeddings_type=ReturnedEmbeddingsType.PENULTIMATE_HIDDEN_STATES_NON_NORMALIZED, + requires_pooled=[False, True], + ) + + def predict(self, params: "Pipeline.InputParams") -> Image.Image: + generator = torch.manual_seed(params.seed) + prompt = params.prompt + negative_prompt = params.negative_prompt + prompt_embeds = None + pooled_prompt_embeds = None + negative_prompt_embeds = None + negative_pooled_prompt_embeds = None + if hasattr(self.pipe, "compel_proc"): + _prompt_embeds, pooled_prompt_embeds = self.pipe.compel_proc( + [params.prompt, params.negative_prompt] + ) + prompt = None + negative_prompt = None + prompt_embeds = _prompt_embeds[0:1] + pooled_prompt_embeds = pooled_prompt_embeds[0:1] + negative_prompt_embeds = _prompt_embeds[1:2] + negative_pooled_prompt_embeds = pooled_prompt_embeds[1:2] + + steps = params.steps + strength = params.strength + if int(steps * strength) < 1: + steps = math.ceil(1 / max(0.10, strength)) + + results = self.pipe( + image=params.image, + prompt=prompt, + negative_prompt=negative_prompt, + prompt_embeds=prompt_embeds, + pooled_prompt_embeds=pooled_prompt_embeds, + negative_prompt_embeds=negative_prompt_embeds, + negative_pooled_prompt_embeds=negative_pooled_prompt_embeds, + generator=generator, + strength=strength, + num_inference_steps=steps, + guidance_scale=params.guidance_scale, + width=params.width, + height=params.height, + output_type="pil", + ) + + return 
results.images[0] diff --git a/server/pipelines/img2imgSDXS512.py b/server/pipelines/img2imgSDXS512.py new file mode 100644 index 0000000000000000000000000000000000000000..1b7d23a4eb6d7d8145da0cb032e50c23ff131b3d --- /dev/null +++ b/server/pipelines/img2imgSDXS512.py @@ -0,0 +1,162 @@ +from diffusers import AutoPipelineForImage2Image, AutoencoderTiny +from compel import Compel +import torch + +try: + import intel_extension_for_pytorch as ipex # type: ignore +except: + pass + +import psutil +from config import Args +from pydantic import BaseModel, Field +from PIL import Image +import math + +base_model = "IDKiro/sdxs-512-0.9" +taesd_model = "madebyollin/taesd" + +default_prompt = "Portrait of The Terminator with , glare pose, detailed, intricate, full of colour, cinematic lighting, trending on artstation, 8k, hyperrealistic, focused, extreme details, unreal engine 5 cinematic, masterpiece" +page_content = """ ++ This demo showcases + LCM +Image to Image pipeline using + Diffusers with a MJPEG stream server. +
++ Change the prompt to generate different images, accepts Compel syntax. +
+""" + + +class Pipeline: + class Info(BaseModel): + name: str = "img2img" + title: str = "Image-to-Image SDXS" + description: str = "Generates an image from a text prompt" + input_mode: str = "image" + page_content: str = page_content + + class InputParams(BaseModel): + prompt: str = Field( + default_prompt, + title="Prompt", + field="textarea", + id="prompt", + ) + seed: int = Field( + 2159232, min=0, title="Seed", field="seed", hide=True, id="seed" + ) + steps: int = Field( + 1, min=1, max=15, title="Steps", field="range", hide=True, id="steps" + ) + width: int = Field( + 512, min=2, max=15, title="Width", disabled=True, hide=True, id="width" + ) + height: int = Field( + 512, min=2, max=15, title="Height", disabled=True, hide=True, id="height" + ) + guidance_scale: float = Field( + 0.0, + min=0, + max=20, + step=0.001, + title="Guidance Scale", + field="range", + hide=True, + id="guidance_scale", + ) + strength: float = Field( + 0.5, + min=0.25, + max=1.0, + step=0.001, + title="Strength", + field="range", + hide=True, + id="strength", + ) + + def __init__(self, args: Args, device: torch.device, torch_dtype: torch.dtype): + self.pipe = AutoPipelineForImage2Image.from_pretrained( + base_model, + safety_checker=None, + ) + if args.taesd: + self.pipe.vae = AutoencoderTiny.from_pretrained( + taesd_model, torch_dtype=torch_dtype, use_safetensors=True + ).to(device) + + if args.sfast: + from sfast.compilers.stable_diffusion_pipeline_compiler import ( + compile, + CompilationConfig, + ) + + config = CompilationConfig.Default() + config.enable_xformers = True + config.enable_triton = True + config.enable_cuda_graph = True + self.pipe = compile(self.pipe, config=config) + + self.pipe.set_progress_bar_config(disable=True) + self.pipe.to(device=device, dtype=torch_dtype) + if device.type != "mps": + self.pipe.unet.to(memory_format=torch.channels_last) + + if args.torch_compile: + print("Running torch compile") + self.pipe.unet = torch.compile( + self.pipe.unet, 
mode="reduce-overhead", fullgraph=True + ) + self.pipe.vae = torch.compile( + self.pipe.vae, mode="reduce-overhead", fullgraph=True + ) + + self.pipe( + prompt="warmup", + image=[Image.new("RGB", (768, 768))], + ) + + if args.compel: + self.compel_proc = Compel( + tokenizer=self.pipe.tokenizer, + text_encoder=self.pipe.text_encoder, + truncate_long_prompts=False, + ) + + def predict(self, params: "Pipeline.InputParams") -> Image.Image: + generator = torch.manual_seed(params.seed) + prompt_embeds = None + prompt = params.prompt + if hasattr(self, "compel_proc"): + prompt_embeds = self.compel_proc(params.prompt) + prompt = None + + results = self.pipe( + image=params.image, + prompt=prompt, + prompt_embeds=prompt_embeds, + generator=generator, + strength=params.strength, + num_inference_steps=params.steps, + guidance_scale=params.guidance_scale, + width=params.width, + height=params.height, + output_type="pil", + ) + return results.images[0] diff --git a/server/pipelines/img2imgSegmindVegaRT.py b/server/pipelines/img2imgSegmindVegaRT.py new file mode 100644 index 0000000000000000000000000000000000000000..4523b60d643e07f316192b1de3e81576e5c631c0 --- /dev/null +++ b/server/pipelines/img2imgSegmindVegaRT.py @@ -0,0 +1,202 @@ +from diffusers import ( + AutoPipelineForImage2Image, + LCMScheduler, + AutoencoderTiny, +) +from compel import Compel, ReturnedEmbeddingsType +import torch + +try: + import intel_extension_for_pytorch as ipex # type: ignore +except: + pass + +import psutil +from config import Args +from pydantic import BaseModel, Field +from PIL import Image +import math + +base_model = "segmind/Segmind-Vega" +lora_model = "segmind/Segmind-VegaRT" +taesd_model = "madebyollin/taesdxl" + +default_prompt = "close-up photography of old man standing in the rain at night, in a street lit by lamps, leica 35mm summilux" +default_negative_prompt = "blurry, low quality, render, 3D, oversaturated" +page_content = """ ++ This demo showcases + SegmindVegaRT +Image to Image 
pipeline using + Diffusers with a MJPEG stream server. +
++ Change the prompt to generate different images, accepts Compel syntax. +
+""" + + +class Pipeline: + class Info(BaseModel): + name: str = "img2img" + title: str = "Image-to-Image Playground 256" + description: str = "Generates an image from a text prompt" + input_mode: str = "image" + page_content: str = page_content + + class InputParams(BaseModel): + prompt: str = Field( + default_prompt, + title="Prompt", + field="textarea", + id="prompt", + ) + negative_prompt: str = Field( + default_negative_prompt, + title="Negative Prompt", + field="textarea", + id="negative_prompt", + hide=True, + ) + seed: int = Field( + 2159232, min=0, title="Seed", field="seed", hide=True, id="seed" + ) + steps: int = Field( + 1, min=1, max=10, title="Steps", field="range", hide=True, id="steps" + ) + width: int = Field( + 1024, min=2, max=15, title="Width", disabled=True, hide=True, id="width" + ) + height: int = Field( + 1024, min=2, max=15, title="Height", disabled=True, hide=True, id="height" + ) + guidance_scale: float = Field( + 0.0, + min=0, + max=1, + step=0.001, + title="Guidance Scale", + field="range", + hide=True, + id="guidance_scale", + ) + strength: float = Field( + 0.5, + min=0.25, + max=1.0, + step=0.001, + title="Strength", + field="range", + hide=True, + id="strength", + ) + + def __init__(self, args: Args, device: torch.device, torch_dtype: torch.dtype): + self.pipe = AutoPipelineForImage2Image.from_pretrained( + base_model, + safety_checker=None, + variant="fp16", + ) + if args.taesd: + self.pipe.vae = AutoencoderTiny.from_pretrained( + taesd_model, torch_dtype=torch_dtype, use_safetensors=True + ).to(device) + + self.pipe.load_lora_weights(lora_model) + self.pipe.fuse_lora() + self.pipe.scheduler = LCMScheduler.from_pretrained( + base_model, subfolder="scheduler" + ) + if args.sfast: + from sfast.compilers.stable_diffusion_pipeline_compiler import ( + compile, + CompilationConfig, + ) + + config = CompilationConfig.Default() + config.enable_xformers = True + config.enable_triton = True + config.enable_cuda_graph = True + self.pipe = 
compile(self.pipe, config=config) + + self.pipe.set_progress_bar_config(disable=True) + self.pipe.to(device=device, dtype=torch_dtype) + if device.type != "mps": + self.pipe.unet.to(memory_format=torch.channels_last) + + if args.torch_compile: + print("Running torch compile") + self.pipe.unet = torch.compile( + self.pipe.unet, mode="reduce-overhead", fullgraph=False + ) + self.pipe.vae = torch.compile( + self.pipe.vae, mode="reduce-overhead", fullgraph=False + ) + + self.pipe( + prompt="warmup", + image=[Image.new("RGB", (768, 768))], + ) + if args.compel: + self.pipe.compel_proc = Compel( + tokenizer=[self.pipe.tokenizer, self.pipe.tokenizer_2], + text_encoder=[self.pipe.text_encoder, self.pipe.text_encoder_2], + returned_embeddings_type=ReturnedEmbeddingsType.PENULTIMATE_HIDDEN_STATES_NON_NORMALIZED, + requires_pooled=[False, True], + ) + + def predict(self, params: "Pipeline.InputParams") -> Image.Image: + generator = torch.manual_seed(params.seed) + prompt = params.prompt + negative_prompt = params.negative_prompt + prompt_embeds = None + pooled_prompt_embeds = None + negative_prompt_embeds = None + negative_pooled_prompt_embeds = None + if hasattr(self.pipe, "compel_proc"): + _prompt_embeds, pooled_prompt_embeds = self.pipe.compel_proc( + [params.prompt, params.negative_prompt] + ) + prompt = None + negative_prompt = None + prompt_embeds = _prompt_embeds[0:1] + pooled_prompt_embeds = pooled_prompt_embeds[0:1] + negative_prompt_embeds = _prompt_embeds[1:2] + negative_pooled_prompt_embeds = pooled_prompt_embeds[1:2] + + steps = params.steps + strength = params.strength + if int(steps * strength) < 1: + steps = math.ceil(1 / max(0.10, strength)) + + results = self.pipe( + image=params.image, + prompt=prompt, + negative_prompt=negative_prompt, + prompt_embeds=prompt_embeds, + pooled_prompt_embeds=pooled_prompt_embeds, + negative_prompt_embeds=negative_prompt_embeds, + negative_pooled_prompt_embeds=negative_pooled_prompt_embeds, + generator=generator, + 
strength=strength, + num_inference_steps=steps, + guidance_scale=params.guidance_scale, + width=params.width, + height=params.height, + output_type="pil", + ) + + return results.images[0] diff --git a/server/pipelines/pix2pix/__init__.py b/server/pipelines/pix2pix/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/server/pipelines/pix2pix/model.py b/server/pipelines/pix2pix/model.py new file mode 100644 index 0000000000000000000000000000000000000000..35095850692f08217f7b297dac80ac1875f2a91e --- /dev/null +++ b/server/pipelines/pix2pix/model.py @@ -0,0 +1,59 @@ +# https://github.com/GaParmar/img2img-turbo/blob/main/src/model.py +from diffusers import DDPMScheduler + + +def make_1step_sched(): + noise_scheduler_1step = DDPMScheduler.from_pretrained( + "stabilityai/sd-turbo", subfolder="scheduler" + ) + noise_scheduler_1step.set_timesteps(1, device="cuda") + noise_scheduler_1step.alphas_cumprod = noise_scheduler_1step.alphas_cumprod.cuda() + return noise_scheduler_1step + + +def my_vae_encoder_fwd(self, sample): + sample = self.conv_in(sample) + l_blocks = [] + # down + for down_block in self.down_blocks: + l_blocks.append(sample) + sample = down_block(sample) + # middle + sample = self.mid_block(sample) + sample = self.conv_norm_out(sample) + sample = self.conv_act(sample) + sample = self.conv_out(sample) + self.current_down_blocks = l_blocks + return sample + + +def my_vae_decoder_fwd(self, sample, latent_embeds=None): + sample = self.conv_in(sample) + upscale_dtype = next(iter(self.up_blocks.parameters())).dtype + # middle + sample = self.mid_block(sample, latent_embeds) + sample = sample.to(upscale_dtype) + if not self.ignore_skip: + skip_convs = [ + self.skip_conv_1, + self.skip_conv_2, + self.skip_conv_3, + self.skip_conv_4, + ] + # up + for idx, up_block in enumerate(self.up_blocks): + skip_in = skip_convs[idx](self.incoming_skip_acts[::-1][idx] * self.gamma) + # add skip + 
sample = sample + skip_in + sample = up_block(sample, latent_embeds) + else: + for idx, up_block in enumerate(self.up_blocks): + sample = up_block(sample, latent_embeds) + # post-process + if latent_embeds is None: + sample = self.conv_norm_out(sample) + else: + sample = self.conv_norm_out(sample, latent_embeds) + sample = self.conv_act(sample) + sample = self.conv_out(sample) + return sample diff --git a/server/pipelines/pix2pix/pix2pix_turbo.py b/server/pipelines/pix2pix/pix2pix_turbo.py new file mode 100644 index 0000000000000000000000000000000000000000..c476c7bdce814e23ccb2f580895c8f6727ed227a --- /dev/null +++ b/server/pipelines/pix2pix/pix2pix_turbo.py @@ -0,0 +1,213 @@ +# https://github.com/GaParmar/img2img-turbo/blob/main/src/pix2pix_turbo.py +import os +import requests +import sys +import pdb +import copy +from tqdm import tqdm +import torch +from transformers import AutoTokenizer, PretrainedConfig, CLIPTextModel +from diffusers import AutoencoderKL, UNet2DConditionModel, DDPMScheduler +from diffusers.utils.peft_utils import set_weights_and_activate_adapters +from peft import LoraConfig + +from pipelines.pix2pix.model import ( + make_1step_sched, + my_vae_encoder_fwd, + my_vae_decoder_fwd, +) + + +class TwinConv(torch.nn.Module): + def __init__(self, convin_pretrained, convin_curr): + super(TwinConv, self).__init__() + self.conv_in_pretrained = copy.deepcopy(convin_pretrained) + self.conv_in_curr = copy.deepcopy(convin_curr) + self.r = None + + def forward(self, x): + x1 = self.conv_in_pretrained(x).detach() + x2 = self.conv_in_curr(x) + return x1 * (1 - self.r) + x2 * (self.r) + + +class Pix2Pix_Turbo(torch.nn.Module): + def __init__(self, name, ckpt_folder="checkpoints"): + super().__init__() + self.tokenizer = AutoTokenizer.from_pretrained( + "stabilityai/sd-turbo", subfolder="tokenizer" + ) + self.text_encoder = CLIPTextModel.from_pretrained( + "stabilityai/sd-turbo", subfolder="text_encoder" + ).cuda() + self.sched = make_1step_sched() + + vae = 
AutoencoderKL.from_pretrained("stabilityai/sd-turbo", subfolder="vae") + unet = UNet2DConditionModel.from_pretrained( + "stabilityai/sd-turbo", subfolder="unet" + ) + + if name == "edge_to_image": + url = "https://www.cs.cmu.edu/~img2img-turbo/models/edge_to_image_loras.pkl" + os.makedirs(ckpt_folder, exist_ok=True) + outf = os.path.join(ckpt_folder, "edge_to_image_loras.pkl") + if not os.path.exists(outf): + print(f"Downloading checkpoint to {outf}") + response = requests.get(url, stream=True) + total_size_in_bytes = int(response.headers.get("content-length", 0)) + block_size = 1024 # 1 Kibibyte + progress_bar = tqdm( + total=total_size_in_bytes, unit="iB", unit_scale=True + ) + with open(outf, "wb") as file: + for data in response.iter_content(block_size): + progress_bar.update(len(data)) + file.write(data) + progress_bar.close() + if total_size_in_bytes != 0 and progress_bar.n != total_size_in_bytes: + print("ERROR, something went wrong") + print(f"Downloaded successfully to {outf}") + p_ckpt = outf + sd = torch.load(p_ckpt, map_location="cpu") + unet_lora_config = LoraConfig( + r=sd["rank_unet"], + init_lora_weights="gaussian", + target_modules=sd["unet_lora_target_modules"], + ) + + if name == "sketch_to_image_stochastic": + # download from url + url = "https://www.cs.cmu.edu/~img2img-turbo/models/sketch_to_image_stochastic_lora.pkl" + os.makedirs(ckpt_folder, exist_ok=True) + outf = os.path.join(ckpt_folder, "sketch_to_image_stochastic_lora.pkl") + if not os.path.exists(outf): + print(f"Downloading checkpoint to {outf}") + response = requests.get(url, stream=True) + total_size_in_bytes = int(response.headers.get("content-length", 0)) + block_size = 1024 # 1 Kibibyte + progress_bar = tqdm( + total=total_size_in_bytes, unit="iB", unit_scale=True + ) + with open(outf, "wb") as file: + for data in response.iter_content(block_size): + progress_bar.update(len(data)) + file.write(data) + progress_bar.close() + if total_size_in_bytes != 0 and progress_bar.n != 
total_size_in_bytes: + print("ERROR, something went wrong") + print(f"Downloaded successfully to {outf}") + p_ckpt = outf + sd = torch.load(p_ckpt, map_location="cpu") + unet_lora_config = LoraConfig( + r=sd["rank_unet"], + init_lora_weights="gaussian", + target_modules=sd["unet_lora_target_modules"], + ) + convin_pretrained = copy.deepcopy(unet.conv_in) + unet.conv_in = TwinConv(convin_pretrained, unet.conv_in) + + vae.encoder.forward = my_vae_encoder_fwd.__get__( + vae.encoder, vae.encoder.__class__ + ) + vae.decoder.forward = my_vae_decoder_fwd.__get__( + vae.decoder, vae.decoder.__class__ + ) + # add the skip connection convs + vae.decoder.skip_conv_1 = torch.nn.Conv2d( + 512, 512, kernel_size=(1, 1), stride=(1, 1), bias=False + ).cuda() + vae.decoder.skip_conv_2 = torch.nn.Conv2d( + 256, 512, kernel_size=(1, 1), stride=(1, 1), bias=False + ).cuda() + vae.decoder.skip_conv_3 = torch.nn.Conv2d( + 128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False + ).cuda() + vae.decoder.skip_conv_4 = torch.nn.Conv2d( + 128, 256, kernel_size=(1, 1), stride=(1, 1), bias=False + ).cuda() + vae_lora_config = LoraConfig( + r=sd["rank_vae"], + init_lora_weights="gaussian", + target_modules=sd["vae_lora_target_modules"], + ) + vae.decoder.ignore_skip = False + vae.add_adapter(vae_lora_config, adapter_name="vae_skip") + unet.add_adapter(unet_lora_config) + _sd_unet = unet.state_dict() + for k in sd["state_dict_unet"]: + _sd_unet[k] = sd["state_dict_unet"][k] + unet.load_state_dict(_sd_unet) + unet.enable_xformers_memory_efficient_attention() + _sd_vae = vae.state_dict() + for k in sd["state_dict_vae"]: + _sd_vae[k] = sd["state_dict_vae"][k] + vae.load_state_dict(_sd_vae) + unet.to("cuda") + vae.to("cuda") + unet.eval() + vae.eval() + self.unet, self.vae = unet, vae + self.vae.decoder.gamma = 1 + self.timesteps = torch.tensor([999], device="cuda").long() + self.last_prompt = "" + self.caption_enc = None + self.device = "cuda" + + @torch.no_grad() + def forward(self, c_t, prompt, 
deterministic=True, r=1.0, noise_map=1.0): + # encode the text prompt + if prompt != self.last_prompt: + caption_tokens = self.tokenizer( + prompt, + max_length=self.tokenizer.model_max_length, + padding="max_length", + truncation=True, + return_tensors="pt", + ).input_ids.cuda() + caption_enc = self.text_encoder(caption_tokens)[0] + self.caption_enc = caption_enc + self.last_prompt = prompt + + if deterministic: + encoded_control = ( + self.vae.encode(c_t).latent_dist.sample() + * self.vae.config.scaling_factor + ) + model_pred = self.unet( + encoded_control, + self.timesteps, + encoder_hidden_states=self.caption_enc, + ).sample + x_denoised = self.sched.step( + model_pred, self.timesteps, encoded_control, return_dict=True + ).prev_sample + self.vae.decoder.incoming_skip_acts = self.vae.encoder.current_down_blocks + output_image = ( + self.vae.decode(x_denoised / self.vae.config.scaling_factor).sample + ).clamp(-1, 1) + else: + # scale the lora weights based on the r value + self.unet.set_adapters(["default"], weights=[r]) + set_weights_and_activate_adapters(self.vae, ["vae_skip"], [r]) + encoded_control = ( + self.vae.encode(c_t).latent_dist.sample() + * self.vae.config.scaling_factor + ) + # combine the input and noise + unet_input = encoded_control * r + noise_map * (1 - r) + self.unet.conv_in.r = r + unet_output = self.unet( + unet_input, + self.timesteps, + encoder_hidden_states=self.caption_enc, + ).sample + self.unet.conv_in.r = None + x_denoised = self.sched.step( + unet_output, self.timesteps, unet_input, return_dict=True + ).prev_sample + self.vae.decoder.incoming_skip_acts = self.vae.encoder.current_down_blocks + self.vae.decoder.gamma = r + output_image = ( + self.vae.decode(x_denoised / self.vae.config.scaling_factor).sample + ).clamp(-1, 1) + return output_image diff --git a/server/pipelines/pix2pixTurbo.py b/server/pipelines/pix2pixTurbo.py new file mode 100644 index 0000000000000000000000000000000000000000..b2ad8853204330df8034e56109398c20f31b6065 
--- /dev/null +++ b/server/pipelines/pix2pixTurbo.py @@ -0,0 +1,135 @@ +import torch +from torchvision import transforms + +from config import Args +from pydantic import BaseModel, Field +from PIL import Image +from pipelines.pix2pix.pix2pix_turbo import Pix2Pix_Turbo +from pipelines.utils.canny_gpu import ScharrOperator + +default_prompt = "close-up photo of the joker" +page_content = """ ++ This demo showcases + One-Step Image Translation with Text-to-Image Models + +
++ Web app + Real-Time Latent Consistency Models + +
+""" + + +class Pipeline: + class Info(BaseModel): + name: str = "img2img" + title: str = "Image-to-Image SDXL" + description: str = "Generates an image from a text prompt" + input_mode: str = "image" + page_content: str = page_content + + class InputParams(BaseModel): + prompt: str = Field( + default_prompt, + title="Prompt", + field="textarea", + id="prompt", + ) + + width: int = Field( + 512, min=2, max=15, title="Width", disabled=True, hide=True, id="width" + ) + height: int = Field( + 512, min=2, max=15, title="Height", disabled=True, hide=True, id="height" + ) + seed: int = Field( + 2159232, min=0, title="Seed", field="seed", hide=True, id="seed" + ) + noise_r: float = Field( + 1.0, + min=0.01, + max=3.0, + step=0.001, + title="Noise R", + field="range", + hide=True, + id="noise_r", + ) + + deterministic: bool = Field( + True, + hide=True, + title="Deterministic", + field="checkbox", + id="deterministic", + ) + canny_low_threshold: float = Field( + 0.0, + min=0, + max=1.0, + step=0.001, + title="Canny Low Threshold", + field="range", + hide=True, + id="canny_low_threshold", + ) + canny_high_threshold: float = Field( + 1.0, + min=0, + max=1.0, + step=0.001, + title="Canny High Threshold", + field="range", + hide=True, + id="canny_high_threshold", + ) + debug_canny: bool = Field( + False, + title="Debug Canny", + field="checkbox", + hide=True, + id="debug_canny", + ) + + def __init__(self, args: Args, device: torch.device, torch_dtype: torch.dtype): + self.model = Pix2Pix_Turbo("edge_to_image") + self.canny_torch = ScharrOperator(device=device) + self.device = device + self.last_time = 0.0 + + def predict(self, params: "Pipeline.InputParams") -> Image.Image: + canny_pil, canny_tensor = self.canny_torch( + params.image, + params.canny_low_threshold, + params.canny_high_threshold, + output_type="pil,tensor", + ) + torch.manual_seed(params.seed) + noise = torch.randn( + (1, 4, params.width // 8, params.height // 8), device=self.device + ) + canny_tensor = 
torch.cat((canny_tensor, canny_tensor, canny_tensor), dim=1) + output_image = self.model( + canny_tensor, + params.prompt, + params.deterministic, + params.noise_r, + noise, + ) + output_pil = transforms.ToPILImage()(output_image[0].cpu() * 0.5 + 0.5) + + result_image = output_pil + if params.debug_canny: + # paste control_image on top of result_image + w0, h0 = (200, 200) + control_image = canny_pil.resize((w0, h0)) + w1, h1 = result_image.size + result_image.paste(control_image, (w1 - w0, h1 - h0)) + return result_image diff --git a/server/pipelines/txt2img.py b/server/pipelines/txt2img.py new file mode 100644 index 0000000000000000000000000000000000000000..34f640e42bd3938d19ad48ca197b4bd6a2616d44 --- /dev/null +++ b/server/pipelines/txt2img.py @@ -0,0 +1,141 @@ +from diffusers import DiffusionPipeline, AutoencoderTiny +from compel import Compel +import torch + +try: + import intel_extension_for_pytorch as ipex # type: ignore +except: + pass + +from config import Args +from pydantic import BaseModel, Field +from PIL import Image +from typing import List + +base_model = "SimianLuo/LCM_Dreamshaper_v7" +taesd_model = "madebyollin/taesd" + +default_prompt = "Portrait of The Terminator with , glare pose, detailed, intricate, full of colour, cinematic lighting, trending on artstation, 8k, hyperrealistic, focused, extreme details, unreal engine 5 cinematic, masterpiece" + +page_content = """+ This demo showcases + LCM +Image to Image pipeline using + Diffusers with a MJPEG stream server +
++ Change the prompt to generate different images, accepts Compel syntax. +
""" + + +class Pipeline: + class Info(BaseModel): + name: str = "txt2img" + title: str = "Text-to-Image LCM" + description: str = "Generates an image from a text prompt" + input_mode: str = "text" + page_content: str = page_content + + class InputParams(BaseModel): + prompt: str = Field( + default_prompt, + title="Prompt", + field="textarea", + id="prompt", + ) + seed: int = Field( + 2159232, min=0, title="Seed", field="seed", hide=True, id="seed" + ) + steps: int = Field( + 4, min=2, max=15, title="Steps", field="range", hide=True, id="steps" + ) + width: int = Field( + 768, min=2, max=15, title="Width", disabled=True, hide=True, id="width" + ) + height: int = Field( + 768, min=2, max=15, title="Height", disabled=True, hide=True, id="height" + ) + guidance_scale: float = Field( + 8.0, + min=1, + max=30, + step=0.001, + title="Guidance Scale", + field="range", + hide=True, + id="guidance_scale", + ) + + def __init__(self, args: Args, device: torch.device, torch_dtype: torch.dtype): + self.pipe = DiffusionPipeline.from_pretrained(base_model, safety_checker=None) + if args.taesd: + self.pipe.vae = AutoencoderTiny.from_pretrained( + taesd_model, torch_dtype=torch_dtype, use_safetensors=True + ).to(device) + + if args.sfast: + from sfast.compilers.stable_diffusion_pipeline_compiler import ( + compile, + CompilationConfig, + ) + + config = CompilationConfig.Default() + config.enable_xformers = True + config.enable_triton = True + config.enable_cuda_graph = True + self.pipe = compile(self.pipe, config=config) + + self.pipe.set_progress_bar_config(disable=True) + self.pipe.to(device=device, dtype=torch_dtype) + if device.type != "mps": + self.pipe.unet.to(memory_format=torch.channels_last) + + if args.torch_compile: + self.pipe.unet = torch.compile( + self.pipe.unet, mode="reduce-overhead", fullgraph=True + ) + self.pipe.vae = torch.compile( + self.pipe.vae, mode="reduce-overhead", fullgraph=True + ) + + self.pipe(prompt="warmup", num_inference_steps=1, 
guidance_scale=8.0) + + if args.compel: + self.compel_proc = Compel( + tokenizer=self.pipe.tokenizer, + text_encoder=self.pipe.text_encoder, + truncate_long_prompts=False, + ) + + def predict(self, params: "Pipeline.InputParams") -> Image.Image: + generator = torch.manual_seed(params.seed) + prompt_embeds = None + prompt = params.prompt + if hasattr(self, "compel_proc"): + prompt_embeds = self.compel_proc(params.prompt) + prompt = None + + results = self.pipe( + prompt_embeds=prompt_embeds, + prompt=prompt, + generator=generator, + num_inference_steps=params.steps, + guidance_scale=params.guidance_scale, + width=params.width, + height=params.height, + output_type="pil", + ) + + return results.images[0] diff --git a/server/pipelines/txt2imgLora.py b/server/pipelines/txt2imgLora.py new file mode 100644 index 0000000000000000000000000000000000000000..3168b65694ceaf1bca4e2bd12004fe3ab347f646 --- /dev/null +++ b/server/pipelines/txt2imgLora.py @@ -0,0 +1,151 @@ +from diffusers import DiffusionPipeline, AutoencoderTiny, LCMScheduler +from compel import Compel +import torch + +try: + import intel_extension_for_pytorch as ipex # type: ignore +except: + pass + +import psutil +from config import Args +from pydantic import BaseModel, Field +from PIL import Image + +base_model = "wavymulder/Analog-Diffusion" +lcm_lora_id = "latent-consistency/lcm-lora-sdv1-5" +taesd_model = "madebyollin/taesd" + +default_prompt = "Analog style photograph of young Harrison Ford as Han Solo, star wars behind the scenes" + +page_content = """ ++ This demo showcases + LCM +Image to Image pipeline using + Diffusers with a MJPEG stream server. Featuring Analog-Diffusion +
++ Change the prompt to generate different images, accepts Compel syntax. +
+""" + + +class Pipeline: + class Info(BaseModel): + name: str = "controlnet" + title: str = "Text-to-Image LCM + LoRa" + description: str = "Generates an image from a text prompt" + input_mode: str = "text" + page_content: str = page_content + + class InputParams(BaseModel): + prompt: str = Field( + default_prompt, + title="Prompt", + field="textarea", + id="prompt", + ) + seed: int = Field( + 8638236174640251, min=0, title="Seed", field="seed", hide=True, id="seed" + ) + steps: int = Field( + 4, min=2, max=15, title="Steps", field="range", hide=True, id="steps" + ) + width: int = Field( + 512, min=2, max=15, title="Width", disabled=True, hide=True, id="width" + ) + height: int = Field( + 512, min=2, max=15, title="Height", disabled=True, hide=True, id="height" + ) + guidance_scale: float = Field( + 0.2, + min=0, + max=4, + step=0.001, + title="Guidance Scale", + field="range", + hide=True, + id="guidance_scale", + ) + + def __init__(self, args: Args, device: torch.device, torch_dtype: torch.dtype): + self.pipe = DiffusionPipeline.from_pretrained(base_model, safety_checker=None) + if args.taesd: + self.pipe.vae = AutoencoderTiny.from_pretrained( + taesd_model, torch_dtype=torch_dtype, use_safetensors=True + ).to(device) + + self.pipe.scheduler = LCMScheduler.from_config(self.pipe.scheduler.config) + self.pipe.set_progress_bar_config(disable=True) + self.pipe.load_lora_weights(lcm_lora_id, adapter_name="lcm") + self.pipe.to(device=device, dtype=torch_dtype) + + if device.type != "mps": + self.pipe.unet.to(memory_format=torch.channels_last) + + if args.torch_compile: + self.pipe.unet = torch.compile( + self.pipe.unet, mode="reduce-overhead", fullgraph=True + ) + self.pipe.vae = torch.compile( + self.pipe.vae, mode="reduce-overhead", fullgraph=True + ) + + self.pipe(prompt="warmup", num_inference_steps=1, guidance_scale=8.0) + + if args.sfast: + from sfast.compilers.stable_diffusion_pipeline_compiler import ( + compile, + CompilationConfig, + ) + + config = 
CompilationConfig.Default() + config.enable_xformers = True + config.enable_triton = True + config.enable_cuda_graph = True + self.pipe = compile(self.pipe, config=config) + + if args.compel: + self.compel_proc = Compel( + tokenizer=self.pipe.tokenizer, + text_encoder=self.pipe.text_encoder, + truncate_long_prompts=False, + ) + + def predict(self, params: "Pipeline.InputParams") -> Image.Image: + generator = torch.manual_seed(params.seed) + prompt_embeds = None + prompt = params.prompt + if hasattr(self, "compel_proc"): + prompt_embeds = self.compel_proc(params.prompt) + prompt = None + + results = self.pipe( + prompt=prompt, + prompt_embeds=prompt_embeds, + generator=generator, + num_inference_steps=params.steps, + guidance_scale=params.guidance_scale, + width=params.width, + height=params.height, + output_type="pil", + ) + + return results.images[0] diff --git a/server/pipelines/txt2imgLoraSDXL.py b/server/pipelines/txt2imgLoraSDXL.py new file mode 100644 index 0000000000000000000000000000000000000000..d609f887c40628f4dbe86afc3d8b5b99bd6d4f7b --- /dev/null +++ b/server/pipelines/txt2imgLoraSDXL.py @@ -0,0 +1,182 @@ +from diffusers import DiffusionPipeline, LCMScheduler, AutoencoderKL, AutoencoderTiny +from compel import Compel, ReturnedEmbeddingsType +import torch + +try: + import intel_extension_for_pytorch as ipex # type: ignore +except: + pass + +import psutil +from config import Args +from pydantic import BaseModel, Field +from PIL import Image + +model_id = "stabilityai/stable-diffusion-xl-base-1.0" +lcm_lora_id = "latent-consistency/lcm-lora-sdxl" +taesd_model = "madebyollin/taesdxl" + + +default_prompt = "close-up photography of old man standing in the rain at night, in a street lit by lamps, leica 35mm summilux" +default_negative_prompt = "blurry, low quality, render, 3D, oversaturated" +page_content = """ ++ This demo showcases + LCM LoRA + Text to Image pipeline using + Diffusers with a MJPEG stream server. +
++ Change the prompt to generate different images, accepts Compel syntax. +
+""" + + +class Pipeline: + class Info(BaseModel): + name: str = "LCM+Lora+SDXL" + title: str = "Text-to-Image SDXL + LCM + LoRA" + description: str = "Generates an image from a text prompt" + page_content: str = page_content + input_mode: str = "text" + + class InputParams(BaseModel): + prompt: str = Field( + default_prompt, + title="Prompt", + field="textarea", + id="prompt", + ) + negative_prompt: str = Field( + default_negative_prompt, + title="Negative Prompt", + field="textarea", + id="negative_prompt", + hide=True, + ) + seed: int = Field( + 2159232, min=0, title="Seed", field="seed", hide=True, id="seed" + ) + steps: int = Field( + 4, min=1, max=15, title="Steps", field="range", hide=True, id="steps" + ) + width: int = Field( + 1024, min=2, max=15, title="Width", disabled=True, hide=True, id="width" + ) + height: int = Field( + 1024, min=2, max=15, title="Height", disabled=True, hide=True, id="height" + ) + guidance_scale: float = Field( + 1.0, + min=0, + max=20, + step=0.001, + title="Guidance Scale", + field="range", + hide=True, + id="guidance_scale", + ) + + def __init__(self, args: Args, device: torch.device, torch_dtype: torch.dtype): + vae = AutoencoderKL.from_pretrained( + "madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch_dtype + ) + + self.pipe = DiffusionPipeline.from_pretrained( + model_id, + safety_checker=None, + vae=vae, + ) + # Load LCM LoRA + self.pipe.load_lora_weights(lcm_lora_id, adapter_name="lcm") + self.pipe.scheduler = LCMScheduler.from_config(self.pipe.scheduler.config) + self.pipe.set_progress_bar_config(disable=True) + self.pipe.to(device=device, dtype=torch_dtype).to(device) + + if args.sfast: + from sfast.compilers.stable_diffusion_pipeline_compiler import ( + compile, + CompilationConfig, + ) + + config = CompilationConfig.Default() + config.enable_xformers = True + config.enable_triton = True + config.enable_cuda_graph = True + self.pipe = compile(self.pipe, config=config) + + if device.type != "mps": + 
self.pipe.unet.to(memory_format=torch.channels_last) + + self.pipe.compel_proc = Compel( + tokenizer=[self.pipe.tokenizer, self.pipe.tokenizer_2], + text_encoder=[self.pipe.text_encoder, self.pipe.text_encoder_2], + returned_embeddings_type=ReturnedEmbeddingsType.PENULTIMATE_HIDDEN_STATES_NON_NORMALIZED, + requires_pooled=[False, True], + ) + if args.taesd: + self.pipe.vae = AutoencoderTiny.from_pretrained( + taesd_model, torch_dtype=torch_dtype, use_safetensors=True + ).to(device) + + if args.torch_compile: + self.pipe.unet = torch.compile( + self.pipe.unet, mode="reduce-overhead", fullgraph=True + ) + self.pipe.vae = torch.compile( + self.pipe.vae, mode="reduce-overhead", fullgraph=True + ) + self.pipe( + prompt="warmup", + ) + + def predict(self, params: "Pipeline.InputParams") -> Image.Image: + generator = torch.manual_seed(params.seed) + + prompt = params.prompt + negative_prompt = params.negative_prompt + prompt_embeds = None + pooled_prompt_embeds = None + negative_prompt_embeds = None + negative_pooled_prompt_embeds = None + if hasattr(self.pipe, "compel_proc"): + _prompt_embeds, pooled_prompt_embeds = self.pipe.compel_proc( + [params.prompt, params.negative_prompt] + ) + prompt = None + negative_prompt = None + prompt_embeds = _prompt_embeds[0:1] + pooled_prompt_embeds = pooled_prompt_embeds[0:1] + negative_prompt_embeds = _prompt_embeds[1:2] + negative_pooled_prompt_embeds = pooled_prompt_embeds[1:2] + + results = self.pipe( + prompt=prompt, + negative_prompt=negative_prompt, + prompt_embeds=prompt_embeds, + pooled_prompt_embeds=pooled_prompt_embeds, + negative_prompt_embeds=negative_prompt_embeds, + negative_pooled_prompt_embeds=negative_pooled_prompt_embeds, + generator=generator, + num_inference_steps=params.steps, + guidance_scale=params.guidance_scale, + width=params.width, + height=params.height, + output_type="pil", + ) + + return results.images[0] diff --git a/server/pipelines/utils/canny_gpu.py b/server/pipelines/utils/canny_gpu.py new file mode 
import torch
import torch.nn as nn
from torchvision.transforms import ToTensor, ToPILImage
from PIL import Image


def _fixed_edge_conv(kernel: torch.Tensor, device) -> nn.Conv2d:
    """Build a single-channel 3x3 convolution with a fixed (frozen) kernel."""
    conv = nn.Conv2d(1, 1, kernel_size=3, padding=1, bias=False).to(device)
    conv.weight = nn.Parameter(kernel.view((1, 1, 3, 3)).to(device))
    return conv


class SobelOperator(nn.Module):
    """GPU edge detector using the Sobel gradient kernels."""

    SOBEL_KERNEL_X = torch.tensor(
        [[-1.0, 0.0, 1.0], [-2.0, 0.0, 2.0], [-1.0, 0.0, 1.0]]
    )
    SOBEL_KERNEL_Y = torch.tensor(
        [[-1.0, -2.0, -1.0], [0.0, 0.0, 0.0], [1.0, 2.0, 1.0]]
    )

    def __init__(self, device="cuda"):
        super(SobelOperator, self).__init__()
        self.device = device
        self.edge_conv_x = _fixed_edge_conv(self.SOBEL_KERNEL_X, self.device)
        self.edge_conv_y = _fixed_edge_conv(self.SOBEL_KERNEL_Y, self.device)

    @torch.no_grad()
    def forward(
        self,
        image: Image.Image,
        low_threshold: float,
        high_threshold: float,
        output_type="pil",
    ) -> Image.Image | torch.Tensor | tuple[Image.Image, torch.Tensor]:
        """Compute a thresholded edge map of *image*.

        Pixels with normalized gradient magnitude >= high_threshold become
        1.0, <= low_threshold become 0.0; values in between are kept.

        Raises:
            ValueError: if *output_type* is not one of
                "pil", "tensor", "pil,tensor".
        """
        # Grayscale PIL image -> (1, 1, H, W) tensor on the target device.
        gray = image.convert("L")
        x = ToTensor()(gray).unsqueeze(0).to(self.device)

        # Gradient magnitude from the two directional convolutions.
        gx = self.edge_conv_x(x)
        gy = self.edge_conv_y(x)
        edge = torch.sqrt(torch.square(gx) + torch.square(gy))

        # FIX: guard the in-place normalization — a constant input image has
        # zero gradient everywhere and the old unconditional div_ produced
        # NaNs (0/0).
        peak = edge.max()
        if peak > 0:
            edge.div_(peak)  # normalize to [0, 1]
        edge[edge >= high_threshold] = 1.0
        edge[edge <= low_threshold] = 0.0

        if output_type == "pil":
            return ToPILImage()(edge.squeeze(0).cpu())
        if output_type == "tensor":
            return edge
        if output_type == "pil,tensor":
            return ToPILImage()(edge.squeeze(0).cpu()), edge
        # FIX: previously an unknown output_type fell through and silently
        # returned None.
        raise ValueError(f"Unsupported output_type: {output_type!r}")


class ScharrOperator(nn.Module):
    """GPU edge detector using the Scharr gradient kernels (L1 magnitude)."""

    SCHARR_KERNEL_X = torch.tensor(
        [[-3.0, 0.0, 3.0], [-10.0, 0.0, 10.0], [-3.0, 0.0, 3.0]]
    )
    SCHARR_KERNEL_Y = torch.tensor(
        [[-3.0, -10.0, -3.0], [0.0, 0.0, 0.0], [3.0, 10.0, 3.0]]
    )

    def __init__(self, device="cuda"):
        super(ScharrOperator, self).__init__()
        self.device = device
        self.edge_conv_x = _fixed_edge_conv(self.SCHARR_KERNEL_X, self.device)
        self.edge_conv_y = _fixed_edge_conv(self.SCHARR_KERNEL_Y, self.device)

    @torch.no_grad()
    def forward(
        self,
        image: Image.Image,
        low_threshold: float,
        high_threshold: float,
        output_type="pil",
        invert: bool = False,
    ) -> Image.Image | torch.Tensor | tuple[Image.Image, torch.Tensor]:
        """Compute a thresholded (optionally inverted) edge map of *image*.

        Raises:
            ValueError: if *output_type* is not one of
                "pil", "tensor", "pil,tensor".
        """
        # Grayscale PIL image -> (1, 1, H, W) tensor on the target device.
        gray = image.convert("L")
        x = ToTensor()(gray).unsqueeze(0).to(self.device)

        # L1 gradient magnitude (cheaper than the Euclidean norm).
        gx = self.edge_conv_x(x)
        gy = self.edge_conv_y(x)
        edge = torch.abs(gx) + torch.abs(gy)

        # FIX: same zero-gradient guard as SobelOperator (avoids 0/0 NaNs).
        peak = edge.max()
        if peak > 0:
            edge.div_(peak)  # normalize to [0, 1]
        edge[edge >= high_threshold] = 1.0
        edge[edge <= low_threshold] = 0.0
        if invert:
            edge = 1 - edge

        if output_type == "pil":
            return ToPILImage()(edge.squeeze(0).cpu())
        if output_type == "tensor":
            return edge
        if output_type == "pil,tensor":
            return ToPILImage()(edge.squeeze(0).cpu()), edge
        # FIX: previously an unknown output_type fell through and silently
        # returned None.
        raise ValueError(f"Unsupported output_type: {output_type!r}")
# Copyright 2023 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import torch
import torch.nn as nn
from transformers import CLIPConfig, CLIPVisionModel, PreTrainedModel
from PIL import Image


def cosine_distance(image_embeds, text_embeds):
    """Pairwise cosine-similarity matrix between two embedding batches."""
    normalized_image_embeds = nn.functional.normalize(image_embeds)
    normalized_text_embeds = nn.functional.normalize(text_embeds)
    return torch.mm(normalized_image_embeds, normalized_text_embeds.t())


class StableDiffusionSafetyChecker(PreTrainedModel):
    """CLIP-based NSFW classifier for generated images.

    Compares CLIP image embeddings against fixed "concept" embeddings
    (loaded from the pretrained checkpoint) and flags an image when any
    concept score exceeds its learned threshold.
    """

    config_class = CLIPConfig

    _no_split_modules = ["CLIPEncoderLayer"]

    def __init__(self, config: CLIPConfig):
        super().__init__(config)

        self.vision_model = CLIPVisionModel(config.vision_config)
        self.visual_projection = nn.Linear(
            config.vision_config.hidden_size, config.projection_dim, bias=False
        )

        # 17 NSFW concepts + 3 "special care" concepts; real values come from
        # the pretrained weights (ones here are placeholders).
        self.concept_embeds = nn.Parameter(
            torch.ones(17, config.projection_dim), requires_grad=False
        )
        self.special_care_embeds = nn.Parameter(
            torch.ones(3, config.projection_dim), requires_grad=False
        )

        self.concept_embeds_weights = nn.Parameter(torch.ones(17), requires_grad=False)
        self.special_care_embeds_weights = nn.Parameter(
            torch.ones(3), requires_grad=False
        )

    @torch.no_grad()
    def forward(self, clip_input, images):
        """Return a list of booleans, one per image: True when NSFW.

        ``images`` is accepted for API compatibility but not modified here.
        """
        pooled_output = self.vision_model(clip_input)[1]  # pooled_output
        image_embeds = self.visual_projection(pooled_output)

        # we always cast to float32 as this does not cause significant
        # overhead and is compatible with bfloat16
        special_cos_dist = (
            cosine_distance(image_embeds, self.special_care_embeds)
            .cpu()
            .float()
            .numpy()
        )
        cos_dist = (
            cosine_distance(image_embeds, self.concept_embeds).cpu().float().numpy()
        )

        result = []
        batch_size = image_embeds.shape[0]
        for i in range(batch_size):
            result_img = {
                "special_scores": {},
                "special_care": [],
                "concept_scores": {},
                "bad_concepts": [],
            }

            # increase this value to create a stronger `nsfw` filter
            # at the cost of increasing the possibility of filtering
            # benign images
            adjustment = 0.0

            for concept_idx in range(len(special_cos_dist[0])):
                concept_cos = special_cos_dist[i][concept_idx]
                concept_threshold = self.special_care_embeds_weights[concept_idx].item()
                result_img["special_scores"][concept_idx] = round(
                    concept_cos - concept_threshold + adjustment, 3
                )
                if result_img["special_scores"][concept_idx] > 0:
                    # FIX: this used to append a *set* literal {idx, score},
                    # which collapses to one element whenever idx == score;
                    # record an explicit (index, score) pair instead.
                    result_img["special_care"].append(
                        (concept_idx, result_img["special_scores"][concept_idx])
                    )
                    # Any special-care hit tightens all remaining thresholds.
                    adjustment = 0.01

            for concept_idx in range(len(cos_dist[0])):
                concept_cos = cos_dist[i][concept_idx]
                concept_threshold = self.concept_embeds_weights[concept_idx].item()
                result_img["concept_scores"][concept_idx] = round(
                    concept_cos - concept_threshold + adjustment, 3
                )
                if result_img["concept_scores"][concept_idx] > 0:
                    result_img["bad_concepts"].append(concept_idx)

            result.append(result_img)

        has_nsfw_concepts = [len(res["bad_concepts"]) > 0 for res in result]

        return has_nsfw_concepts

    @torch.no_grad()
    def forward_onnx(self, clip_input: torch.FloatTensor, images: torch.FloatTensor):
        """ONNX-friendly variant: pure tensor ops, blacks out NSFW images."""
        pooled_output = self.vision_model(clip_input)[1]  # pooled_output
        image_embeds = self.visual_projection(pooled_output)

        special_cos_dist = cosine_distance(image_embeds, self.special_care_embeds)
        cos_dist = cosine_distance(image_embeds, self.concept_embeds)

        # increase this value to create a stronger `nsfw` filter
        # at the cost of increasing the possibility of filtering benign images
        adjustment = 0.0

        special_scores = (
            special_cos_dist - self.special_care_embeds_weights + adjustment
        )
        # special_scores = special_scores.round(decimals=3)
        special_care = torch.any(special_scores > 0, dim=1)
        special_adjustment = special_care * 0.01
        special_adjustment = special_adjustment.unsqueeze(1).expand(
            -1, cos_dist.shape[1]
        )

        concept_scores = (cos_dist - self.concept_embeds_weights) + special_adjustment
        # concept_scores = concept_scores.round(decimals=3)
        has_nsfw_concepts = torch.any(concept_scores > 0, dim=1)

        images[has_nsfw_concepts] = 0.0  # black image

        return images, has_nsfw_concepts


class SafetyChecker:
    """Convenience wrapper: PIL image(s) in, (image(s), nsfw flag(s)) out."""

    def __init__(self, device="cuda"):
        from transformers import CLIPFeatureExtractor

        self.device = device
        self.safety_checker = StableDiffusionSafetyChecker.from_pretrained(
            "CompVis/stable-diffusion-safety-checker"
        ).to(device)
        self.feature_extractor = CLIPFeatureExtractor.from_pretrained(
            "openai/clip-vit-base-patch32"
        )

    def __call__(
        self, images: list[Image.Image] | Image.Image
    ) -> tuple[list[Image.Image], list[bool]] | tuple[Image.Image, bool]:
        """Classify one image or a list of images; inputs are returned as-is."""
        images_list = [images] if isinstance(images, Image.Image) else images

        safety_checker_input = self.feature_extractor(
            images_list, return_tensors="pt"
        ).to(self.device)

        # FIX: the images keyword was passed as [images_list] (a list nested
        # inside another list); pass the flat list. forward() ignores this
        # argument, so behavior is unchanged, but the call is now consistent
        # with its signature.
        has_nsfw_concepts = self.safety_checker(
            images=images_list,
            clip_input=safety_checker_input.pixel_values.to(self.device),
        )

        if isinstance(images, Image.Image):
            return images, has_nsfw_concepts[0]

        return images, has_nsfw_concepts
from importlib import import_module
from types import ModuleType  # kept: part of this module's public namespace
from PIL import Image
import io


def get_pipeline_class(pipeline_name: str) -> type:
    """Import ``pipelines.<pipeline_name>`` and return its ``Pipeline`` class.

    FIX: the return annotation was ``ModuleType``, but the function returns
    the ``Pipeline`` *class* found inside the module, not the module itself.

    Raises:
        ValueError: if the module cannot be imported or defines no
            ``Pipeline`` class.
    """
    try:
        module = import_module(f"pipelines.{pipeline_name}")
    except ModuleNotFoundError:
        raise ValueError(f"Pipeline {pipeline_name} module not found")

    pipeline_class = getattr(module, "Pipeline", None)

    if pipeline_class is None:
        raise ValueError(f"'Pipeline' class not found in module '{pipeline_name}'.")

    return pipeline_class


def bytes_to_pil(image_bytes: bytes) -> Image.Image:
    """Decode raw image bytes into a PIL image."""
    return Image.open(io.BytesIO(image_bytes))


def pil_to_frame(image: Image.Image) -> bytes:
    """Encode *image* as one JPEG part of a multipart MJPEG stream."""
    buffer = io.BytesIO()
    image.save(buffer, format="JPEG", quality=80, optimize=True, progressive=True)
    payload = buffer.getvalue()
    return (
        b"--frame\r\n"
        + b"Content-Type: image/jpeg\r\n"
        + f"Content-Length: {len(payload)}\r\n\r\n".encode()
        + payload
        + b"\r\n"
    )


def is_firefox(user_agent: str) -> bool:
    """Return True when the User-Agent string identifies Firefox."""
    return "Firefox" in user_agent