Merge remote-tracking branch 'upstream/main'
- .env +63 -22
- CONTRIBUTORS.md +9 -0
- LICENCE.md +170 -0
- README.md +24 -11
- package-lock.json +249 -0
- package.json +2 -0
- src/app/engine/caption.ts +2 -2
- src/app/engine/censorship.ts +175 -30
- src/app/engine/render.ts +117 -66
- src/app/interface/bottom-bar/index.tsx +15 -2
- src/app/interface/panel/index.tsx +81 -62
- src/app/interface/top-menu/index.tsx +1 -1
- src/app/main.tsx +3 -1
- src/app/queries/getStory.ts +4 -4
- src/app/queries/getStyle.ts +4 -4
- src/app/queries/predict.ts +3 -134
- src/app/queries/predictWithHuggingFace.ts +90 -0
- src/app/queries/predictWithOpenAI.ts +33 -0
- src/types.ts +2 -0
.env
CHANGED
@@ -1,40 +1,81 @@
-# ------------- IMAGE API CONFIG --------------
-VIDEOCHAIN_API_TOKEN=
-REPLICATE_API_TOKEN=
-REPLICATE_API_MODEL="stabilityai/sdxl"
-REPLICATE_API_MODEL_VERSION="da77bc59ee60423279fd632efb4795ab731d9e3ca9705ef3341091fb989b7eaf"
-# ------------- LLM API CONFIG ----------------
-# -> You can leave it empty if you decide to use an Inference API Model instead
-HF_INFERENCE_ENDPOINT_URL=
-NEXT_PUBLIC_ENABLE_COMMUNITY_SHARING="false"

 # Supported values:
 # - VIDEOCHAIN
 # - REPLICATE
+# - INFERENCE_ENDPOINT
+# - INFERENCE_API
+RENDERING_ENGINE="INFERENCE_API"

 # Supported values:
 # - INFERENCE_ENDPOINT
 # - INFERENCE_API
+# - OPENAI
+LLM_ENGINE="INFERENCE_API"
+
+# Not implemented for the Inference API yet - you can submit a PR if you have some ideas
+NEXT_PUBLIC_CAN_UPSCALE="false"
+
+# Not implemented for the Inference API yet - you can submit a PR if you have some ideas
+NEXT_PUBLIC_CAN_REDRAW="false"
+
+# Set to "true" to create artificial delays and smooth out traffic
+NEXT_PUBLIC_ENABLE_RATE_LIMITER="false"
+
+# ------------- PROVIDER AUTH ------------
+# You only need to configure the access token(s) for the provider(s) you want to use
+
+# HuggingFace.co token: available for the LLM engine and the RENDERING engine
+AUTH_HF_API_TOKEN=
+
+# Replicate.com token: available for the RENDERING engine
+AUTH_REPLICATE_API_TOKEN=
+
+# OpenAI.com token: available for the LLM engine and the RENDERING engine
+AUTH_OPENAI_TOKEN=

+# An experimental RENDERING engine (sorry it is not very documented yet, so you can use one of the other engines)
+AUTH_VIDEOCHAIN_API_TOKEN=

+# ------------- RENDERING API CONFIG --------------

+# If you decided to use Replicate for the RENDERING engine
+RENDERING_REPLICATE_API_MODEL="stabilityai/sdxl"
+RENDERING_REPLICATE_API_MODEL_VERSION="da77bc59ee60423279fd632efb4795ab731d9e3ca9705ef3341091fb989b7eaf"

+# If you decided to use a private Hugging Face Inference Endpoint for the RENDERING engine
+RENDERING_HF_INFERENCE_ENDPOINT_URL="https://XXXXXXXXXX.endpoints.huggingface.cloud"
+
+# If you decided to use a Hugging Face Inference API model for the RENDERING engine
+RENDERING_HF_INFERENCE_API_MODEL="stabilityai/stable-diffusion-xl-base-1.0"
+
+# An experimental RENDERING engine (sorry it is not very documented yet, so you can use one of the other engines)
+RENDERING_VIDEOCHAIN_API_URL="http://localhost:7860"
+
+# ------------- LLM API CONFIG ----------------
+
+# If you decided to use OpenAI for the LLM engine
+LLM_OPENAI_API_BASE_URL="https://api.openai.com/v1"
+LLM_OPENAI_API_MODEL="gpt-3.5-turbo"
+
+# If you decided to use a private Hugging Face Inference Endpoint for the LLM engine
+LLM_HF_INFERENCE_ENDPOINT_URL=""
+
+# If you decided to use a Hugging Face Inference API model for the LLM engine
+LLM_HF_INFERENCE_API_MODEL="meta-llama/Llama-2-70b-chat-hf"

 # ----------- COMMUNITY SHARING (OPTIONAL) -----------
 # You don't need those community sharing options to run the AI Comic Factory
 # locally or on your own server (they are meant to be used by the Hugging Face team)
+NEXT_PUBLIC_ENABLE_COMMUNITY_SHARING="false"
 COMMUNITY_API_URL=
 COMMUNITY_API_TOKEN=
 COMMUNITY_API_ID=
+
+# ----------- CENSORSHIP (OPTIONAL) -----------
+# censorship is currently disabled, but will be required when we create a "community roll"
+# (a public repository of user-generated comic strips)
+ENABLE_CENSORSHIP="false"
+
+# Due to the sensitive nature of some of the keywords we want to ban (users try all kinds of crazy illegal things)
+# the words are not put in the clear in the source code, but behind an encryption key
+# (I don't want the project to be flagged by an AI robot police on GitHub or something)
+SECRET_FINGERPRINT=""
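The two selector variables introduced above, `RENDERING_ENGINE` and `LLM_ENGINE`, are what the rest of this commit keys off. Below is a minimal TypeScript sketch of how they could be read and validated at startup; the `getEngines` helper and its fallback defaults are illustrative assumptions, not code taken from this commit.

type LLMEngine = "INFERENCE_API" | "INFERENCE_ENDPOINT" | "OPENAI"
type RenderingEngine = "INFERENCE_API" | "INFERENCE_ENDPOINT" | "REPLICATE" | "VIDEOCHAIN"

const llmEngines: LLMEngine[] = ["INFERENCE_API", "INFERENCE_ENDPOINT", "OPENAI"]
const renderingEngines: RenderingEngine[] = ["INFERENCE_API", "INFERENCE_ENDPOINT", "REPLICATE", "VIDEOCHAIN"]

// Hypothetical helper: reads the selectors from .env and fails fast on typos.
export function getEngines(): { llm: LLMEngine; rendering: RenderingEngine } {
  const llm = `${process.env.LLM_ENGINE || "INFERENCE_API"}` as LLMEngine
  const rendering = `${process.env.RENDERING_ENGINE || "INFERENCE_API"}` as RenderingEngine

  if (!llmEngines.includes(llm)) {
    throw new Error(`unsupported LLM_ENGINE "${llm}"`)
  }
  if (!renderingEngines.includes(rendering)) {
    throw new Error(`unsupported RENDERING_ENGINE "${rendering}"`)
  }

  return { llm, rendering }
}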
CONTRIBUTORS.md
ADDED
@@ -0,0 +1,9 @@
This project was developed by Julian Bilcke (@jbilcke-hf), as part of his work at Hugging Face.

------------------------------------------

A huge thanks to external developers for their contributions!

艾逗笔 (@idoubi):
- Added support for OpenAI: https://github.com/jbilcke-hf/ai-comic-factory/pull/6
LICENCE.md
ADDED
@@ -0,0 +1,170 @@
Apache License
==============

_Version 2.0, January 2004_
_<<http://www.apache.org/licenses/>>_

### Terms and Conditions for use, reproduction, and distribution

#### 1. Definitions

“License” shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.

“Licensor” shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.

“Legal Entity” shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, “control” means **(i)** the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or **(ii)** ownership of fifty percent (50%) or more of the outstanding shares, or **(iii)** beneficial ownership of such entity.

“You” (or “Your”) shall mean an individual or Legal Entity exercising permissions granted by this License.

“Source” form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.

“Object” form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.

“Work” shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).

“Derivative Works” shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.

“Contribution” shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, “submitted” means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as “Not a Contribution.”

“Contributor” shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.

#### 2. Grant of Copyright License

Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.

#### 3. Grant of Patent License

Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.

#### 4. Redistribution

You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:

* **(a)** You must give any other recipients of the Work or Derivative Works a copy of this License; and
* **(b)** You must cause any modified files to carry prominent notices stating that You changed the files; and
* **(c)** You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
* **(d)** If the Work includes a “NOTICE” text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.

You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.

#### 5. Submission of Contributions

Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.

#### 6. Trademarks

This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.

#### 7. Disclaimer of Warranty

Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.

#### 8. Limitation of Liability

In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.

#### 9. Accepting Warranty or Additional Liability

While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.

_END OF TERMS AND CONDITIONS_
README.md
CHANGED
@@ -6,6 +6,7 @@ colorTo: yellow
 sdk: docker
 pinned: true
 app_port: 3000
+disable_embedding: true
 ---

 # AI Comic Factory

@@ -17,17 +18,29 @@ First, I would like to highlight that everything is open-source (see [here](http
 However the project isn't a monolithic Space that can be duplicated and run immediately:
 it requires various components to run for the frontend, backend, LLM, SDXL etc.

-If you try to duplicate the project
+If you try to duplicate the project, open the `.env` you will see it requires some variables.
+
+Provider config:
+- `LLM_ENGINE`: can be one of: "INFERENCE_API", "INFERENCE_ENDPOINT", "OPENAI"
+- `RENDERING_ENGINE`: can be one of: "INFERENCE_API", "INFERENCE_ENDPOINT", "REPLICATE", "VIDEOCHAIN" for now, unless you code your custom solution
+
+Auth config:
+- `AUTH_HF_API_TOKEN`: necessary if you decide to use a Hugging Face Inference API model or a custom Inference Endpoint
+- `AUTH_OPENAI_TOKEN`: only if you decide to use OpenAI for the LLM engine
+- `AUTH_VIDEOCHAIN_API_TOKEN`: secret token to access the VideoChain API server
+- `AUTH_REPLICATE_API_TOKEN`: in case you want to use Replicate.com
+
+Rendering config:
+- `RENDERING_HF_INFERENCE_ENDPOINT_URL`: necessary if you decide to use a custom inference endpoint
+- `RENDERING_VIDEOCHAIN_API_URL`: url to the VideoChain API server
+- `RENDERING_HF_INFERENCE_API_MODEL`: optional, defaults to "stabilityai/stable-diffusion-xl-base-1.0"
+- `RENDERING_REPLICATE_API_MODEL`: optional, defaults to "stabilityai/sdxl"
+- `RENDERING_REPLICATE_API_MODEL_VERSION`: optional, in case you want to change the version
+
+Language model config:
+- `LLM_HF_INFERENCE_ENDPOINT_URL`: "https://llama-v2-70b-chat.ngrok.io"
+- `LLM_HF_INFERENCE_API_MODEL`: "codellama/CodeLlama-7b-hf"

 In addition, there are some community sharing variables that you can just ignore.
 Those variables are not required to run the AI Comic Factory on your own website or computer
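Given the `predictWithHuggingFace.ts` and `predictWithOpenAI.ts` files listed at the top of this commit (and the roughly 130 lines removed from `predict.ts`), the `LLM_ENGINE` variable documented above presumably selects between the two back ends. A hedged TypeScript sketch of such a dispatcher follows; the function names mirror the new files, but the exact signatures are assumptions.

import { predictWithHuggingFace } from "./predictWithHuggingFace"
import { predictWithOpenAI } from "./predictWithOpenAI"

// Assumption: both helpers take a prompt string and resolve to the generated text.
export async function predict(prompt: string): Promise<string> {
  const llmEngine = `${process.env.LLM_ENGINE || ""}`

  // "OPENAI" goes through the OpenAI SDK; everything else goes through Hugging Face
  return llmEngine === "OPENAI"
    ? predictWithOpenAI(prompt)
    : predictWithHuggingFace(prompt)
}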
package-lock.json
CHANGED
@@ -37,16 +37,19 @@
     "cmdk": "^0.2.0",
     "cookies-next": "^2.1.2",
     "date-fns": "^2.30.0",
+    "encoding": "^0.1.13",
     "eslint": "8.45.0",
     "eslint-config-next": "13.4.10",
     "html2canvas": "^1.4.1",
     "lucide-react": "^0.260.0",
     "next": "13.4.10",
+    "openai": "^4.10.0",
     "pick": "^0.0.1",
     "postcss": "8.4.26",
     "react": "18.2.0",
     "react-circular-progressbar": "^2.1.0",
     "react-dom": "18.2.0",
+    "react-icons": "^4.11.0",
     "react-virtualized-auto-sizer": "^1.0.20",
     "replicate": "^0.17.0",
     "sbd": "^1.0.19",
@@ -3691,6 +3694,15 @@
     },
+    "node_modules/@types/node-fetch": {
+      "version": "2.6.6",
+      "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.6.tgz",
+      "integrity": "sha512-95X8guJYhfqiuVVhRFxVQcf4hW/2bCuoPwDasMf/531STFoNoWTT7YDnWdXHEZKqAGUigmpG31r2FE70LwnzJw==",
+      "dependencies": {
+        "@types/node": "*",
+        "form-data": "^4.0.0"
+      }
+    },
     "node_modules/@types/prop-types": {
@@ -3847,6 +3859,17 @@
     },
+    "node_modules/abort-controller": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
+      "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==",
+      "dependencies": {
+        "event-target-shim": "^5.0.0"
+      },
+      "engines": {
+        "node": ">=6.5"
+      }
+    },
     "node_modules/abs-svg-path": {
@@ -3879,6 +3902,17 @@
     },
+    "node_modules/agentkeepalive": {
+      "version": "4.5.0",
+      "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.5.0.tgz",
+      "integrity": "sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew==",
+      "dependencies": {
+        "humanize-ms": "^1.2.1"
+      },
+      "engines": {
+        "node": ">= 8.0.0"
+      }
+    },
     "node_modules/ajv": {
@@ -4096,6 +4130,11 @@
     },
+    "node_modules/asynckit": {
+      "version": "0.4.0",
+      "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
+      "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
+    },
     "node_modules/autoprefixer": {
@@ -4209,6 +4248,11 @@
     },
+    "node_modules/base-64": {
+      "version": "0.1.0",
+      "resolved": "https://registry.npmjs.org/base-64/-/base-64-0.1.0.tgz",
+      "integrity": "sha512-Y5gU45svrR5tI2Vt/X9GPd3L0HNIKzGu202EjxrXMpuc2V2CiKgemAbUUsqYmZJvPtCXoUKjNZwBJzsNScUbXA=="
+    },
     "node_modules/base64-arraybuffer": {
@@ -4430,6 +4474,14 @@
     },
+    "node_modules/charenc": {
+      "version": "0.0.2",
+      "resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz",
+      "integrity": "sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA==",
+      "engines": {
+        "node": "*"
+      }
+    },
     "node_modules/chokidar": {
@@ -4795,6 +4847,17 @@
     },
+    "node_modules/combined-stream": {
+      "version": "1.0.8",
+      "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
+      "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
+      "dependencies": {
+        "delayed-stream": "~1.0.0"
+      },
+      "engines": {
+        "node": ">= 0.8"
+      }
+    },
     "node_modules/command-score": {
@@ -4879,6 +4942,14 @@
     },
+    "node_modules/crypt": {
+      "version": "0.0.2",
+      "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz",
+      "integrity": "sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==",
+      "engines": {
+        "node": "*"
+      }
+    },
     "node_modules/crypto-js": {
@@ -5012,6 +5083,14 @@
     },
+    "node_modules/delayed-stream": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
+      "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
+      "engines": {
+        "node": ">=0.4.0"
+      }
+    },
     "node_modules/dequal": {
@@ -5051,6 +5130,15 @@
     },
+    "node_modules/digest-fetch": {
+      "version": "1.3.0",
+      "resolved": "https://registry.npmjs.org/digest-fetch/-/digest-fetch-1.3.0.tgz",
+      "integrity": "sha512-CGJuv6iKNM7QyZlM2T3sPAdZWd/p9zQiRNS9G+9COUCwzWFTs0Xp8NF5iePx7wtvhDykReiRRrSeNb4oMmB8lA==",
+      "dependencies": {
+        "base-64": "^0.1.0",
+        "md5": "^2.3.0"
+      }
+    },
     "node_modules/dir-glob": {
@@ -5139,6 +5227,14 @@
     },
+    "node_modules/encoding": {
+      "version": "0.1.13",
+      "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz",
+      "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==",
+      "dependencies": {
+        "iconv-lite": "^0.6.2"
+      }
+    },
     "node_modules/end-of-stream": {
@@ -5697,6 +5793,14 @@
     },
+    "node_modules/event-target-shim": {
+      "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
+      "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==",
+      "engines": {
+        "node": ">=6"
+      }
+    },
     "node_modules/events": {
@@ -5846,6 +5950,36 @@
     },
+    "node_modules/form-data": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
+      "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
+      "dependencies": {
+        "asynckit": "^0.4.0",
+        "combined-stream": "^1.0.8",
+        "mime-types": "^2.1.12"
+      },
+      "engines": {
+        "node": ">= 6"
+      }
+    },
+    "node_modules/form-data-encoder": {
+      "version": "1.7.2",
+      "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-1.7.2.tgz",
+      "integrity": "sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A=="
+    },
+    "node_modules/formdata-node": {
+      "version": "4.4.1",
+      "resolved": "https://registry.npmjs.org/formdata-node/-/formdata-node-4.4.1.tgz",
+      "integrity": "sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==",
+      "dependencies": {
+        "node-domexception": "1.0.0",
+        "web-streams-polyfill": "4.0.0-beta.3"
+      },
+      "engines": {
+        "node": ">= 12.20"
+      }
+    },
     "node_modules/fraction.js": {
@@ -6197,11 +6331,30 @@
     },
+    "node_modules/humanize-ms": {
+      "version": "1.2.1",
+      "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz",
+      "integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==",
+      "dependencies": {
+        "ms": "^2.0.0"
+      }
+    },
     "node_modules/hyphen": {
       "version": "1.6.6",
       "resolved": "https://registry.npmjs.org/hyphen/-/hyphen-1.6.6.tgz",
       "integrity": "sha512-XtqmnT+b9n5MX+MsqluFAVTIenbtC25iskW0Z+jLd+awfhA+ZbWKWQMIvLJccGoa2bM1R6juWJ27cZxIFOmkWw=="
     },
+    "node_modules/iconv-lite": {
+      "version": "0.6.3",
+      "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz",
+      "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==",
+      "dependencies": {
+        "safer-buffer": ">= 2.1.2 < 3.0.0"
+      },
+      "engines": {
+        "node": ">=0.10.0"
+      }
+    },
     "node_modules/idb-keyval": {
@@ -6366,6 +6519,11 @@
     },
+    "node_modules/is-buffer": {
+      "version": "1.1.6",
+      "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz",
+      "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w=="
+    },
     "node_modules/is-callable": {
@@ -6851,6 +7009,16 @@
     },
+    "node_modules/md5": {
+      "version": "2.3.0",
+      "resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz",
+      "integrity": "sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==",
+      "dependencies": {
+        "charenc": "0.0.2",
+        "crypt": "0.0.2",
+        "is-buffer": "~1.1.6"
+      }
+    },
     "node_modules/media-engine": {
@@ -6876,6 +7044,25 @@
     },
+    "node_modules/mime-db": {
+      "version": "1.52.0",
+      "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
+      "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
+      "engines": {
+        "node": ">= 0.6"
+      }
+    },
+    "node_modules/mime-types": {
+      "version": "2.1.35",
+      "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
+      "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
+      "dependencies": {
+        "mime-db": "1.52.0"
+      },
+      "engines": {
+        "node": ">= 0.6"
+      }
+    },
     "node_modules/mimic-response": {
@@ -7050,6 +7237,24 @@
     },
+    "node_modules/node-domexception": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz",
+      "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==",
+      "funding": [
+        {
+          "type": "github",
+          "url": "https://github.com/sponsors/jimmywarting"
+        },
+        {
+          "type": "github",
+          "url": "https://paypal.me/jimmywarting"
+        }
+      ],
+      "engines": {
+        "node": ">=10.5.0"
+      }
+    },
     "node_modules/node-fetch": {
@@ -7223,6 +7428,29 @@
     },
+    "node_modules/openai": {
+      "version": "4.10.0",
+      "resolved": "https://registry.npmjs.org/openai/-/openai-4.10.0.tgz",
+      "integrity": "sha512-II4b5/7qzwYkqA9MSjgqdofCc798EW+dtF2h6qNaVLet+qO7FShAJTWnoyzb50J4ZH1rPxRFAsmDLIhY3PT6DQ==",
+      "dependencies": {
+        "@types/node": "^18.11.18",
+        "@types/node-fetch": "^2.6.4",
+        "abort-controller": "^3.0.0",
+        "agentkeepalive": "^4.2.1",
+        "digest-fetch": "^1.3.0",
+        "form-data-encoder": "1.7.2",
+        "formdata-node": "^4.3.2",
+        "node-fetch": "^2.6.7"
+      },
+      "bin": {
+        "openai": "bin/cli"
+      }
+    },
+    "node_modules/openai/node_modules/@types/node": {
+      "version": "18.17.19",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-18.17.19.tgz",
+      "integrity": "sha512-+pMhShR3Or5GR0/sp4Da7FnhVmTalWm81M6MkEldbwjETSaPalw138Z4KdpQaistvqQxLB7Cy4xwYdxpbSOs9Q=="
+    },
     "node_modules/opencollective-postinstall": {
@@ -7681,6 +7909,14 @@
     },
+    "node_modules/react-icons": {
+      "version": "4.11.0",
+      "resolved": "https://registry.npmjs.org/react-icons/-/react-icons-4.11.0.tgz",
+      "integrity": "sha512-V+4khzYcE5EBk/BvcuYRq6V/osf11ODUM2J8hg2FDSswRrGvqiYUYPRy4OdrWaQOBj4NcpJfmHZLNaD+VH0TyA==",
+      "peerDependencies": {
+        "react": "*"
+      }
+    },
     "node_modules/react-is": {
@@ -8043,6 +8279,11 @@
     },
+    "node_modules/safer-buffer": {
+      "version": "2.1.2",
+      "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
+      "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
+    },
     "node_modules/sanitize-html": {
@@ -9067,6 +9308,14 @@
     },
+    "node_modules/web-streams-polyfill": {
+      "version": "4.0.0-beta.3",
+      "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz",
+      "integrity": "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==",
+      "engines": {
+        "node": ">= 14"
+      }
+    },
     "node_modules/webidl-conversions": {
package.json
CHANGED
@@ -38,6 +38,7 @@
     "cmdk": "^0.2.0",
     "cookies-next": "^2.1.2",
     "date-fns": "^2.30.0",
+    "encoding": "^0.1.13",
     "eslint": "8.45.0",
     "eslint-config-next": "13.4.10",
     "html2canvas": "^1.4.1",
@@ -49,6 +50,7 @@
     "react": "18.2.0",
     "react-circular-progressbar": "^2.1.0",
     "react-dom": "18.2.0",
+    "react-icons": "^4.11.0",
     "react-virtualized-auto-sizer": "^1.0.20",
     "replicate": "^0.17.0",
     "sbd": "^1.0.19",
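The `openai` dependency added here is the v4 Node SDK. As a rough illustration of how the new `predictWithOpenAI.ts` query could combine it with the `LLM_OPENAI_*` and `AUTH_OPENAI_TOKEN` variables from the `.env` above (the real implementation lives in the file listed at the top of this commit and may differ):

import OpenAI from "openai"

// The env var names match the new .env; the wrapper itself is a sketch.
export async function predictWithOpenAI(prompt: string): Promise<string> {
  const openai = new OpenAI({
    apiKey: `${process.env.AUTH_OPENAI_TOKEN || ""}`,
    baseURL: `${process.env.LLM_OPENAI_API_BASE_URL || "https://api.openai.com/v1"}`,
  })

  const response = await openai.chat.completions.create({
    model: `${process.env.LLM_OPENAI_API_MODEL || "gpt-3.5-turbo"}`,
    messages: [{ role: "user", content: prompt }],
  })

  return response.choices[0]?.message?.content || ""
}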
src/app/engine/caption.ts
CHANGED
@@ -2,7 +2,7 @@
 import { ImageAnalysisRequest, ImageAnalysisResponse } from "@/types"

-const apiUrl = `${process.env.
+const apiUrl = `${process.env.RENDERING_VIDEOCHAIN_API_URL || ""}`

 export async function see({
   prompt,
@@ -33,7 +33,7 @@ export async function see({
     headers: {
       Accept: "application/json",
       "Content-Type": "application/json",
-      // Authorization: `Bearer ${
+      // Authorization: `Bearer ${videochainApi}`,
     },
     body: JSON.stringify(request),
     cache: 'no-store',
src/app/engine/censorship.ts
CHANGED
@@ -1,39 +1,184 @@
-// unfortunately due to abuse by some users, I have to add this NSFW filter
-const secretSalt = `${process.env.SECRET_CENSORSHIP_KEY || ""}`
-
-// TODO the censorship is not implement yet actually

 // I don't want to be banned by Replicate because bad actors are asking
 // for some naked anime stuff or whatever
 // I also want to avoid a PR scandal due to some bad user generated content

+import { computeSecretFingerprint } from "@/lib/computeSecretFingerprint"
+
+// those keywords have been generated by looking at the logs of the panorama and the AI Comic Factory
+// those are real requests some users tried to attempt.. :|
+
+const chickens = [
+  "fcb4dacbd99b21368c50f29c1d47071c87cf2225ab9192282c785460391cd365",
+  "68840b60ac27eacaa7afe17e898d3c4a2dc71acff8c74d6782c1bcaafd14963d",
+  "67f745224fd6e1a7a3a244514d5807fcc994cbb62ca4ec8fa44cd14244a515ae",
+  "681fea565117808c6dbe002520d2cfeeb3e5c67e68630afb4a453449a9da587b",
+  "2f3d913b3db9e15a930aac43eb2d6fe8817db8e4bcf37794bf0227b06b718d1b",
+  "922a700b807e4994df82eba2b48a6ac131fe8d8d1035d06b3592d622fb232161",
+  "cb69ee6774eafcc720adb1f689d28acbb9f47998cbea0299ec66a58dedf91c37"
+]
+
+const ducks = [
+  "1c52cb20c0cbc76349fa63232b982bd394cf0850ebc17240dcf33c19fb15a26d",
+  "e1d4de9b8d464d7da07c276b63a42c1c9922224f0a6cab6b0826427ce4a7461a",
+  "0be3174bfb1a48a65875c2f035b1ae14fbc8f232f55785018de0cfe2132fa952",
+  "0f174769641b2e5d2c79b5a83e8ef91e004f6f3e62531cd70cfdff02159268cb",
+  "e9fb8ae8ff720acd91025229478a21e43e8e976e30119a76c293201adf572736",
+  "f65a0dc0e07b5d084ff24c69dcdb953f7b57101d2ebb716d4dfb5963076ef807",
+  "2bf38af1646489c2c086f811d082054cd29e23fa7bb5c525396bec01b3ab688e"
+]
+
+const cats = [
+  "fcffc3e997d952007d1b902a9cf40b750ba4a410ac65bfd95475996bf51359e4",
+  "3172a5fa159754d703489dfba5af520b8ace107cdf170f4c4cb38a6797aa163f",
+  "500012dbff4498a9c4513369d6b9b373fab9330ffd2cb1e622294043cc21b610",
+  "84e3a8d34ee7d0c8e7a2926dd1acad46a0b66b9d27725b3a7e5053550f490301"
+]
+
+const roasted = [
+  "a2bfbce0046c9a52a0eabf98f73e0f8e09959970431fc892ebdb4e1c97031b50",
+  "6eca1adf06851f99e9cdfbb496c27d46ff81106903d11f3346a146e96082b016",
+  "49a124c9ed6fbbad4105b3657dc25de369bcafb9d6787f610c08f584cd607d0f",
+  "c3afb59420c812cbc7c8f57ad3e8d79407f10106a99f829aa65316c99d0b29c4",
+  "2b808858836a5c205080f5b93201ef92e098cff931d8de6d9f20dc722997d077",
+  "07bef89d1a7d63c9c5ed64ba0f73d6cff689811847c2e20c8b3fbfb060e1d64e",
+  "baeb994922d5473f534aa54322d83effe74c6c4dac807e6b523a677d7acdc17b",
+  "ea4735a879edd5cc94ca7db26edd5a970df69a41f0009d3444486647e44175af",
+  "f2412249030454cd13ac6f7965871d924c16daacda0123de81892adb19ce49ac",
+  "9958c56e12bab8549cf752bcd8bec4ac36cf79c404b1faf5611f057bb71bc0e1",
+  "76cdade0b3d4caf0888f60318a5cbca00f830a3b0bf37735fc64fdaeb67c34d3",
+  "1bf53c97869e1ea89bda19da64a9173d48fe4ec823e949e2c898f8abb3fbf457",
+  "1bf53c97869e1ea89bda19da64a9173d48fe4ec823e949e2c898f8abb3fbf457",
+  "3d7f973fab8f4a19c0a3e59efe970ed7bd55a1cb795752d9cbe3c19e8a7d81ec"
 ]

+const banned = [
+  "8a05d4869d9d6ce388c6cd2db13ca12b88097b90f9be027d5ffaaa467c7a6e5e",
+  "0c475212a608138244c5fc150b1563e5ef79c516234fd78dcd5993f726c359a0",
+  "df17388805f99f2ff3e5ae97a0f55e5c927eb47f17ca65822bf8c88f02bac3dd",
+  "86c3355d1bd581cdf7306729d8dd0ee9b7a317b9cfd6d7a6f5fad9c0dafe2167",
+  "23a2484cd420c9ffbfcc2c0075a9b330664450ced1fc64ab6a65e278086b8c6e",
+  "fb4cabe709b62eea1b4cc0030c76f5e4a43ee677ce19124e8e7bafa86c78ab66",
+  "d99c26daee85f7dc81c46c061a5874cff7179ed72d884d2316d664d36ffe7ab5",
+  "b93c38af5aa221d76c60ee3eb762efee0cdb0daf29ceb235b7dda6d46c06490d",
+  "8cf6c8765dc757319461dd9a785e77c201b8e5a604d36b817cd987c6a5e62500",
+  "f4a1cb290745717f86c3cee30fc324c0d80a9945fcbc7bbeb010579f58792f1e",
+  "7c87c47c42fc983119551342be9ddd5b32e530c0504ccdbbaa1e12b1d9f1bbcb",
+  "d04fad4f21d030da7a1301afbf480ef6246eb7bbf0f26e31865b2e015a25f747",
+  "d685ff22fb9da01ee949db212770729603989850864ef7a7085e1f086cfa7deb",
+  "533b90588d9ccf7967da54691f575e9fd4926c6e0b5fd94a47b932bcea270bee",
+  "9c2d61f28f5bb7f3f1dc9122be64cda8a428b46ce68b70120da4c41dba96ba4c",
+  "5d4b1a3eebe64dfa631d0e3b084bd96ee9364c3669269f838ca17a4900276264",
+  "d56f56413b9679fc0820a2c0237224ded8554c61fab8959c174123c8b68ba029",
+  "323a9ab60739726070d615ff3a05d7ff6bb6e3c4dd9ff16ce24f253ecd7b8851",
+  "975c6739de7d4999db15972f707f5f4e95649275f1c0c48e895b8c537e8638ec",
+  "67ee26eb9e1c1c7124797321b02bca90a19c18171782917cd4a487b722484dce",
+  "6df5aa7b72a4e6e3fb726489ff1437daa5752047507f4da912680b1d6647c7d6",
+  "b0864805364359e8c5810c233b1bf2c74dedce9055ae5f7680ba05b4e39db8e2",
+  "a8f841472ecffdd6266151148320c8e36847a24ead9d3338e0313b075c16649d",
+  "f9b127cd90e85b0ff68dd220361671663f0154b2b827f1f7ea797b020ca0018c",
+  "d5c20e9a1ecf01c82da24c514d867498b3e5f522adc1523ce29404a6563641d5",
+  "241022b49d7c0aba24a61eea1137a804f36e4bcb47af42950275baac9b4e7aac",
+  "fc99a70e17b6c86ef1b537654b0f50353567a7b59912c3ba955f3fca4d1ea696",
+  "255306e968009003d295cb2a7256f27bfcdb5d1743bf4d9f2aa4b8adf1a7734d",
+  "048c7b709763dd9c43794d241c369f0abcb079d546ddcbbba9968a1ed1da7ed7",
+  "520cbfeef3e4c405d79478eedccb97a4d476be585626dd2b1c53292797491bc7",
+  "f9f28a7ae7e8b1719b350a04dc087a4b8e33478d109ceeef6ba892b32d1105c9",
+  "d177f1bfe603647ef4c1c0e6f1a7172081fb9bbc2ea859705949f2c5aa5d4f22",
+  "302feef2c09247fbd23789581f7f5e2219f88ae0a937880954938573c2a52a84",
+  "99edd6f57b864873835f16f19c805dd94bed9da8967b84e3a62782f106d9ebcc",
+  "e75e5f01dcd8351c9553e89558085bd68e6feb295dee5d8da0c9b43ee303ce36",
+  "135e52a026aea9d2e12de358a85e05cf21121a18269269b7c62678c3bc846f5b",
+  "28e5b2d3eb5f1ef4cc7b570878b03acf303a6ca4ca95893591e0fb943b0beab0",
+  "a26b26340f8d0363633490556d20bcc250726d10e1431eb8c22d6b1ff3f2b14a",
+  "27e4ddde96ec6a1dbe1cf12d79448b3e72f144944c15b299629542d1b65fbabf",
+  "efd9c0a391ee93251046a58326d1b21b33fe21d71a3fb1855b9048ade53df77c",
+  "6d505fcce416c26a606878aab4d249a034ba2a9846cb1f883e0f9e3fb76ba6da",
+  "3a37b8a1b72f9bca51233536d50f9c8d33a787434684787871e0049c82347cda",
+  "16f9b451184a7c3148344c7d0315f5312ca20553d2271912ecaad91810d977e6",
+  "7406537eb74d1885bd05e191228de313b13702a64d90ae1736c6377b25ab579a",
+  "7e4d1395ae18980015cab16c85ffa20b4cb90a2db594126e893d0f7ac6eecaa8",
+  "ba813ee6c25698f0f68a07121d38bb47c9aa404c1ab0a6e767595cb75e1747b8",
+  "6586c93f3ece83e01ecc1eb84a7711e7975826a388d478a009468ea0ed9dc03e",
+  "8960174c74d86e03ae88fb6774580170e49952f2286d960be08c556bbd0dda95",
+  "4d611454369aa1a4e2b7eed1734fac5d480f08fb86b87a162967e416370f2a8e",
+  "59d48440f85eabf565fe8d3bc6b973ba64c70df3b36b0511e0e67ceca91762b3",
+  "cd926926e2af74e43d1a6a420a7e1933b78662320477a3c018b2711d8765e339",
+  "80e90057df6a59823f51aafac36ed5bc4e5ac26d675d9c1467501590c82f12d4",
+  "a9cf28b869b70e258adde5639a048f866ec86f8f3f3d53bfc960b86aa6da9239",
+  "cc2adbf8ac0cddeefa304d7b20f14a7e047a4b2299cc5e8f898f5c59660bd964",
+  "92a150a46146e9d3f84899cf15e12514af684e7ee18d7add782ddd4f4a15ef18",
+  "d9b2e84ef6dc0ce449357d52c9095f69b173a1b848ea2921199d33b0ec10024a",
+  "a9329a7e4d367a0135c1ca86c6ce5ecabcc26529235229d71b6bf991f7689e21",
+  "8f160c6fd8ccc3fb2a371a4b52748f0bd030766627c4322e2911fe82f6b10497",
+  "620e96eae4f3e88cbe0770292b33724c5df3866d83f39df6380441f7271c80e2",
+  "cafa3481fa3c45ed1e55cd0129c12b477eeab5aa3d6da20cae6d6292f19b0e6d",
+  "be07994e9a83aa3689e79b6e96123676ccc4fa29f523c28c750c6d60505531ee",
+  "f6498069768cd3aa79b2b0c91879694f05a259c8ee4a6bb343f0435f74eb1b53",
+  "c9b6b26cb3a694eb78fcac0a14ad18d46d50907186a9add41022d31d191b2b65"
+]
+
+const young = [
+  "ffdf66787b4a33b78b18c18822e334cfe2c8406caf442851deef451bd43140a1",
+  "858f22219afc4b32a7ba9a27a213d7f495e77c3cceed8147eae5282bf3e23d39",
+  "8c3c46df84ace3d58d4ce0fbc513017986b33c6002ae369d9f7dd1f892a898cb",
+  "66caa22b9483fdf026ce67de61067d81535a7c9b3169cbc5c2a455ac8dcc7bec",
+  "76893047b1eff9fadc7be07b13adb5aaed9c73bcdeea46ee07098605e2c7ff76",
+  "526cb848754e2baaa17376a5693d90ba3f69f71fd2a866f22876ac8a075849a7",
+  "f59c38e31d0f64dc1bfcdf34451723bc1a65570e209e5496c8d1d7f6d3d649db",
+  "e013a67e275c62c1402ccbbb11ad14afb8b8a82318a44c07d67599ed5ac874de",
+  "3bef34219fb07f867ecbff4d6748f598d6cc0761e17dd0d431ee1f4ec3281374",
+  "8211bf5f613fac06cd5d074d34c16dfacc9367c8afaa6ad3aff99d145e5221be"
+]
+
+const getFingerprint = (word: string) => {
+  return computeSecretFingerprint(
+    word.toLocaleLowerCase().replaceAll(/[^a-zA-Z0-9]/gi, "")
+  )
+}
+
+const encode = (list: string[]) => {
+  console.log(JSON.stringify(
+    list.sort((a, b) => (b.length - a.length))
+      .map(item => getFingerprint(item)), null, 2))
+}
+
+// encode([ "badword" ])
+
 export const filterOutBadWords = (sentence: string) => {
+  if (process.env.ENABLE_CENSORSHIP !== "true") { return sentence }
+
+  let requireCensorship = false
+
+  const words = sentence.replaceAll(/[^a-zA-Z0-9]/gi, " ").replaceAll(/\s+/gi, " ").trim().split(" ")
+
+  const sanitized = words.map(word => {
+    const fingerprint = getFingerprint(word)
+
+    let result: string = word
+    // some users want to play it smart and bypass our system so let's play too
+    if (chickens.includes(fingerprint)) {
+      result = "large chicken"
+    } else if (ducks.includes(fingerprint)) {
+      result = "big duck"
+    } else if (cats.includes(fingerprint)) {
+      result = "cat"
+    } else if (roasted.includes(fingerprint)) {
+      result = "roasted chicken"
+    } else if (young.includes(fingerprint)) {
+      result = "adult"
+    } else if (banned.includes(fingerprint)) {
+      result = "_BANNED_"
+    }
+
+    if (result !== word) {
+      requireCensorship = true
+    }
+    return result
+  }).filter(item => item !== "_BANNED_").join(" ")
+
+  // if the user didn't try to use a bad word, we leave it untouched
|
181 |
+
// he words array has been degraded by the replace operation, but it removes commas etc which isn't great
|
182 |
+
// so if the request was genuine and SFW, it's best to return the original prompt
|
183 |
+
return requireCensorship ? sanitized : sentence
|
184 |
}
|
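
For readers skimming the diff, here is a minimal usage sketch of the filter above. The wrapper function, the example prompt and the "@/app/engine/censorship" import path are assumptions for illustration, not part of the commit; only filterOutBadWords and the ENABLE_CENSORSHIP flag come from the file itself.

// sketch: run the fingerprint-based filter before a prompt reaches the rendering engine
import { filterOutBadWords } from "@/app/engine/censorship" // assumed path alias

function buildSafePrompt(userPrompt: string): string {
  // when ENABLE_CENSORSHIP !== "true", the input is returned untouched
  return filterOutBadWords(userPrompt)
}

// buildSafePrompt("a cat reading a newspaper") // hypothetical input
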
src/app/engine/render.ts
CHANGED
@@ -1,6 +1,7 @@
 "use server"
 
+import { v4 as uuidv4 } from "uuid"
+import Replicate from "replicate"
 
 import { RenderRequest, RenderedScene, RenderingEngine } from "@/types"
 import { generateSeed } from "@/lib/generateSeed"
@@ -8,13 +9,17 @@ import { sleep } from "@/lib/sleep"
 
 const renderingEngine = `${process.env.RENDERING_ENGINE || ""}` as RenderingEngine
 
+// TODO: we should split Hugging Face and Replicate backends into separate files
+const huggingFaceToken = `${process.env.AUTH_HF_API_TOKEN || ""}`
+const huggingFaceInferenceEndpointUrl = `${process.env.RENDERING_HF_INFERENCE_ENDPOINT_URL || ""}`
+const huggingFaceInferenceApiModel = `${process.env.RENDERING_HF_INFERENCE_API_MODEL || ""}`
 
+const replicateToken = `${process.env.AUTH_REPLICATE_API_TOKEN || ""}`
+const replicateModel = `${process.env.RENDERING_REPLICATE_API_MODEL || ""}`
+const replicateModelVersion = `${process.env.RENDERING_REPLICATE_API_MODEL_VERSION || ""}`
+
+const videochainToken = `${process.env.AUTH_VIDEOCHAIN_API_TOKEN || ""}`
+const videochainApiUrl = `${process.env.RENDERING_VIDEOCHAIN_API_URL || ""}`
 
 export async function newRender({
   prompt,
@@ -27,10 +32,10 @@ export async function newRender({
   width: number
   height: number
 }) {
-  // console.log(`newRender(${prompt})`)
   if (!prompt) {
+    const error = `cannot call the rendering API without a prompt, aborting..`
+    console.error(error)
+    throw new Error(error)
   }
 
   let defaulResult: RenderedScene = {
@@ -61,12 +66,23 @@ export async function newRender({
     const seed = generateSeed()
     const prediction = await replicate.predictions.create({
       version: replicateModelVersion,
+      input: {
+        prompt: [
+          "beautiful",
+          "intricate details",
+          prompt,
+          "award winning",
+          "high resolution"
+        ].join(", "),
+        width,
+        height,
+        seed
+      }
     })
 
     // console.log("prediction:", prediction)
 
-    // no need to reply straight away
+    // no need to reply straight away as images take time to generate, this isn't instantaneous
    // also our friends at Replicate won't like it if we spam them with requests
     await sleep(4000)
 
@@ -79,14 +95,88 @@ export async function newRender({
       maskUrl: "",
       segments: []
     } as RenderedScene
+  } if (renderingEngine === "INFERENCE_ENDPOINT" || renderingEngine === "INFERENCE_API") {
+    if (!huggingFaceToken) {
+      throw new Error(`you need to configure your HF_API_TOKEN in order to use the ${renderingEngine} rendering engine`)
+    }
+    if (renderingEngine === "INFERENCE_ENDPOINT" && !huggingFaceInferenceEndpointUrl) {
+      throw new Error(`you need to configure your RENDERING_HF_INFERENCE_ENDPOINT_URL in order to use the INFERENCE_ENDPOINT rendering engine`)
+    }
+    if (renderingEngine === "INFERENCE_API" && !huggingFaceInferenceApiModel) {
+      throw new Error(`you need to configure your RENDERING_HF_INFERENCE_API_MODEL in order to use the INFERENCE_API rendering engine`)
+    }
+
+    const url = renderingEngine === "INFERENCE_ENDPOINT"
+      ? huggingFaceInferenceEndpointUrl
+      : `https://api-inference.huggingface.co/models/${huggingFaceInferenceApiModel}`
+
+    /*
+    console.log(`calling ${url} with params: `, {
+      num_inference_steps: 25,
+      guidance_scale: 8,
+      width,
+      height,
+    })
+    */
+
+    const res = await fetch(url, {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+        Authorization: `Bearer ${huggingFaceToken}`,
+      },
+      body: JSON.stringify({
+        inputs: [
+          "beautiful",
+          "intricate details",
+          prompt,
+          "award winning",
+          "high resolution"
+        ].join(", "),
+        parameters: {
+          num_inference_steps: 25,
+          guidance_scale: 8,
+          width,
+          height,
+        },
+        use_cache: false,
+      }),
+      cache: "no-store",
+      // we can also use this (see https://vercel.com/blog/vercel-cache-api-nextjs-cache)
+      // next: { revalidate: 1 }
+    })
+
+
+    // Recommendation: handle errors
+    if (res.status !== 200) {
+      const content = await res.text()
+      console.error(content)
+      // This will activate the closest `error.js` Error Boundary
+      throw new Error('Failed to fetch data')
+    }
+
+    const blob = await res.arrayBuffer()
+
+    const contentType = res.headers.get('content-type')
+
+    const assetUrl = `data:${contentType};base64,${Buffer.from(blob).toString('base64')}`
+
+    return {
+      renderId: uuidv4(),
+      status: "completed",
+      assetUrl,
+      alt: prompt,
+      error: "",
+      maskUrl: "",
+      segments: []
+    } as RenderedScene
   } else {
-    const res = await fetch(`${apiUrl}/render`, {
+    const res = await fetch(`${videochainApiUrl}/render`, {
      method: "POST",
      headers: {
        Accept: "application/json",
        "Content-Type": "application/json",
+        Authorization: `Bearer ${videochainToken}`,
      },
      body: JSON.stringify({
        prompt,
@@ -114,14 +204,7 @@ export async function newRender({
       // next: { revalidate: 1 }
     })
 
-    // console.log("res:", res)
-    // The return value is *not* serialized
-    // You can return Date, Map, Set, etc.
-    // Recommendation: handle errors
     if (res.status !== 200) {
-      // This will activate the closest `error.js` Error Boundary
       throw new Error('Failed to fetch data')
     }
 
@@ -136,8 +219,9 @@ export async function newRender({
 
 export async function getRender(renderId: string) {
   if (!renderId) {
+    const error = `cannot call the rendering API without a renderId, aborting..`
+    console.error(error)
+    throw new Error(error)
   }
 
   let defaulResult: RenderedScene = {
@@ -153,24 +237,15 @@ export async function getRender(renderId: string) {
   try {
     if (renderingEngine === "REPLICATE") {
       if (!replicateToken) {
+        throw new Error(`you need to configure your AUTH_REPLICATE_API_TOKEN in order to use the REPLICATE rendering engine`)
       }
       if (!replicateModel) {
+        throw new Error(`you need to configure your RENDERING_REPLICATE_API_MODEL in order to use the REPLICATE rendering engine`)
       }
 
-      // const replicate = new Replicate({ auth: replicateToken })
-      // console.log("Calling replicate..")
-      // const prediction = await replicate.predictions.get(renderId)
-      // console.log("Prediction:", prediction)
-      // console.log(`calling GET https://api.replicate.com/v1/predictions/${renderId}`)
       const res = await fetch(`https://api.replicate.com/v1/predictions/${renderId}`, {
         method: "GET",
         headers: {
-          // Accept: "application/json",
-          // "Content-Type": "application/json",
           Authorization: `Token ${replicateToken}`,
         },
         cache: 'no-store',
@@ -178,10 +253,6 @@ export async function getRender(renderId: string) {
        // next: { revalidate: 1 }
      })
 
-      // console.log("res:", res)
-      // The return value is *not* serialized
-      // You can return Date, Map, Set, etc.
      // Recommendation: handle errors
      if (res.status !== 200) {
        // This will activate the closest `error.js` Error Boundary
@@ -189,7 +260,6 @@ export async function getRender(renderId: string) {
      }
 
      const response = (await res.json()) as any
-      // console.log("response:", response)
 
      return {
        renderId,
@@ -202,41 +272,31 @@ export async function getRender(renderId: string) {
      } as RenderedScene
    } else {
      // console.log(`calling GET ${apiUrl}/render with renderId: ${renderId}`)
+      const res = await fetch(`${videochainApiUrl}/render/${renderId}`, {
        method: "GET",
        headers: {
          Accept: "application/json",
          "Content-Type": "application/json",
+          Authorization: `Bearer ${videochainToken}`,
        },
        cache: 'no-store',
        // we can also use this (see https://vercel.com/blog/vercel-cache-api-nextjs-cache)
        // next: { revalidate: 1 }
      })
 
-      // console.log("res:", res)
-      // The return value is *not* serialized
-      // You can return Date, Map, Set, etc.
-      // Recommendation: handle errors
      if (res.status !== 200) {
-        // This will activate the closest `error.js` Error Boundary
        throw new Error('Failed to fetch data')
      }
 
      const response = (await res.json()) as RenderedScene
-      // console.log("response:", response)
      return response
    }
  } catch (err) {
    console.error(err)
    defaulResult.status = "error"
    defaulResult.error = `${err}`
-    // Gorgon.clear(cacheKey)
    return defaulResult
  }
-  // }, cacheDurationInSec * 1000)
 }
 
 export async function upscaleImage(image: string): Promise<{
@@ -244,8 +304,9 @@ export async function upscaleImage(image: string): Promise<{
   error: string
 }> {
   if (!image) {
+    const error = `cannot call the rendering API without an image, aborting..`
+    console.error(error)
+    throw new Error(error)
   }
 
   let defaulResult = {
@@ -255,12 +316,12 @@ export async function upscaleImage(image: string): Promise<{
 
   try {
     // console.log(`calling GET ${apiUrl}/render with renderId: ${renderId}`)
+    const res = await fetch(`${videochainApiUrl}/upscale`, {
      method: "POST",
      headers: {
        Accept: "application/json",
        "Content-Type": "application/json",
+        Authorization: `Bearer ${videochainToken}`,
      },
      cache: 'no-store',
      body: JSON.stringify({ image, factor: 3 })
@@ -268,13 +329,7 @@ export async function upscaleImage(image: string): Promise<{
      // next: { revalidate: 1 }
    })
 
-    // console.log("res:", res)
-    // The return value is *not* serialized
-    // You can return Date, Map, Set, etc.
-    // Recommendation: handle errors
    if (res.status !== 200) {
-      // This will activate the closest `error.js` Error Boundary
      throw new Error('Failed to fetch data')
    }
 
@@ -282,13 +337,9 @@ export async function upscaleImage(image: string): Promise<{
      assetUrl: string
      error: string
    }
-    // console.log("response:", response)
    return response
  } catch (err) {
    console.error(err)
-    // Gorgon.clear(cacheKey)
    return defaulResult
  }
-
-  // }, cacheDurationInSec * 1000)
 }
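
Note that newRender() only kicks off a job for the Replicate and VideoChain engines (the Hugging Face branches return a completed scene immediately), so callers are expected to poll getRender() until the scene settles. A minimal sketch of that loop; the wrapper function and the polling interval are assumptions, not code from this commit:

// sketch: request a panel image, then poll until the scene is no longer pending
import { newRender, getRender } from "@/app/engine/render"
import { sleep } from "@/lib/sleep"

async function renderPanel(prompt: string, width = 1024, height = 512) {
  let scene = await newRender({ prompt, width, height })
  while (scene.status === "pending") {
    await sleep(2000) // assumed interval, not taken from the diff
    scene = await getRender(scene.renderId)
  }
  return scene // "completed" (with assetUrl) or "error"
}
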
src/app/interface/bottom-bar/index.tsx
CHANGED
@@ -101,11 +101,13 @@ ${uploadUrl
       `print:hidden`,
       `fixed bottom-2 md:bottom-4 left-2 right-0 md:left-3 md:right-1`,
       `flex flex-row`,
+      `justify-between`,
+      `pointer-events-none`
     )}>
       <div className={cn(
         `flex flex-row`,
         `items-end`,
+        `pointer-events-auto`,
         `animation-all duration-300 ease-in-out`,
         isGeneratingStory ? `scale-0 opacity-0` : ``,
         `space-x-3`,
@@ -115,12 +117,16 @@ ${uploadUrl
       </div>
       <div className={cn(
         `flex flex-row`,
+        `pointer-events-auto`,
         `animation-all duration-300 ease-in-out`,
         isGeneratingStory ? `scale-0 opacity-0` : ``,
         `space-x-3`,
         `scale-[0.9]`
       )}>
         <div>
+          {
+            // there is an issue, this env check doesn't work..
+            // process.env.NEXT_PUBLIC_CAN_UPSCALE === "true" ?
           <Button
             onClick={handleUpscale}
             disabled={!prompt?.length || remainingImages > 0 || isUpscaling || !Object.values(upscaleQueue).length}
@@ -129,6 +135,8 @@ ${uploadUrl
               ? `${allStatus.length - Object.values(upscaleQueue).length}/${allStatus.length} ⌛`
               : "Upscale"}
           </Button>
+          // : null
+          }
         </div>
         <div>
           <Button
@@ -152,6 +160,9 @@ ${uploadUrl
           </Button>
         </div>
         <div>
+          {
+            // there is an issue, this env check doesn't work..
+            // process.env.NEXT_PUBLIC_ENABLE_COMMUNITY_SHARING === "true" ?
           <Button
             onClick={handleShare}
             disabled={!prompt?.length}
@@ -162,7 +173,9 @@ ${uploadUrl
             <span className="hidden md:inline">Share to community</span>
             <span className="inline md:hidden">Share</span>
           </div>
-          </Button>
+          </Button>
+          //: null
+          }
         </div>
       </div>
     </div>
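
The pointer-events-none / pointer-events-auto pair added above is the usual Tailwind pattern for a full-width fixed bar: the outer container ignores clicks so the page behind it stays interactive, while each button group opts back in. A reduced sketch of the pattern; the component name and class list are illustrative, not the actual bottom bar:

// sketch: clicks fall through the fixed container but still reach the inner toolbar
export function FixedBarSketch() {
  return (
    <div className="fixed bottom-2 left-2 right-0 flex flex-row justify-between pointer-events-none">
      <div className="flex flex-row pointer-events-auto">{/* buttons remain clickable */}</div>
    </div>
  )
}
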
src/app/interface/panel/index.tsx
CHANGED
@@ -1,7 +1,7 @@
 "use client"
 
 import { useEffect, useRef, useState, useTransition } from "react"
+import { RxReload } from "react-icons/rx"
 
 import { RenderedScene } from "@/types"
 
@@ -12,9 +12,6 @@ import { cn } from "@/lib/utils"
 import { getInitialRenderedScene } from "@/lib/getInitialRenderedScene"
 import { Progress } from "@/app/interface/progress"
 
-// import { see } from "@/app/engine/caption"
-// import { replaceTextInSpeechBubbles } from "@/lib/replaceTextInSpeechBubbles"
-
 export function Panel({
   panel,
   className = "",
@@ -28,13 +25,13 @@ export function Panel({
 }) {
   const panelId = `${panel}`
 
+  const [mouseOver, setMouseOver] = useState(false)
   const ref = useRef<HTMLImageElement>(null)
   const font = useStore(state => state.font)
   const preset = useStore(state => state.preset)
 
   const setGeneratingImages = useStore(state => state.setGeneratingImages)
 
-  const [imageWithText, setImageWithText] = useState("")
   const panels = useStore(state => state.panels)
   const prompt = panels[panel] || ""
 
@@ -59,11 +56,12 @@ export function Panel({
 
   const timeoutRef = useRef<any>(null)
 
+  const enableRateLimiter = `${process.env.NEXT_PUBLIC_ENABLE_RATE_LIMITER}` === "true"
 
+  const delay = enableRateLimiter ? (3000 + (1000 * panel)) : 1000
+
+
+  const startImageGeneration = ({ prompt, width, height }: { prompt: string, width: number, height: number}) => {
     // console.log("Panel prompt: "+ prompt)
     if (!prompt?.length) { return }
 
@@ -76,36 +74,47 @@ export function Panel({
     setTimeout(() => {
       startTransition(async () => {
 
+        // console.log(`Loading panel ${panel}..`)
+
+        let newRendered: RenderedScene
+        try {
+          newRendered = await newRender({ prompt, width, height })
+        } catch (err) {
+          // "Failed to load the panel! Don't worry, we are retrying..")
+          newRendered = await newRender({ prompt, width, height })
+        }
 
+        if (newRendered) {
+          // console.log("newRendered:", newRendered)
+          setRendered(panelId, newRendered)
+
+          if (newRendered.status === "completed") {
+            setGeneratingImages(panelId, false)
+            addToUpscaleQueue(panelId, newRendered)
+          }
+
+          // but we are still loading!
+        } else {
+          setRendered(panelId, {
+            renderId: "",
+            status: "pending",
+            assetUrl: "",
+            alt: "",
+            maskUrl: "",
+            error: "",
+            segments: []
+          })
+          setGeneratingImages(panelId, false)
+          return
+        }
+      })
+    }, enableRateLimiter ? 2000 * panel : 0)
+  }
+
+  // since this run in its own loop, we need to use references everywhere
+  // but perhaps this could be refactored
+  useEffect(() => {
+    startImageGeneration({ prompt, width, height })
   }, [prompt, width, height])
 
 
@@ -208,27 +217,10 @@ export function Panel({
     `print:border-[1.5px] print:shadow-none`,
   )
 
-  const fn = async () => {
-    if (!rendered.assetUrl || !ref.current) {
-      return
-    }
-
-    const result = await replaceTextInSpeechBubbles(
-      rendered.assetUrl,
-      "Lorem ipsum dolor sit amet, dolor ipsum. Sit amet? Ipsum! Dolor!!!"
-    )
-    if (result) {
-      setImageWithText(result)
-    }
-  }
-  fn()
-
-  }, [rendered.assetUrl, ref.current])
-  */
+  const handleReload = () => {
+    console.log(`Asked to reload panel ${panelId}`)
+    startImageGeneration({ prompt, width, height })
+  }
 
   if (prompt && !rendered.assetUrl) {
     return (
@@ -247,9 +239,11 @@ export function Panel({
       frameClassName,
       { "grayscale": preset.color === "grayscale" },
       className
+    )}
+    onMouseEnter={() => setMouseOver(true)}
+    onMouseLeave={() => setMouseOver(false)}
+    >
     <div className={cn(
-      ``,
       `bg-stone-50`,
       `border-stone-800`,
       `transition-all duration-200 ease-in-out`,
@@ -289,7 +283,7 @@ export function Panel({
       {rendered.assetUrl &&
       <img
         ref={ref}
+        src={rendered.assetUrl}
         width={width}
        height={height}
        alt={rendered.alt}
@@ -298,6 +292,31 @@ export function Panel({
          // showCaptions ? `-mt-11` : ''
        )}
      />}
+      {
+        // there is an issue, this env check doesn't work..
+        // process.env.NEXT_PUBLIC_CAN_REDRAW === "true" ?
+        <div
+          className={cn(`relative -mt-14 ml-4`,)}>
+          <div className="flex flex-row">
+            <div
+              onClick={rendered.status === "completed" ? handleReload : undefined}
+              className={cn(
+                `bg-stone-100 rounded-lg`,
+                `flex flex-row space-x-2 items-center`,
+                `py-2 px-3 cursor-pointer`,
+                `transition-all duration-200 ease-in-out`,
+                rendered.status === "completed" ? "opacity-95" : "opacity-50",
+                mouseOver && rendered.assetUrl ? `scale-95 hover:scale-100 hover:opacity-100`: `scale-0`
+              )}>
+              <RxReload
+                className="w-5 h-5"
+              />
+              <span className="text-base">Redraw</span>
+            </div>
+          </div>
+        </div>
+        //: null
+      }
    </div>
  )
 }
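
The panel component above staggers its requests (2000 * panel ms when NEXT_PUBLIC_ENABLE_RATE_LIMITER is on) and retries a failed newRender() exactly once. The same retry idea in isolation, as a generic helper; the helper name and its usage line are ours for illustration, not part of the commit:

// sketch: retry an async call a single time before giving up, mirroring the panel's newRender() retry
async function retryOnce<T>(fn: () => Promise<T>): Promise<T> {
  try {
    return await fn()
  } catch {
    return await fn() // second and final attempt
  }
}

// usage (hypothetical): const scene = await retryOnce(() => newRender({ prompt, width, height }))
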
src/app/interface/top-menu/index.tsx
CHANGED
@@ -50,7 +50,7 @@ export function TopMenu() {
   const isGeneratingStory = useStore(state => state.isGeneratingStory)
   const atLeastOnePanelIsBusy = useStore(state => state.atLeastOnePanelIsBusy)
   const isBusy = isGeneratingStory || atLeastOnePanelIsBusy
-
+
   const searchParams = useSearchParams()
 
   const requestedPreset = (searchParams.get('preset') as PresetName) || defaultPreset
src/app/main.tsx
CHANGED
@@ -38,6 +38,8 @@ export default function Main() {
     setWaitABitMore(false)
     setGeneratingStory(true)
 
+    const enableRateLimiter = `${process.env.NEXT_PUBLIC_ENABLE_RATE_LIMITER}` === "true"
+
     try {
 
       const llmResponse = await getStory({ preset, prompt })
@@ -72,7 +74,7 @@ export default function Main() {
       setTimeout(() => {
         setGeneratingStory(false)
         setWaitABitMore(false)
-      }, 12000)
+      }, enableRateLimiter ? 12000 : 0)
     }
   })
 }, [prompt, preset?.label]) // important: we need to react to preset changes too
src/app/queries/getStory.ts
CHANGED
@@ -37,15 +37,15 @@ export const getStory = async ({
   let result = ""
 
   try {
-    result = await predict(query)
+    result = `${await predict(query) || ""}`.trim()
+    if (!result.length) {
       throw new Error("empty result!")
     }
   } catch (err) {
     console.log(`prediction of the story failed, trying again..`)
     try {
-      result = await predict(query+".")
+      result = `${await predict(query+".") || ""}`.trim()
+      if (!result.length) {
         throw new Error("empty result!")
       }
     } catch (err) {
src/app/queries/getStyle.ts
CHANGED
@@ -30,15 +30,15 @@ export const getStory = async ({
 
   let result = ""
   try {
-    result = await predict(query)
+    result = `${await predict(query) || ""}`.trim()
+    if (!result.length) {
       throw new Error("empty result!")
     }
   } catch (err) {
     console.log(`prediction of the story failed, trying again..`)
     try {
-      result = await predict(query+".")
+      result = `${await predict(query+".") || ""}`.trim()
+      if (!result.length) {
         throw new Error("empty result!")
       }
     } catch (err) {
src/app/queries/predict.ts
CHANGED
@@ -1,140 +1,9 @@
 "use server"
 
-import { HfInference, HfInferenceEndpoint } from "@huggingface/inference"
-import type { ChatCompletionMessage } from "openai/resources/chat"
 import { LLMEngine } from "@/types"
+import { predictWithHuggingFace } from "./predictWithHuggingFace"
+import { predictWithOpenAI } from "./predictWithOpenAI"
-const hf = new HfInference(process.env.HF_API_TOKEN)
 
-// note: we always try "inference endpoint" first
 const llmEngine = `${process.env.LLM_ENGINE || ""}` as LLMEngine
-const inferenceEndpoint = `${process.env.HF_INFERENCE_ENDPOINT_URL || ""}`
-const inferenceModel = `${process.env.HF_INFERENCE_API_MODEL || ""}`
-const openaiApiKey = `${process.env.OPENAI_API_KEY || ""}`
-
-let hfie: HfInferenceEndpoint
-
-switch (llmEngine) {
-  case "INFERENCE_ENDPOINT":
-    if (inferenceEndpoint) {
-      console.log("Using a custom HF Inference Endpoint")
-      hfie = hf.endpoint(inferenceEndpoint)
-    } else {
-      const error = "No Inference Endpoint URL defined"
-      console.error(error)
-      throw new Error(error)
-    }
-    break;
-
-  case "INFERENCE_API":
-    if (inferenceModel) {
-      console.log("Using an HF Inference API Model")
-    } else {
-      const error = "No Inference API model defined"
-      console.error(error)
-      throw new Error(error)
-    }
-    break;
-
-  case "OPENAI":
-    if (openaiApiKey) {
-      console.log("Using an OpenAI API Key")
-    } else {
-      const error = "No OpenAI API key defined"
-      console.error(error)
-      throw new Error(error)
-    }
-    break;
-
-  default:
-    const error = "No Inference Endpoint URL or Inference API Model defined"
-    console.error(error)
-    throw new Error(error)
-}
-
-export async function predict(inputs: string) {
-
-  console.log(`predict: `, inputs)
-
-  if (llmEngine==="OPENAI") {
-    return predictWithOpenAI(inputs)
-  }
-
-  const api = llmEngine ==="INFERENCE_ENDPOINT" ? hfie : hf
-
-  let instructions = ""
-  try {
-    for await (const output of api.textGenerationStream({
-      model: llmEngine ==="INFERENCE_ENDPOINT" ? undefined : (inferenceModel || undefined),
-      inputs,
-      parameters: {
-        do_sample: true,
-        // we don't require a lot of token for our task
-        // but to be safe, let's count ~110 tokens per panel
-        max_new_tokens: 450, // 1150,
-        return_full_text: false,
-      }
-    })) {
-      instructions += output.token.text
-      process.stdout.write(output.token.text)
-      if (
-        instructions.includes("</s>") ||
-        instructions.includes("<s>") ||
-        instructions.includes("[INST]") ||
-        instructions.includes("[/INST]") ||
-        instructions.includes("<SYS>") ||
-        instructions.includes("</SYS>") ||
-        instructions.includes("<|end|>") ||
-        instructions.includes("<|assistant|>")
-      ) {
-        break
-      }
-    }
-  } catch (err) {
-    console.error(`error during generation: ${err}`)
-  }
-
-  // need to do some cleanup of the garbage the LLM might have gave us
-  return (
-    instructions
-    .replaceAll("<|end|>", "")
-    .replaceAll("<s>", "")
-    .replaceAll("</s>", "")
-    .replaceAll("[INST]", "")
-    .replaceAll("[/INST]", "")
-    .replaceAll("<SYS>", "")
-    .replaceAll("</SYS>", "")
-    .replaceAll("<|assistant|>", "")
-    .replaceAll('""', '"')
-  )
-}
-
-async function predictWithOpenAI(inputs: string) {
-  const openaiApiBaseUrl = `${process.env.OPENAI_API_BASE_URL || "https://api.openai.com/v1"}`
-  const openaiApiModel = `${process.env.OPENAI_API_MODEL || "gpt-3.5-turbo"}`
-
-  const openai = new OpenAI({
-    apiKey: openaiApiKey,
-    baseURL: openaiApiBaseUrl,
-  })
-
-  const messages: ChatCompletionMessage[] = [
-    { role: "system", content: inputs },
-  ]
-
-  try {
-    const res = await openai.chat.completions.create({
-      messages: messages,
-      stream: false,
-      model: openaiApiModel,
-      temperature: 0.8
-    })
-
-  } catch (err) {
-    console.error(`error during generation: ${err}`)
-  }
-}
 
+export const predict = llmEngine === "OPENAI" ? predictWithOpenAI : predictWithHuggingFace
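
With the new predict.ts, the backend is chosen once at module load from LLM_ENGINE and callers stay engine-agnostic. A small sketch of the calling convention used by getStory.ts / getStyle.ts; the wrapper function and the example query are made up:

// sketch: the caller does not know or care which LLM backend answers
import { predict } from "@/app/queries/predict"

async function getRawStory(query: string): Promise<string> {
  // same defensive pattern as getStory.ts: coerce undefined to "" and trim
  return `${await predict(query) || ""}`.trim()
}
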
src/app/queries/predictWithHuggingFace.ts
ADDED
@@ -0,0 +1,90 @@
+"use server"
+
+import { HfInference, HfInferenceEndpoint } from "@huggingface/inference"
+import { LLMEngine } from "@/types"
+
+const hf = new HfInference(process.env.AUTH_HF_API_TOKEN)
+
+const llmEngine = `${process.env.LLM_ENGINE || ""}` as LLMEngine
+const inferenceEndpoint = `${process.env.LLM_HF_INFERENCE_ENDPOINT_URL || ""}`
+const inferenceModel = `${process.env.LLM_HF_INFERENCE_API_MODEL || ""}`
+
+let hfie: HfInferenceEndpoint = hf
+
+switch (llmEngine) {
+  case "INFERENCE_ENDPOINT":
+    if (inferenceEndpoint) {
+      console.log("Using a custom HF Inference Endpoint")
+      hfie = hf.endpoint(inferenceEndpoint)
+    } else {
+      const error = "No Inference Endpoint URL defined"
+      console.error(error)
+      throw new Error(error)
+    }
+    break;
+
+  case "INFERENCE_API":
+    if (inferenceModel) {
+      console.log("Using an HF Inference API Model")
+    } else {
+      const error = "No Inference API model defined"
+      console.error(error)
+      throw new Error(error)
+    }
+    break;
+
+  default:
+    const error = "Please check your Hugging Face Inference API or Inference Endpoint settings"
+    console.error(error)
+    throw new Error(error)
+}
+
+const api = llmEngine === "INFERENCE_ENDPOINT" ? hfie : hf
+
+export async function predictWithHuggingFace(inputs: string) {
+  let instructions = ""
+  try {
+    for await (const output of api.textGenerationStream({
+      model: llmEngine === "INFERENCE_ENDPOINT" ? undefined : (inferenceModel || undefined),
+      inputs,
+      parameters: {
+        do_sample: true,
+        // we don't require a lot of token for our task
+        // but to be safe, let's count ~110 tokens per panel
+        max_new_tokens: 450, // 1150,
+        return_full_text: false,
+      }
+    })) {
+      instructions += output.token.text
+      process.stdout.write(output.token.text)
+      if (
+        instructions.includes("</s>") ||
+        instructions.includes("<s>") ||
+        instructions.includes("[INST]") ||
+        instructions.includes("[/INST]") ||
+        instructions.includes("<SYS>") ||
+        instructions.includes("</SYS>") ||
+        instructions.includes("<|end|>") ||
+        instructions.includes("<|assistant|>")
+      ) {
+        break
+      }
+    }
+  } catch (err) {
+    console.error(`error during generation: ${err}`)
+  }
+
+  // need to do some cleanup of the garbage the LLM might have gave us
+  return (
+    instructions
+    .replaceAll("<|end|>", "")
+    .replaceAll("<s>", "")
+    .replaceAll("</s>", "")
+    .replaceAll("[INST]", "")
+    .replaceAll("[/INST]", "")
+    .replaceAll("<SYS>", "")
+    .replaceAll("</SYS>", "")
+    .replaceAll("<|assistant|>", "")
+    .replaceAll('""', '"')
+  )
+}
src/app/queries/predictWithOpenAI.ts
ADDED
@@ -0,0 +1,33 @@
+"use server"
+
+import type { ChatCompletionMessage } from "openai/resources/chat"
+import OpenAI from "openai"
+
+const openaiApiKey = `${process.env.AUTH_OPENAI_API_KEY || ""}`
+
+export async function predictWithOpenAI(inputs: string) {
+  const openaiApiBaseUrl = `${process.env.LLM_OPENAI_API_BASE_URL || "https://api.openai.com/v1"}`
+  const openaiApiModel = `${process.env.LLM_OPENAI_API_MODEL || "gpt-3.5-turbo"}`
+
+  const openai = new OpenAI({
+    apiKey: openaiApiKey,
+    baseURL: openaiApiBaseUrl,
+  })
+
+  const messages: ChatCompletionMessage[] = [
+    { role: "system", content: inputs },
+  ]
+
+  try {
+    const res = await openai.chat.completions.create({
+      messages: messages,
+      stream: false,
+      model: openaiApiModel,
+      temperature: 0.8
+    })
+
+    return res.choices[0].message.content
+  } catch (err) {
+    console.error(`error during generation: ${err}`)
+  }
+}
src/types.ts
CHANGED
@@ -91,6 +91,8 @@ export type RenderingEngine =
   | "VIDEOCHAIN"
   | "OPENAI"
   | "REPLICATE"
+  | "INFERENCE_API"
+  | "INFERENCE_ENDPOINT"
 
 export type PostVisibility =
   | "featured" // featured by admins