Spaces:
Running
on
CPU Upgrade
Running
on
CPU Upgrade
Commit
•
9bfb451
1
Parent(s):
faf4ba4
add new "fast LLM" optimization
Browse files
src/app/main.tsx
CHANGED
@@ -7,7 +7,6 @@ import { TopMenu } from "./interface/top-menu"
|
|
7 |
import { fonts } from "@/lib/fonts"
|
8 |
import { useStore } from "./store"
|
9 |
import { Zoom } from "./interface/zoom"
|
10 |
-
import { getStory } from "./queries/getStory"
|
11 |
import { BottomBar } from "./interface/bottom-bar"
|
12 |
import { Page } from "./interface/page"
|
13 |
import { GeneratedPanel } from "@/types"
|
@@ -88,7 +87,8 @@ export default function Main() {
|
|
88 |
nbPanelsToGenerate,
|
89 |
existingPanels,
|
90 |
})
|
91 |
-
console.log("LLM generated some new panels:"
|
|
|
92 |
|
93 |
existingPanels.push(...candidatePanels)
|
94 |
|
@@ -114,7 +114,7 @@ export default function Main() {
|
|
114 |
}
|
115 |
|
116 |
// update the frontend
|
117 |
-
console.log("updating the frontend..")
|
118 |
setCaptions(newCaptions)
|
119 |
setPanels(newPanelsPrompts)
|
120 |
|
|
|
7 |
import { fonts } from "@/lib/fonts"
|
8 |
import { useStore } from "./store"
|
9 |
import { Zoom } from "./interface/zoom"
|
|
|
10 |
import { BottomBar } from "./interface/bottom-bar"
|
11 |
import { Page } from "./interface/page"
|
12 |
import { GeneratedPanel } from "@/types"
|
|
|
87 |
nbPanelsToGenerate,
|
88 |
existingPanels,
|
89 |
})
|
90 |
+
console.log("LLM generated some new panels:")
|
91 |
+
console.table(candidatePanels)
|
92 |
|
93 |
existingPanels.push(...candidatePanels)
|
94 |
|
|
|
114 |
}
|
115 |
|
116 |
// update the frontend
|
117 |
+
// console.log("updating the frontend..")
|
118 |
setCaptions(newCaptions)
|
119 |
setPanels(newPanelsPrompts)
|
120 |
|
src/app/queries/getStory.ts
CHANGED
@@ -17,7 +17,7 @@ export const getStory = async ({
|
|
17 |
prompt: string;
|
18 |
nbTotalPanels: number;
|
19 |
}): Promise<GeneratedPanels> => {
|
20 |
-
|
21 |
|
22 |
// In case you need to quickly debug the RENDERING engine you can uncomment this:
|
23 |
// return mockGeneratedPanels
|
|
|
17 |
prompt: string;
|
18 |
nbTotalPanels: number;
|
19 |
}): Promise<GeneratedPanels> => {
|
20 |
+
throw new Error("legacy, deprecated")
|
21 |
|
22 |
// In case you need to quickly debug the RENDERING engine you can uncomment this:
|
23 |
// return mockGeneratedPanels
|
src/app/queries/getStoryContinuation.ts
CHANGED
@@ -32,23 +32,22 @@ export const getStoryContinuation = async ({
|
|
32 |
existingPanels,
|
33 |
})
|
34 |
|
35 |
-
console.log("LLM responded with
|
36 |
-
|
37 |
|
38 |
// we clean the output from the LLM
|
39 |
// most importantly, we need to adjust the panel index,
|
40 |
// to start from where we last finished
|
41 |
-
for (let
|
42 |
panels.push({
|
43 |
-
panel:
|
44 |
-
instructions: `${panelCandidates[
|
45 |
-
caption: `${panelCandidates[
|
46 |
})
|
47 |
}
|
48 |
|
49 |
} catch (err) {
|
50 |
-
console.log("LLM step failed due to:", err)
|
51 |
-
console.log("we are now switching to a degraded mode, using 4 similar panels")
|
52 |
panels = []
|
53 |
for (let p = startAt; p < endAt; p++) {
|
54 |
panels.push({
|
@@ -61,7 +60,7 @@ export const getStoryContinuation = async ({
|
|
61 |
caption: "(Sorry, LLM generation failed: using degraded mode)"
|
62 |
})
|
63 |
}
|
64 |
-
console.error(err)
|
65 |
} finally {
|
66 |
return panels
|
67 |
}
|
|
|
32 |
existingPanels,
|
33 |
})
|
34 |
|
35 |
+
// console.log("LLM responded with panelCandidates:", panelCandidates)
|
|
|
36 |
|
37 |
// we clean the output from the LLM
|
38 |
// most importantly, we need to adjust the panel index,
|
39 |
// to start from where we last finished
|
40 |
+
for (let i = 0; i < nbPanelsToGenerate; i++) {
|
41 |
panels.push({
|
42 |
+
panel: startAt + i,
|
43 |
+
instructions: `${panelCandidates[i]?.instructions || ""}`,
|
44 |
+
caption: `${panelCandidates[i]?.caption || ""}`,
|
45 |
})
|
46 |
}
|
47 |
|
48 |
} catch (err) {
|
49 |
+
// console.log("LLM step failed due to:", err)
|
50 |
+
// console.log("we are now switching to a degraded mode, using 4 similar panels")
|
51 |
panels = []
|
52 |
for (let p = startAt; p < endAt; p++) {
|
53 |
panels.push({
|
|
|
60 |
caption: "(Sorry, LLM generation failed: using degraded mode)"
|
61 |
})
|
62 |
}
|
63 |
+
// console.error(err)
|
64 |
} finally {
|
65 |
return panels
|
66 |
}
|
src/app/queries/predictNextPanels.ts
CHANGED
@@ -18,6 +18,7 @@ export const predictNextPanels = async ({
|
|
18 |
nbPanelsToGenerate: number;
|
19 |
existingPanels: GeneratedPanel[];
|
20 |
}): Promise<GeneratedPanel[]> => {
|
|
|
21 |
// throw new Error("Planned maintenance")
|
22 |
|
23 |
// In case you need to quickly debug the RENDERING engine you can uncomment this:
|
|
|
18 |
nbPanelsToGenerate: number;
|
19 |
existingPanels: GeneratedPanel[];
|
20 |
}): Promise<GeneratedPanel[]> => {
|
21 |
+
// console.log("predictNextPanels: ", { prompt, nbPanelsToGenerate })
|
22 |
// throw new Error("Planned maintenance")
|
23 |
|
24 |
// In case you need to quickly debug the RENDERING engine you can uncomment this:
|
src/app/queries/predictWithHuggingFace.ts
CHANGED
@@ -58,7 +58,7 @@ export async function predict(inputs: string, nbPanels: number): Promise<string>
|
|
58 |
}
|
59 |
})) {
|
60 |
instructions += output.token.text
|
61 |
-
process.stdout.write(output.token.text)
|
62 |
if (
|
63 |
instructions.includes("</s>") ||
|
64 |
instructions.includes("<s>") ||
|
@@ -78,7 +78,7 @@ export async function predict(inputs: string, nbPanels: number): Promise<string>
|
|
78 |
}
|
79 |
}
|
80 |
} catch (err) {
|
81 |
-
console.error(`error during generation: ${err}`)
|
82 |
|
83 |
// a common issue with Llama-2 might be that the model receives too many requests
|
84 |
if (`${err}` === "Error: Model is overloaded") {
|
|
|
58 |
}
|
59 |
})) {
|
60 |
instructions += output.token.text
|
61 |
+
// process.stdout.write(output.token.text)
|
62 |
if (
|
63 |
instructions.includes("</s>") ||
|
64 |
instructions.includes("<s>") ||
|
|
|
78 |
}
|
79 |
}
|
80 |
} catch (err) {
|
81 |
+
// console.error(`error during generation: ${err}`)
|
82 |
|
83 |
// a common issue with Llama-2 might be that the model receives too many requests
|
84 |
if (`${err}` === "Error: Model is overloaded") {
|
src/lib/dirtyLLMResponseCleaner.ts
CHANGED
@@ -2,12 +2,14 @@ export function dirtyLLMResponseCleaner(input: string): string {
|
|
2 |
let str = (
|
3 |
`${input || ""}`
|
4 |
// a summary of all the weird hallucinations I saw it make..
|
|
|
5 |
.replaceAll(`"]`, `"}]`)
|
6 |
-
.replaceAll(
|
7 |
-
.replaceAll(
|
8 |
-
|
9 |
-
|
10 |
-
.
|
|
|
11 |
.replaceAll("}}", "}")
|
12 |
.replaceAll("]]", "]")
|
13 |
.replaceAll("[[", "[")
|
|
|
2 |
let str = (
|
3 |
`${input || ""}`
|
4 |
// a summary of all the weird hallucinations I saw it make..
|
5 |
+
.replaceAll(`"\n`, `",\n`) // fix missing commas at the end of a line
|
6 |
.replaceAll(`"]`, `"}]`)
|
7 |
+
.replaceAll(/"\S*,?\S*\]/gi, `"}]`)
|
8 |
+
.replaceAll(/"\S*,?\S*\}\S*]/gi, `"}]`)
|
9 |
+
|
10 |
+
// this removes the trailing commas (which are valid in JS but not JSON)
|
11 |
+
.replace(/,(?=\s*?[\}\]])/g, '')
|
12 |
+
|
13 |
.replaceAll("}}", "}")
|
14 |
.replaceAll("]]", "]")
|
15 |
.replaceAll("[[", "[")
|