Spaces:
Running
Running
Commit
·
2bf2b98
1
Parent(s):
3178417
Refactor image generation functions to use category instead of summary and improve prompt clarity
Browse files- image.py +14 -17
- main.py +11 -7
- post_blog.py +5 -2
image.py
CHANGED
@@ -6,8 +6,7 @@ import requests
|
|
6 |
from urllib.parse import quote
|
7 |
from PIL import Image
|
8 |
from g4f.client import Client
|
9 |
-
from g4f.Provider import RetryProvider,
|
10 |
-
|
11 |
|
12 |
def extract_summary(text):
|
13 |
text = text.replace("#", "").strip().lower()
|
@@ -22,27 +21,30 @@ def fix_base64_padding(data):
|
|
22 |
data += "=" * (4 - missing_padding)
|
23 |
return data
|
24 |
|
25 |
-
def generate_image(title, summary):
|
|
|
|
|
|
|
26 |
try:
|
27 |
negative="low quality, blurry, pixelated, bad anatomy, bad hands, three hands, three legs, bad arms, missing legs, missing arms, poorly drawn face, poorly rendered hands, bad face, fused face, cloned face, worst face, three crus, extra crus, fused crus, worst feet, three feet, fused feet, fused thigh, three thigh, extra thigh, worst thigh, missing fingers, extra fingers, ugly fingers, long fingers, bad composition, horn, extra eyes, huge eyes, 2girl, amputation, disconnected limbs, cartoon, cg, 3d, unreal, animate, cgi, render, artwork, illustration, 3d render, cinema 4d, artstation, octane render, mutated body parts, painting, oil painting, 2d, sketch, bad photography, bad photo, deviant art, aberrations, abstract, anime, black and white, collapsed, conjoined, creative, drawing, extra windows, harsh lighting, jpeg artifacts, low saturation, monochrome, multiple levels, overexposed, oversaturated, photoshop, rotten, surreal, twisted, UI, underexposed, unnatural, unreal engine, unrealistic, video game, deformed body features, NSFW, NUDE, vulgar, negative, unsuitable, inappropriate, offensive, revealing, sexual, explicit",
|
28 |
-
|
29 |
-
prompt = quote(f"[[IMAGE GENERATED SHOULD BE SAFE FOR WORK (SFW). NO NUDES OR ANYTHING REVEALING IMAGES NOR SHOULD THEY BE VULGAR OR UNSCIENTIFIC]] [[(({title.strip()}))]]: {extracted_summary.strip()}")
|
30 |
client = Client(
|
31 |
image_provider=RetryProvider(
|
32 |
-
providers=[
|
33 |
shuffle=True,
|
34 |
single_provider_retry=True,
|
35 |
max_retries=3,
|
36 |
)
|
37 |
)
|
38 |
img_data = client.images.generate(
|
39 |
-
model="flux",
|
40 |
prompt=prompt,
|
41 |
negative_prompt=negative,
|
42 |
response_format="b64_json",
|
43 |
width=1024,
|
44 |
height=576,
|
45 |
).data[0].b64_json
|
|
|
46 |
if img_data:
|
47 |
return f"data:image/png;base64,{img_data}"
|
48 |
return None
|
@@ -77,17 +79,12 @@ def upload_image(data_uri, api_key):
|
|
77 |
finally:
|
78 |
return image_url
|
79 |
|
80 |
-
def fetch_image(title, summary, api_key):
|
81 |
title = r"{}".format(title)
|
82 |
-
|
83 |
image_url = "https://i.ibb.co/TBJqggw/Image-Not-Found.jpg"
|
84 |
try:
|
85 |
-
data_uri = None
|
86 |
-
i = 1
|
87 |
-
while not data_uri and i <= 3:
|
88 |
-
print(f"Attempt {i} to fetch image")
|
89 |
-
data_uri = generate_image(title, summary)
|
90 |
-
i += 1
|
91 |
if data_uri:
|
92 |
base64_image = fix_base64_padding(data_uri.split(",")[1])
|
93 |
image_data = None
|
@@ -111,7 +108,7 @@ def fetch_image(title, summary, api_key):
|
|
111 |
|
112 |
if __name__ == "__main__":
|
113 |
title = "Accelerated cell-type-specific regulatory evolution of the Homo sapiens brain"
|
114 |
-
|
115 |
api_key = "aa38b04047587c609f5c7e22f9d840f0"
|
116 |
-
image_url = fetch_image(title, summary, api_key)
|
117 |
print(image_url)
|
|
|
6 |
from urllib.parse import quote
|
7 |
from PIL import Image
|
8 |
from g4f.client import Client
|
9 |
+
from g4f.Provider import RetryProvider, Blackbox, BlackboxCreateAgent, PollinationsAI, Airforce
|
|
|
10 |
|
11 |
def extract_summary(text):
|
12 |
text = text.replace("#", "").strip().lower()
|
|
|
21 |
data += "=" * (4 - missing_padding)
|
22 |
return data
|
23 |
|
24 |
+
def generate_image(title, category):
|
25 |
+
print("Generating image...")
|
26 |
+
import time
|
27 |
+
start = time.time()
|
28 |
try:
|
29 |
negative="low quality, blurry, pixelated, bad anatomy, bad hands, three hands, three legs, bad arms, missing legs, missing arms, poorly drawn face, poorly rendered hands, bad face, fused face, cloned face, worst face, three crus, extra crus, fused crus, worst feet, three feet, fused feet, fused thigh, three thigh, extra thigh, worst thigh, missing fingers, extra fingers, ugly fingers, long fingers, bad composition, horn, extra eyes, huge eyes, 2girl, amputation, disconnected limbs, cartoon, cg, 3d, unreal, animate, cgi, render, artwork, illustration, 3d render, cinema 4d, artstation, octane render, mutated body parts, painting, oil painting, 2d, sketch, bad photography, bad photo, deviant art, aberrations, abstract, anime, black and white, collapsed, conjoined, creative, drawing, extra windows, harsh lighting, jpeg artifacts, low saturation, monochrome, multiple levels, overexposed, oversaturated, photoshop, rotten, surreal, twisted, UI, underexposed, unnatural, unreal engine, unrealistic, video game, deformed body features, NSFW, NUDE, vulgar, negative, unsuitable, inappropriate, offensive, revealing, sexual, explicit",
|
30 |
+
prompt = quote(f"Generate accurate image representing the concept: ```((( {category}))) {title.strip() }```")
|
|
|
31 |
client = Client(
|
32 |
image_provider=RetryProvider(
|
33 |
+
providers=[PollinationsAI, Airforce],
|
34 |
shuffle=True,
|
35 |
single_provider_retry=True,
|
36 |
max_retries=3,
|
37 |
)
|
38 |
)
|
39 |
img_data = client.images.generate(
|
40 |
+
model="flux-pro",
|
41 |
prompt=prompt,
|
42 |
negative_prompt=negative,
|
43 |
response_format="b64_json",
|
44 |
width=1024,
|
45 |
height=576,
|
46 |
).data[0].b64_json
|
47 |
+
print(f"Image generated in {time.time() - start:.2f} seconds")
|
48 |
if img_data:
|
49 |
return f"data:image/png;base64,{img_data}"
|
50 |
return None
|
|
|
79 |
finally:
|
80 |
return image_url
|
81 |
|
82 |
+
def fetch_image(title, category, api_key):
|
83 |
title = r"{}".format(title)
|
84 |
+
category = r"{}".format(category)
|
85 |
image_url = "https://i.ibb.co/TBJqggw/Image-Not-Found.jpg"
|
86 |
try:
|
87 |
+
data_uri = generate_image(title, category)
|
|
|
|
|
|
|
|
|
|
|
88 |
if data_uri:
|
89 |
base64_image = fix_base64_padding(data_uri.split(",")[1])
|
90 |
image_data = None
|
|
|
108 |
|
109 |
if __name__ == "__main__":
|
110 |
title = "Accelerated cell-type-specific regulatory evolution of the Homo sapiens brain"
|
111 |
+
category = "This study investigates the accelerated regulatory evolution of gene expression in human brain cell types compared to chimpanzees. It reveals significant differences in gene expression, particularly in excitatory and inhibitory neurons, highlighting the role of regulatory evolution in human cognitive and behavioral traits"
|
112 |
api_key = "aa38b04047587c609f5c7e22f9d840f0"
|
113 |
+
image_url = fetch_image(title, category, api_key)
|
114 |
print(image_url)
|
main.py
CHANGED
@@ -75,13 +75,17 @@ def test(uaccess_key):
|
|
75 |
if uaccess_key != access_key:
|
76 |
return False
|
77 |
data = {
|
78 |
-
"
|
79 |
-
"
|
80 |
-
"
|
81 |
-
"
|
82 |
-
"
|
83 |
-
"
|
84 |
-
|
|
|
|
|
|
|
|
|
85 |
},
|
86 |
}
|
87 |
status = paper_data(json.dumps(data, ensure_ascii=False, indent=4))
|
|
|
75 |
if uaccess_key != access_key:
|
76 |
return False
|
77 |
data = {
|
78 |
+
"Economics": {
|
79 |
+
"2501.00578":{
|
80 |
+
"paper_id":"2501.00578",
|
81 |
+
"doi":"https://doi.org/10.48550/arXiv.2501.00578",
|
82 |
+
"title":"The Limits of Tolerance",
|
83 |
+
"category":"Economics",
|
84 |
+
"pdf_url":"https://arxiv.org/pdf/2501.00578",
|
85 |
+
"citation":"Miller, A. D. (2025). The limits of tolerance (Version 1). arXiv. https://doi.org/10.48550/ARXIV.2501.00578",
|
86 |
+
"summary":"## Summary\nThe text discusses the concept of community standards in relation to good faith terminations and the criminalization of obscene speech in the U.S. It introduces a model for aggregating community judgments represented as intervals, focusing on endpoint rules that can ensure strategyproofness and responsiveness in preference aggregation.\n\n## Highlights\n- Community standards are referenced for good faith terminations.\n- Obscene speech can be criminalized based on contemporary community standards.\n- A model is introduced where judgments are represented as intervals on the real line.\n- The p,q-th endpoint rule aggregates these judgments.\n- Endpoint rules allow for independent aggregation of upper and lower endpoints.\n- Generalized single-peaked preferences are defined for effective aggregation.\n- The maximal rule represents the most liberal approach to community standards.\n\n## Key Insights\n- Community Standards and Good Faith: The reliance on community standards for terminations reflects societal values, which can fluctuate over time, necessitating a flexible approach to legal interpretations.\n- Obscenity and Free Speech: The ability to criminalize obscene speech underlines the tension between individual rights and community morals, highlighting the complexities in defining obscenity.\n- Interval Representation: The model of representing judgments as intervals allows for a more nuanced understanding of community preferences, accommodating diverse opinions while still aiming for consensus.\n- Endpoint Rules: These rules facilitate a structured method for aggregating individual judgments, ensuring that preferences are considered without diminishing the integrity of the decision-making process.\n- Generalized Single-Peaked Preferences: This concept aids in simplifying the aggregation of preferences by establishing a clear peak, which enhances the efficiency of collective decision-making.\n- Strategyproofness in Voting: By ensuring that 
preferences are aggregated independently, the model maintains fairness and prevents manipulation, reinforcing the integrity of democratic processes.\n- Liberalism vs. Democracy: The discussion draws a connection between endpoint rules and broader political philosophies, suggesting that the balance between individual freedoms and collective decision-making is crucial for effective governance.",
|
87 |
+
"mindmap":"## Terminations of Good Faith\n- Reference to community standards of decency\n- Fairness and reasonableness in terminations\n\n## Obscenity and Free Speech\n- Criminalization based on contemporary community standards\n- Free speech protections may not apply\n\n## Community Standards Model\n- Standards represented by real line intervals\n- Judgment takes the form of intervals\n\n## Endpoint Rules\n- Defined by p-th lowest and q-th highest\n- Aggregate judgments based on endpoints\n\n## Strategyproofness Implications\n- Restrict allowable preferences for strategyproofness\n- Single-peaked preferences enable non-dictatorial rules\n\n## Generalized Single-Peaked Preferences\n- Unique preferred interval called the \"peak\"\n- Intervals between peak and third are preferred\n\n## Aggregation of Endpoints\n- Independent aggregation of upper and lower endpoints\n- Characterized by responsiveness and neutrality axioms\n\n## Median and Maximal Rules\n- Median rule is least permissive, satisfies conditions\n- Maximal rule allows any reasonable action\n\n## Phantom Intervals\n- Include half-bounded and fully unbounded intervals\n- Used in interval aggregation models\n\n## Axioms in Aggregation Methods\n- Seven axioms define endpoint rule characteristics\n- Ensures fair aggregation of individual judgments"
|
88 |
+
},
|
89 |
},
|
90 |
}
|
91 |
status = paper_data(json.dumps(data, ensure_ascii=False, indent=4))
|
post_blog.py
CHANGED
@@ -15,10 +15,9 @@ imgbb_api_key = os.getenv('IMGBB_API_KEY')
|
|
15 |
|
16 |
def generate_post_html(doi, title, category, summary, mindmap, citation):
|
17 |
doi = doi.split("https://")[-1]
|
18 |
-
summary = summary.replace("{", r'{').replace("}", r'}')
|
19 |
mindmap = mindmap.replace("{", r'{').replace("}", r'}')
|
20 |
citation = mistune.html(repr(citation.replace("&", "&").replace("```plaintext\n", "").replace("\n```", "").strip())[1:-1])
|
21 |
-
image = fetch_image(title, summary, imgbb_api_key)
|
22 |
html_summary = mistune.html(summary)
|
23 |
post = f"""
|
24 |
<div id="paper_post">
|
@@ -117,6 +116,10 @@ def create_post(doi, title, category, summary, mindmap, citation):
|
|
117 |
post_category = f"{category}"
|
118 |
try:
|
119 |
post_body, post_image = generate_post_html(doi, title, category, summary, mindmap, citation)
|
|
|
|
|
|
|
|
|
120 |
except Exception as e:
|
121 |
print(f"Error generating post: {e}")
|
122 |
return None, None, None, None
|
|
|
15 |
|
16 |
def generate_post_html(doi, title, category, summary, mindmap, citation):
|
17 |
doi = doi.split("https://")[-1]
|
|
|
18 |
mindmap = mindmap.replace("{", r'{').replace("}", r'}')
|
19 |
citation = mistune.html(repr(citation.replace("&", "&").replace("```plaintext\n", "").replace("\n```", "").strip())[1:-1])
|
20 |
+
image = fetch_image(title, category, imgbb_api_key)
|
21 |
html_summary = mistune.html(summary)
|
22 |
post = f"""
|
23 |
<div id="paper_post">
|
|
|
116 |
post_category = f"{category}"
|
117 |
try:
|
118 |
post_body, post_image = generate_post_html(doi, title, category, summary, mindmap, citation)
|
119 |
+
print("_____________________\n\n",title,"\n\n_____________________")
|
120 |
+
with open(f'{title.replace(" ", "_")}.html', 'w', encoding='utf-8') as f:
|
121 |
+
f.write(post_body)
|
122 |
+
exit()
|
123 |
except Exception as e:
|
124 |
print(f"Error generating post: {e}")
|
125 |
return None, None, None, None
|