Spaces:
Runtime error
Runtime error
Ahsen Khaliq
committed on
Commit
•
0ad3aac
1
Parent(s):
2eba3b6
Update app.py
Browse files
app.py
CHANGED
@@ -105,10 +105,14 @@ zs = torch.randn([10000, G.mapping.z_dim], device=device)
|
|
105 |
w_stds = G.mapping(zs, None).std(0)
|
106 |
|
107 |
|
108 |
-
def inference(text):
|
109 |
all_frames = []
|
110 |
target = clip_model.embed_text(text)
|
111 |
-
|
|
|
|
|
|
|
|
|
112 |
seed = 2
|
113 |
tf = Compose([
|
114 |
Resize(224),
|
@@ -165,7 +169,7 @@ article = "<p style='text-align: center'>colab by https://twitter.com/nshepperd1
|
|
165 |
examples = [['elon musk']]
|
166 |
gr.Interface(
|
167 |
inference,
|
168 |
-
"text",
|
169 |
[gr.outputs.Image(type="pil", label="Output"),"playable_video"],
|
170 |
title=title,
|
171 |
description=description,
|
|
|
105 |
w_stds = G.mapping(zs, None).std(0)
|
106 |
|
107 |
|
108 |
+
def inference(text,steps,image):
|
109 |
all_frames = []
|
110 |
target = clip_model.embed_text(text)
|
111 |
+
if image:
|
112 |
+
target = embed_image(image.name)
|
113 |
+
else:
|
114 |
+
target = clip_model.embed_text(text)
|
115 |
+
steps = steps
|
116 |
seed = 2
|
117 |
tf = Compose([
|
118 |
Resize(224),
|
|
|
169 |
examples = [['elon musk']]
|
170 |
gr.Interface(
|
171 |
inference,
|
172 |
+
["text",gr.inputs.Slider(minimum=50, maximum=200, step=1, default=150, label="steps"),gr.inputs.Image(type="pil", label="Image (Optional)", optional=True)],
|
173 |
[gr.outputs.Image(type="pil", label="Output"),"playable_video"],
|
174 |
title=title,
|
175 |
description=description,
|