7eu7d7 committed on
Commit
32e7bd9
·
verified ·
1 Parent(s): 73e8cee

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -11
app.py CHANGED
@@ -17,9 +17,6 @@ def infer(image, candidate_labels):
17
  clip_out = clip_detector(image, candidate_labels=candidate_labels)
18
  return postprocess(clip_out)
19
 
20
- def update_top_classes(num_classes):
21
- return
22
-
23
  with gr.Blocks() as demo:
24
  gr.Markdown("# Compare CLIP and SigLIP")
25
  gr.Markdown("Compare the performance of CLIP and SigLIP on zero-shot classification in this Space 👇")
@@ -27,11 +24,10 @@ with gr.Blocks() as demo:
27
  with gr.Column():
28
  image_input = gr.Image(type="pil")
29
  text_input = gr.Textbox(label="Input a list of labels")
30
- slider = gr.Slider(minimum=3, maximum=20, step=1, value=3, label="Number of Top Classes")
31
  run_button = gr.Button("Run", visible=True)
32
 
33
  with gr.Column():
34
- clip_output = gr.Label(label = "CLIP Output", num_top_classes=3)
35
 
36
  examples = [["./baklava.jpg", "baklava, souffle, tiramisu"]]
37
  gr.Examples(
@@ -42,12 +38,6 @@ with gr.Blocks() as demo:
42
  fn=infer,
43
  cache_examples=True
44
  )
45
- slider.change(
46
- fn=update_top_classes,
47
- inputs=slider,
48
- outputs=clip_output,
49
- _js="(i) => ({ 'num_top_classes': i })"
50
- )
51
  run_button.click(fn=infer,
52
  inputs=[image_input, text_input],
53
  outputs=[clip_output,
 
17
  clip_out = clip_detector(image, candidate_labels=candidate_labels)
18
  return postprocess(clip_out)
19
 
 
 
 
20
  with gr.Blocks() as demo:
21
  gr.Markdown("# Compare CLIP and SigLIP")
22
  gr.Markdown("Compare the performance of CLIP and SigLIP on zero-shot classification in this Space 👇")
 
24
  with gr.Column():
25
  image_input = gr.Image(type="pil")
26
  text_input = gr.Textbox(label="Input a list of labels")
 
27
  run_button = gr.Button("Run", visible=True)
28
 
29
  with gr.Column():
30
+ clip_output = gr.Label(label = "CLIP Output", num_top_classes=15)
31
 
32
  examples = [["./baklava.jpg", "baklava, souffle, tiramisu"]]
33
  gr.Examples(
 
38
  fn=infer,
39
  cache_examples=True
40
  )
 
 
 
 
 
 
41
  run_button.click(fn=infer,
42
  inputs=[image_input, text_input],
43
  outputs=[clip_output,