kcz358 committed
Commit c64cd4d · Parent: ad728df

Update basic changes

Files changed (4)
  1. .gitignore +2 -0
  2. Makefile +13 -0
  3. app.py +32 -17
  4. requirements.txt +3 -1
.gitignore ADDED
@@ -0,0 +1,2 @@
+
+__pycache__
Makefile ADDED
@@ -0,0 +1,13 @@
+.PHONY: style format
+
+
+style:
+	python -m black --line-length 119 .
+	python -m isort .
+	ruff check --fix .
+
+
+quality:
+	python -m black --check --line-length 119 .
+	python -m isort --check-only .
+	ruff check .
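
The two targets wrap standard formatter invocations: `style` rewrites files in place, while `quality` runs the same tools in check-only mode. As a minimal Python sketch of what `make style` executes, assuming black, isort, and ruff are installed in the environment (this commit does not add them to requirements.txt):

import subprocess

# Commands mirrored from the `style` target above; black, isort, and ruff are
# assumed to be installed separately, since they are not pinned in this commit.
STYLE_COMMANDS = [
    ["python", "-m", "black", "--line-length", "119", "."],
    ["python", "-m", "isort", "."],
    ["ruff", "check", "--fix", "."],
]

for cmd in STYLE_COMMANDS:
    subprocess.run(cmd, check=True)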
app.py CHANGED
@@ -1,5 +1,6 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
+from sae_auto_interp.sae import Sae

 """
 For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
@@ -40,24 +41,38 @@ def respond(
     yield response


+CITATION_BUTTON_TEXT = """
+@misc{zhang2024largemultimodalmodelsinterpret,
+      title={Large Multi-modal Models Can Interpret Features in Large Multi-modal Models},
+      author={Kaichen Zhang and Yifei Shen and Bo Li and Ziwei Liu},
+      year={2024},
+      eprint={2411.14982},
+      archivePrefix={arXiv},
+      primaryClass={cs.CV},
+      url={https://arxiv.org/abs/2411.14982},
+}
 """
-For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
-"""
-demo = gr.ChatInterface(
-    respond,
-    additional_inputs=[
-        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
-        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
-        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
-        gr.Slider(
-            minimum=0.1,
-            maximum=1.0,
-            value=0.95,
-            step=0.05,
-            label="Top-p (nucleus sampling)",
-        ),
-    ],
-)
+
+
+with gr.Blocks() as demo:
+    gr.Markdown(
+        """
+        # Large Multi-modal Models Can Interpret Features in Large Multi-modal Models
+
+        🔍 [ArXiv Paper](https://arxiv.org/abs/2411.14982) | 🏠 [LMMs-Lab Homepage](https://lmms-lab.framer.ai) | 🤗 [Huggingface Collections](https://huggingface.co/collections/lmms-lab/llava-sae-674026e4e7bc8c29c70bc3a3)
+        """
+    )
+
+    with gr.Tabs(elem_classes="tab-buttons") as tabs:
+        with gr.TabItem("Visualization of Activations", elem_id="visualization", id=0):
+            image = gr.Image()
+
+        with gr.TabItem("Steering Model", elem_id="steering", id=2):
+            chatbot = gr.Chatbot()
+
+    with gr.Row():
+        with gr.Accordion("📙 Citation", open=False):
+            gr.Markdown("```bib\n" + CITATION_BUTTON_TEXT + "\n```")


 if __name__ == "__main__":
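
Both new tabs are placeholders in this commit: the visualization tab holds an empty gr.Image(), the steering tab an unwired gr.Chatbot(), and the imported Sae class is not used yet. As a hedged sketch only, here is one way the steering tab could later reuse the existing `respond` streaming generator from app.py, placed inside the existing `with gr.TabItem("Steering Model", ...)` block; the helper names and the default system message and sampling values are illustrative, not part of the commit:

# Hypothetical wiring for the "Steering Model" tab placeholder; `respond` is the
# streaming chat generator already defined earlier in app.py. Helper names and
# the default parameter values below are illustrative assumptions.
chatbot = gr.Chatbot()
msg = gr.Textbox(label="Message")

def user_turn(message, history):
    # Append the user turn; the assistant reply is streamed in bot_turn.
    return "", history + [[message, None]]

def bot_turn(history):
    user_message = history[-1][0]
    for partial in respond(user_message, history[:-1], "You are a friendly Chatbot.", 512, 0.7, 0.95):
        history[-1][1] = partial  # overwrite the last turn with the growing reply
        yield history

msg.submit(user_turn, [msg, chatbot], [msg, chatbot]).then(bot_turn, chatbot, chatbot)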
requirements.txt CHANGED
@@ -1 +1,3 @@
-huggingface_hub==0.25.2
+huggingface_hub==0.25.2
+gradio
+sae_auto_interp @ git+https://github.com/EvolvingLMMs-Lab/multimodal-sae
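
The git dependency is what provides the `Sae` class imported at the top of app.py. A small sanity check, assuming the environment was set up with `pip install -r requirements.txt`:

# Verify that the pinned and git-sourced packages resolve; only names that this
# commit actually imports or pins are checked here.
import gradio
import huggingface_hub
from sae_auto_interp.sae import Sae  # provided by the multimodal-sae git dependency

print("gradio", gradio.__version__)
print("huggingface_hub", huggingface_hub.__version__)  # expected: 0.25.2
print("Sae class:", Sae)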