guy-dar committed
Commit 32d2622 · 1 Parent(s): 19dcfbf

improve style and avoid cache mutation

Files changed (1)
  1. app.py +9 -10
app.py CHANGED
@@ -12,23 +12,22 @@ def load_model(model_name):
     return model, model_params, tokenizer
 
 
-model_name = st.selectbox("Select a model: ", options=['gpt2', 'gpt2-medium', 'gpt2-large'])
+col1, col2, col3, *_ = st.columns(5)
+model_name = col1.selectbox("Select a model: ", options=['gpt2', 'gpt2-medium', 'gpt2-large'])
 model, model_params, tokenizer = load_model(model_name)
-
-col1, col2, *_ = st.columns(5)
-neuron_layer = col1.text_input("Layer: ", value='0')
-neuron_dim = col2.text_input("Dim: ", value='0')
+neuron_layer = col2.text_input("Layer: ", value='0')
+neuron_dim = col3.text_input("Dim: ", value='0')
 
 neurons = model_params.K_heads[int(neuron_layer), int(neuron_dim)]
 prompt = st.text_area("Prompt: ")
 submitted = st.button("Send!")
 
-
 if submitted:
+    model, model_params, tokenizer = map(deepcopy, (model, model_params, tokenizer))
     decoded = speaking_probe(model, model_params, tokenizer, prompt, *neurons, num_generations=1,
-                             repetition_penalty=2.,
-                             num_beams=3, min_length=1, do_sample=True,
-                             max_new_tokens=100)
+                             repetition_penalty=2., num_generations=3,
+                             min_length=1, do_sample=True,
+                             max_new_tokens=100)
 
     for text in decoded:
-        st.code(text, language=None)
+        st.text(text)
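
The added deepcopy line is the "avoid cache mutation" half of this commit: load_model is presumably cached by Streamlit (its decorator is outside this hunk), so model, model_params and tokenizer are the shared cached instances, and copying them before calling speaking_probe keeps any in-place changes the probe makes out of that cache. Below is a minimal, self-contained sketch of the same pattern; the st.cache_resource decorator and the add_tokens mutation are illustrative assumptions, not taken from app.py:

# Sketch only: this loader is a stand-in, not the app's actual load_model.
from copy import deepcopy

import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer


@st.cache_resource  # assumed decorator: one shared instance per model_name
def load_model(model_name):
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name)
    return model, tokenizer


model_name = st.selectbox("Select a model: ", options=['gpt2', 'gpt2-medium', 'gpt2-large'])
model, tokenizer = load_model(model_name)

if st.button("Send!"):
    # Work on copies so in-place edits (added tokens, resized embeddings, ...)
    # never reach the cached objects shared across reruns and sessions.
    model, tokenizer = map(deepcopy, (model, tokenizer))
    tokenizer.add_tokens(['<neuron>'])              # illustrative mutation
    model.resize_token_embeddings(len(tokenizer))
    st.text(f"local vocab size: {len(tokenizer)}")

Without the copy, the first run that mutates the model would leak those edits into every later rerun that hits the same cache entry; the deepcopy keeps the change local to the current request at the cost of one extra in-memory copy of a GPT-2-sized model.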