Commit 73d3004 (verified) · Tonic committed · 1 parent: ec3aa65

Update app.py

Files changed (1): app.py (+7, -2)
app.py CHANGED
@@ -61,6 +61,8 @@ model = AutoModelForCausalLM.from_pretrained(
     trust_remote_code=True
 )
 
+config_json = model.config.to_dict()
+
 def format_model_info(config):
     info = []
     important_keys = [
@@ -69,9 +71,12 @@ def format_model_info(config):
     ]
     for key in important_keys:
         if key in config:
-            info.append(f"**{key}:** {config[key]}")
+            value = config[key]
+            # Convert torch_dtype to string representation if it exists
+            if key == "torch_dtype" and hasattr(value, "name"):
+                value = value.name
+            info.append(f"**{key}:** {value}")
     return "\n".join(info)
-
 @spaces.GPU
 def generate_response(system_prompt, user_prompt, temperature, max_new_tokens, top_p, repetition_penalty, top_k):
     # Construct the full prompt with system and user messages
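For context, a minimal self-contained sketch (not part of the commit) of how the updated format_model_info renders a config dict. The important_keys list is truncated in the diff, so the keys and sample values below are placeholders; in the app the dict comes from model.config.to_dict().

```python
def format_model_info(config):
    info = []
    # Placeholder keys; the real important_keys list is not shown in this diff.
    important_keys = ["model_type", "hidden_size", "torch_dtype"]
    for key in important_keys:
        if key in config:
            value = config[key]
            # Convert torch_dtype to string representation if it exists
            if key == "torch_dtype" and hasattr(value, "name"):
                value = value.name
            info.append(f"**{key}:** {value}")
    return "\n".join(info)

# Illustrative stand-in for config_json = model.config.to_dict()
sample_config = {"model_type": "llama", "hidden_size": 4096, "torch_dtype": "float16"}
print(format_model_info(sample_config))
# **model_type:** llama
# **hidden_size:** 4096
# **torch_dtype:** float16
```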