Update app.py
app.py
CHANGED
@@ -62,7 +62,7 @@ def process_model(model_id, q_method, latest, maintainer, oauth_token: gr.OAuthT
 
     f = open("{model_file}", "w")
    print(f.write("From {model_id}"))
-    ollama_conversion = f"ollama create
+    ollama_conversion = f"ollama create -f {model_file} {OLLAMA_USERNAME}/{ollama_model_name}:{q_method}"
 
 
     ollama_conversion_result = subprocess.run(ollama_conversion, shell=True, capture_output=True)
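Taken together, this hunk writes a Modelfile and shells out to `ollama create`. Below is a minimal sketch of that step, not the Space's actual implementation: the helper name and signature are hypothetical, while `model_file`, `model_id`, `OLLAMA_USERNAME`, `ollama_model_name`, and `q_method` are the names visible in the diff. Note that the committed `open("{model_file}", "w")` call is a plain string rather than an f-string, so as written it creates a file literally named `{model_file}`.

```python
import subprocess

def create_ollama_model(model_file: str, model_id: str, ollama_username: str,
                        ollama_model_name: str, q_method: str) -> None:
    """Sketch of the Modelfile-write + `ollama create` step (assumed helper)."""
    # Use real f-strings here, unlike the committed open("{model_file}", "w"),
    # which would write a file literally named {model_file}.
    with open(model_file, "w") as f:
        f.write(f"FROM {model_id}\n")

    # Same command shape as the new line introduced in this hunk.
    ollama_conversion = (
        f"ollama create -f {model_file} {ollama_username}/{ollama_model_name}:{q_method}"
    )
    result = subprocess.run(ollama_conversion, shell=True, capture_output=True, text=True)
    if result.returncode != 0:
        raise Exception(f"Error converting to Ollama: {result.stderr}")
    print("Model converted to Ollama successfully!")
```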
@@ -71,9 +71,9 @@ def process_model(model_id, q_method, latest, maintainer, oauth_token: gr.OAuthT
         raise Exception(f"Error converting to Ollama: {ollama_conversion_result.stderr}")
     print("Model converted to Ollama successfully!")
     if maintainer == True:
-        ollama_push = f"ollama push
+        ollama_push = f"ollama push {OLLAMA_USERNAME}/{model_name}:{q_method}"
     else:
-        ollama_push = f"ollama push
+        ollama_push = f"ollama push {OLLAMA_USERNAME}/{ollama_model_name}:{q_method}"
     ollama_push_result = subprocess.run(ollama_push, shell=True, capture_output=True)
     print(ollama_push_result)
     if ollama_push_result.returncode != 0:
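The two branches in this hunk differ only in which name is interpolated into the push command: `model_name` when the caller is the maintainer, `ollama_model_name` otherwise. A small hypothetical helper (the function name and signature are assumptions, not part of app.py) makes that explicit:

```python
import subprocess

def push_ollama_tag(ollama_username: str, maintainer: bool,
                    model_name: str, ollama_model_name: str, q_method: str) -> None:
    # The maintainer branch pushes model_name, the public branch ollama_model_name,
    # mirroring the if/else in the hunk above.
    target = model_name if maintainer else ollama_model_name
    ollama_push = f"ollama push {ollama_username}/{target}:{q_method}"
    result = subprocess.run(ollama_push, shell=True, capture_output=True, text=True)
    print(result)
    if result.returncode != 0:
        raise Exception(f"Error pushing to Ollama: {result.stderr}")
    print("Model pushed to Ollama library successfully!")
```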
@@ -81,16 +81,16 @@ def process_model(model_id, q_method, latest, maintainer, oauth_token: gr.OAuthT
     print("Model pushed to Ollama library successfully!")
 
     if latest == True:
-        ollama_copy = f"ollama cp
+        ollama_copy = f"ollama cp {OLLAMA_USERNAME}/{model_id}:{q_method} {OLLAMA_USERNAME}/{model_id}:latest"
         ollama_copy_result = subprocess.run(ollama_copy, shell=True, capture_output=True)
         print(ollama_copy_result)
         if ollama_copy_result.returncode != 0:
             raise Exception(f"Error converting to Ollama: {ollama_push_result.stderr}")
         print("Model pushed to Ollama library successfully!")
         if maintainer == True:
-            llama_push_latest = f"ollama push
+            llama_push_latest = f"ollama push {OLLAMA_USERNAME}/{model_name}:latest"
         else:
-            ollama_push_latest = f"ollama push
+            ollama_push_latest = f"ollama push {OLLAMA_USERNAME}/{ollama_model_name}:latest"
         ollama_push_latest_result = subprocess.run(ollama_push_latest, shell=True, capture_output=True)
         print(ollama_push_latest_result)
         if ollama_push_latest_result.returncode != 0:
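Two details in this `:latest` hunk are worth flagging from the diff itself: the new `ollama cp` line interpolates `model_id` on both sides of the copy even though the pushes use `model_name`/`ollama_model_name`, and the maintainer branch assigns `llama_push_latest` (missing the leading "o") while `subprocess.run` is later given `ollama_push_latest`, which would likely raise a NameError when `maintainer` is true. The sketch below is a hedged illustration of the apparent intent, collapsing those variables into a single hypothetical `model_ref` parameter; it is not the Space's code.

```python
import subprocess

def promote_to_latest(ollama_username: str, model_ref: str, q_method: str) -> None:
    # Re-tag the quantized model as :latest, then push it, following the hunk above.
    ollama_copy = (
        f"ollama cp {ollama_username}/{model_ref}:{q_method} {ollama_username}/{model_ref}:latest"
    )
    copy_result = subprocess.run(ollama_copy, shell=True, capture_output=True, text=True)
    print(copy_result)
    if copy_result.returncode != 0:
        raise Exception(f"Error copying Ollama tag: {copy_result.stderr}")

    # One consistently named variable avoids the llama_push_latest /
    # ollama_push_latest mismatch visible in the maintainer branch.
    ollama_push_latest = f"ollama push {ollama_username}/{model_ref}:latest"
    push_result = subprocess.run(ollama_push_latest, shell=True, capture_output=True, text=True)
    print(push_result)
    if push_result.returncode != 0:
        raise Exception(f"Error pushing :latest to Ollama: {push_result.stderr}")
    print("Model pushed to Ollama library successfully!")
```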