John6666 committed on
Commit
def1593
·
verified ·
1 Parent(s): ee3e0ca

Upload convert_url_to_diffusers_multi_gr.py

Browse files
convert_url_to_diffusers_multi_gr.py CHANGED
@@ -293,7 +293,7 @@ def load_and_save_pipeline(pipe, model_type: str, url: str, new_file: str, new_d
293
  pipe.save_pretrained(new_dir)
294
  except Exception as e:
295
  print(f"Failed to load pipeline. {e}")
296
- raise Exception("Failed to load pipeline.") from e
297
  finally:
298
  return pipe
299
 
@@ -305,20 +305,22 @@ def convert_url_to_diffusers(url: str, civitai_key: str="", is_upload_sf: bool=F
305
  hf_token = get_token()
306
  progress(0, desc="Start converting...")
307
  temp_dir = TEMP_DIR
 
308
 
309
  if is_repo_name(url) and is_repo_exists(url):
310
  new_file = url
311
  model_type = mtype
312
  else:
313
  new_file = get_download_file(temp_dir, url, civitai_key)
314
- if not new_file or Path(new_file).suffix not in set([".safetensors", ".ckpt", ".bin", ".sft"]): raise Exception(f"Safetensors file not found: {url}")
 
 
315
  model_type = get_model_type_from_key(new_file)
316
  new_dir = Path(new_file).stem.replace(" ", "_").replace(",", "_").replace(".", "_") #
317
 
318
  kwargs = {}
319
  dkwargs = {}
320
  if dtype != DTYPE_DEFAULT: dkwargs["torch_dtype"] = get_process_dtype(dtype, model_type)
321
- pipe = None
322
 
323
  print(f"Model type: {model_type} / VAE: {vae} / CLIP: {clip} / T5: {t5} / Scheduler: {scheduler} / dtype: {dtype} / EMA: {ema} / Base repo: {base_repo} / LoRAs: {lora_dict}")
324
 
@@ -387,7 +389,7 @@ def convert_url_to_diffusers(url: str, civitai_key: str="", is_upload_sf: bool=F
387
  return new_dir
388
  except Exception as e:
389
  print(f"Failed to convert. {e}")
390
- raise Exception("Failed to convert.") from e
391
  finally:
392
  del pipe
393
  torch.cuda.empty_cache()
 
293
  pipe.save_pretrained(new_dir)
294
  except Exception as e:
295
  print(f"Failed to load pipeline. {e}")
296
+ raise Exception(f"Failed to load pipeline. {e}") from e
297
  finally:
298
  return pipe
299
 
 
305
  hf_token = get_token()
306
  progress(0, desc="Start converting...")
307
  temp_dir = TEMP_DIR
308
+ pipe = None
309
 
310
  if is_repo_name(url) and is_repo_exists(url):
311
  new_file = url
312
  model_type = mtype
313
  else:
314
  new_file = get_download_file(temp_dir, url, civitai_key)
315
+ if not new_file or Path(new_file).suffix.lower() not in set([".safetensors", ".ckpt", ".bin", ".sft"]):
316
+ safe_clean(new_file)
317
+ raise Exception(f"Safetensors file not found: {url}")
318
  model_type = get_model_type_from_key(new_file)
319
  new_dir = Path(new_file).stem.replace(" ", "_").replace(",", "_").replace(".", "_") #
320
 
321
  kwargs = {}
322
  dkwargs = {}
323
  if dtype != DTYPE_DEFAULT: dkwargs["torch_dtype"] = get_process_dtype(dtype, model_type)
 
324
 
325
  print(f"Model type: {model_type} / VAE: {vae} / CLIP: {clip} / T5: {t5} / Scheduler: {scheduler} / dtype: {dtype} / EMA: {ema} / Base repo: {base_repo} / LoRAs: {lora_dict}")
326
 
 
389
  return new_dir
390
  except Exception as e:
391
  print(f"Failed to convert. {e}")
392
+ raise Exception(f"Failed to convert. {e}") from e
393
  finally:
394
  del pipe
395
  torch.cuda.empty_cache()