ChuxiJ committed on
Commit
7403460
·
1 Parent(s): 9d632db

fix: download unified repo for shared components when using independent model repos

Browse files

XL models are in separate repos without VAE/text encoder. Ensure the
unified repo (ACE-Step/Ace-Step1.5) is downloaded first to provide
shared components (vae, Qwen3-Embedding-0.6B, silence_latent).

Files changed (1) hide show
  1. acestep/handler.py +8 -0
acestep/handler.py CHANGED
@@ -445,6 +445,14 @@ class AceStepHandler:
445
  if not os.path.exists(acestep_v15_checkpoint_path):
446
  acestep_v15_checkpoint_path = self._ensure_model_downloaded(config_path, checkpoint_dir)
447
 
 
 
 
 
 
 
 
 
448
  if os.path.exists(acestep_v15_checkpoint_path):
449
  # Determine attention implementation (prefer flash-attn3 > flash_attention_2 > sdpa)
450
  if use_flash_attention:
 
445
  if not os.path.exists(acestep_v15_checkpoint_path):
446
  acestep_v15_checkpoint_path = self._ensure_model_downloaded(config_path, checkpoint_dir)
447
 
448
+ # Ensure shared dependencies (VAE, text encoder) are available.
449
+ # Independent model repos (e.g., XL variants) only contain DiT weights.
450
+ # Download the unified repo first to get all shared components.
451
+ unified_model_path = os.path.join(checkpoint_dir, 'acestep-v15-turbo')
452
+ if not os.path.exists(unified_model_path):
453
+ logger.info('[initialize_service] Downloading unified repo for shared components (VAE, text encoder)...')
454
+ self._ensure_model_downloaded('acestep-v15-turbo', checkpoint_dir)
455
+
456
  if os.path.exists(acestep_v15_checkpoint_path):
457
  # Determine attention implementation (prefer flash-attn3 > flash_attention_2 > sdpa)
458
  if use_flash_attention: