---
# Main real-world inference configuration
seed: 42
max_layer_num: 52

# Size configuration
source_size: 1024
target_size: 1024

# Real-world inference defaults
data_dir: "/project/llmsvgen/share/data/kmw_layered_dataset/real_world_inference"
image_dir: "/project/llmsvgen/share/data/kmw_layered_dataset/real_world_inference/layers_real_test_1024"
test_jsonl: "/project/llmsvgen/share/data/kmw_layered_dataset/real_world_inference/caption_bbox_infer.jsonl"

# Model paths
pretrained_model_name_or_path: "/project/llmsvgen/share/data/kmw_layered_checkpoint/SynLayers_checkpoints/FLUX.1-dev"
pretrained_adapter_path: "/project/llmsvgen/share/data/kmw_layered_checkpoint/SynLayers_checkpoints/FLUX.1-dev-Controlnet-Inpainting-Alpha"
transp_vae_path: "ckpt/trans_vae/0008000.pt"

# Pre-trained LoRA weights
pretrained_lora_dir: "ckpt/pre_trained_LoRA"
artplus_lora_dir: "ckpt/prism_ft_LoRA"

# Alternative (inactive) checkpoints — 18k dataset
# lora_ckpt: "/project/llmsvgen/share/data/kmw_layered_checkpoint/SynLayers_train_dataset/ckpt_prism_scaleup_1024_18k/step_90000/transformer"
# layer_ckpt: "/project/llmsvgen/share/data/kmw_layered_checkpoint/SynLayers_train_dataset/ckpt_prism_scaleup_1024_18k/step_90000"
# adapter_lora_dir: "/project/llmsvgen/share/data/kmw_layered_checkpoint/SynLayers_train_dataset/ckpt_prism_scaleup_1024_18k/step_90000/adapter"

# Alternative (inactive) checkpoints — 20k dataset
# lora_ckpt: "/project/llmsvgen/share/data/kmw_layered_checkpoint/SynLayers_train_dataset/ckpt_prism_scaleup_1024_20k/step_120000/transformer"
# layer_ckpt: "/project/llmsvgen/share/data/kmw_layered_checkpoint/SynLayers_train_dataset/ckpt_prism_scaleup_1024_20k/step_120000"
# adapter_lora_dir: "/project/llmsvgen/share/data/kmw_layered_checkpoint/SynLayers_train_dataset/ckpt_prism_scaleup_1024_20k/step_120000/adapter"

# Alternative (inactive) checkpoints — 30k dataset
# lora_ckpt: "/project/llmsvgen/share/data/kmw_layered_checkpoint/SynLayers_train_dataset/ckpt_prism_scaleup_1024_30k/step_150000/transformer"
# layer_ckpt: "/project/llmsvgen/share/data/kmw_layered_checkpoint/SynLayers_train_dataset/ckpt_prism_scaleup_1024_30k/step_150000"
# adapter_lora_dir: "/project/llmsvgen/share/data/kmw_layered_checkpoint/SynLayers_train_dataset/ckpt_prism_scaleup_1024_30k/step_150000/adapter"

# Alternative (inactive) checkpoints — 40k dataset
# lora_ckpt: "/project/llmsvgen/share/data/kmw_layered_checkpoint/SynLayers_train_dataset/ckpt_prism_scaleup_1024_40k/step_250000/transformer"
# layer_ckpt: "/project/llmsvgen/share/data/kmw_layered_checkpoint/SynLayers_train_dataset/ckpt_prism_scaleup_1024_40k/step_250000"
# adapter_lora_dir: "/project/llmsvgen/share/data/kmw_layered_checkpoint/SynLayers_train_dataset/ckpt_prism_scaleup_1024_40k/step_250000/adapter"

# Alternative (inactive) checkpoints — 50k dataset
# lora_ckpt: "/project/llmsvgen/share/data/kmw_layered_checkpoint/SynLayers_train_dataset/ckpt_prism_scaleup_1024_50k/step_200000/transformer"
# layer_ckpt: "/project/llmsvgen/share/data/kmw_layered_checkpoint/SynLayers_train_dataset/ckpt_prism_scaleup_1024_50k/step_200000"
# adapter_lora_dir: "/project/llmsvgen/share/data/kmw_layered_checkpoint/SynLayers_train_dataset/ckpt_prism_scaleup_1024_50k/step_200000/adapter"

# Active checkpoints: unified real-world decomposition (step 120000)
lora_ckpt: "/project/llmsvgen/share/data/kmw_layered_checkpoint/SynLayers_ckpt/step_120000/transformer"
layer_ckpt: "/project/llmsvgen/share/data/kmw_layered_checkpoint/SynLayers_ckpt/step_120000"
adapter_lora_dir: "/project/llmsvgen/share/data/kmw_layered_checkpoint/SynLayers_ckpt/step_120000/adapter"

# Inference settings
cfg: 4.0
adapter_scale: 0.9
max_sequence_length: 1024

save_dir: "/project/llmsvgen/share/data/kmw_layered_dataset/real_world_inference/results"
# run_name: "step_120000"  # optional manual override of the run/output name

# Sample range control (1-based indexing)
start_idx: 1
# end_idx: 147  # optional: last sample to process (inclusive)
# max_samples: 147  # optional: cap on number of samples processed