[model_arguments]
# Base checkpoint to train against; v2 and v_parameterization stay false for SD 1.x models.
v2 = false
v_parameterization = false
pretrained_model_name_or_path = "/content/pretrained_model/AnyLoRA_noVae_fp16-pruned.ckpt"

[additional_network_arguments]
# LoRA network settings: separate learning rates for the U-Net and text encoder,
# rank (dim) 16 with alpha 8, plus conv-layer LoRA via network_args.
no_metadata = false
unet_lr = 0.0001
text_encoder_lr = 5e-5
network_module = "networks.lora"
network_dim = 16
network_alpha = 8
network_args = [ "conv_dim=8", "conv_alpha=1",]
network_train_unet_only = false
network_train_text_encoder_only = false

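# Scaling implied by the [additional_network_arguments] values above
# (in networks.lora the LoRA update is multiplied by alpha / dim):
#   linear/attention layers: network_alpha / network_dim = 8 / 16 = 0.5
#   conv layers:             conv_alpha / conv_dim       = 1 / 8  = 0.125
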
[optimizer_arguments]
# 8-bit AdamW with Min-SNR loss weighting (gamma = 5) and a constant schedule after
# a short linear warmup; unet_lr / text_encoder_lr above take precedence over learning_rate.
min_snr_gamma = 5.0
optimizer_type = "AdamW8bit"
learning_rate = 0.0001
max_grad_norm = 1.0
optimizer_args = []
lr_scheduler = "constant_with_warmup"
lr_warmup_steps = 135

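# With "constant_with_warmup", the learning rate ramps linearly from 0 to its target
# over the first 135 optimizer steps (e.g. the U-Net reaches 1e-4 at step 135) and
# then stays constant for the rest of training.
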
[dataset_arguments]
# Pre-encode images to latents once and cache them to disk so the VAE is not run
# every step; vae_batch_size is the batch size used for that caching pass.
cache_latents = true
cache_latents_to_disk = true
debug_dataset = false
vae_batch_size = 4

[training_arguments]
output_dir = "/content/LoRA/output"
output_name = "simplvectorV1"
save_precision = "fp16"
save_every_n_epochs = 1
train_batch_size = 2
max_token_length = 225      # extended prompt length (75 / 150 / 225 tokens)
mem_eff_attn = false
xformers = true             # memory-efficient attention via xformers
max_train_epochs = 10
max_data_loader_n_workers = 8
persistent_data_loader_workers = true
gradient_accumulation_steps = 1
mixed_precision = "fp16"
clip_skip = 2               # use the penultimate CLIP layer (common for anime-style checkpoints)
lowram = true               # Colab-friendly: load the model into VRAM rather than system RAM

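# Step-count sketch for the [training_arguments] above: the effective batch size is
# train_batch_size * gradient_accumulation_steps = 2 * 1 = 2, so total optimizer steps
# work out to roughly (images * repeats / 2) * 10 epochs. The image count and repeats
# are not defined in this file; they come from the separate dataset configuration.
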
[logging_arguments]
# TensorBoard logs written under logging_dir, with log_prefix prepended to each run name.
log_with = "tensorboard"
logging_dir = "/content/LoRA/logs"
log_prefix = "simplvectorV1"

[sample_prompt_arguments]
# Generate sample images after every epoch using the DPM2 ancestral sampler.
sample_every_n_epochs = 1
sample_sampler = "k_dpm_2_a"

[dreambooth_arguments]
# Weight of the prior-preservation (regularization-image) loss; only takes effect
# when regularization images are supplied in the dataset configuration.
prior_loss_weight = 1.0

[saving_arguments]
save_model_as = "safetensors"

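# Usage sketch (assumptions: this file follows the layout emitted by the kohya-ss
# sd-scripts Colab notebooks, the dataset lives in a separate dataset_config.toml,
# sample prompts live in a separate text file, and the paths below are illustrative):
#
#   accelerate launch train_network.py \
#     --config_file=/content/LoRA/config/config_file.toml \
#     --dataset_config=/content/LoRA/config/dataset_config.toml \
#     --sample_prompts=/content/LoRA/config/sample_prompt.txt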