# CivArchive
# their | vp | - v2
# Preview 1

    [network_arguments]

    unet_lr = 8e-5

    text_encoder_lr = 5e-5

    network_dim = 32

    network_alpha = 16

    network_module = "networks.lora"

    network_train_unet_only = false

    [optimizer_arguments]

    learning_rate = 8e-5

    lr_scheduler = "cosine_with_restarts"

    lr_scheduler_num_cycles = 3

    lr_warmup_steps = 17

    optimizer_type = "LoraEasyCustomOptimizer.came.CAME"

    optimizer_args = [ "weight_decay=0.04",]

    loss_type = "l2"

    max_grad_norm = 1.0

    [training_arguments]

    lowram = true

    pretrained_model_name_or_path = "/content/downloaded_model.safetensors"

    vae = "/content/sdxl_vae.safetensors"

    max_train_epochs = 8

    train_batch_size = 4

    seed = 42

    max_token_length = 225

    xformers = false

    sdpa = true

    no_half_vae = true

    gradient_checkpointing = true

    gradient_accumulation_steps = 1

    max_data_loader_n_workers = 1

    persistent_data_loader_workers = true

    mixed_precision = "fp16"

    full_fp16 = true

    full_bf16 = false

    cache_latents = true

    cache_latents_to_disk = true

    cache_text_encoder_outputs = false

    min_timestep = 0

    max_timestep = 1000

    prior_loss_weight = 1.0

    multires_noise_iterations = 6

    multires_noise_discount = 0.3

    v_parameterization = true

    scale_v_pred_loss_like_noise_pred = true

    zero_terminal_snr = true

    [saving_arguments]

    save_precision = "fp16"

    save_model_as = "safetensors"

    save_every_n_epochs = 1

    save_last_n_epochs = 8

    output_name = "anyillu"

    output_dir = "/content/drive/MyDrive/Loras/anyillu/output"

    log_prefix = "anyillu"

    logging_dir = "/content/drive/MyDrive/Loras/_logs"

# Description