arcaillous-nbxl-v10

32
6
1.0B
by
Bedovyy
Image Model
OTHER
1.0B params
New
32 downloads
Early-stage
Edge AI:
Mobile
Laptop
Server
3GB+ RAM
Mobile
Laptop
Server
Quick Summary

Trained in 2 steps: `Lion8bit` for quick training and `Lion` for detail.

Device Compatibility

Mobile
4-6GB RAM
Laptop
16GB RAM
Server
GPU
Minimum Recommended
1GB+ RAM

Code Examples

Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py $@ \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_`date +%y%m%d-%H%M`" --logging_dir wandb
Trainingtext
# Launch SDXL full fine-tune (kohya-ss sdxl_train.py) via accelerate.
# NCCL P2P/IB disabled — presumably a workaround for this host's GPU interconnect; TODO confirm.
# Fixes: quote "$@" so extra CLI args with spaces/globs survive word-splitting; $(...) over deprecated backticks.
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py "$@" \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_$(date +%y%m%d-%H%M)" --logging_dir wandb
Trainingtext
# Launch SDXL full fine-tune (kohya-ss sdxl_train.py) via accelerate.
# NCCL P2P/IB disabled — presumably a workaround for this host's GPU interconnect; TODO confirm.
# Fixes: quote "$@" so extra CLI args with spaces/globs survive word-splitting; $(...) over deprecated backticks.
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py "$@" \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_$(date +%y%m%d-%H%M)" --logging_dir wandb
Trainingtext
# Launch SDXL full fine-tune (kohya-ss sdxl_train.py) via accelerate.
# NCCL P2P/IB disabled — presumably a workaround for this host's GPU interconnect; TODO confirm.
# Fixes: quote "$@" so extra CLI args with spaces/globs survive word-splitting; $(...) over deprecated backticks.
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py "$@" \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_$(date +%y%m%d-%H%M)" --logging_dir wandb
Trainingtext
# Launch SDXL full fine-tune (kohya-ss sdxl_train.py) via accelerate.
# NCCL P2P/IB disabled — presumably a workaround for this host's GPU interconnect; TODO confirm.
# Fixes: quote "$@" so extra CLI args with spaces/globs survive word-splitting; $(...) over deprecated backticks.
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py "$@" \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_$(date +%y%m%d-%H%M)" --logging_dir wandb
Trainingtext
# Launch SDXL full fine-tune (kohya-ss sdxl_train.py) via accelerate.
# NCCL P2P/IB disabled — presumably a workaround for this host's GPU interconnect; TODO confirm.
# Fixes: quote "$@" so extra CLI args with spaces/globs survive word-splitting; $(...) over deprecated backticks.
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py "$@" \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_$(date +%y%m%d-%H%M)" --logging_dir wandb
Trainingtext
# Launch SDXL full fine-tune (kohya-ss sdxl_train.py) via accelerate.
# NCCL P2P/IB disabled — presumably a workaround for this host's GPU interconnect; TODO confirm.
# Fixes: quote "$@" so extra CLI args with spaces/globs survive word-splitting; $(...) over deprecated backticks.
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py "$@" \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_$(date +%y%m%d-%H%M)" --logging_dir wandb
Trainingtext
# Launch SDXL full fine-tune (kohya-ss sdxl_train.py) via accelerate.
# NCCL P2P/IB disabled — presumably a workaround for this host's GPU interconnect; TODO confirm.
# Fixes: quote "$@" so extra CLI args with spaces/globs survive word-splitting; $(...) over deprecated backticks.
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py "$@" \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_$(date +%y%m%d-%H%M)" --logging_dir wandb
Trainingtext
# Launch SDXL full fine-tune (kohya-ss sdxl_train.py) via accelerate.
# NCCL P2P/IB disabled — presumably a workaround for this host's GPU interconnect; TODO confirm.
# Fixes: quote "$@" so extra CLI args with spaces/globs survive word-splitting; $(...) over deprecated backticks.
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py "$@" \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_$(date +%y%m%d-%H%M)" --logging_dir wandb
Trainingtext
# Launch SDXL full fine-tune (kohya-ss sdxl_train.py) via accelerate.
# NCCL P2P/IB disabled — presumably a workaround for this host's GPU interconnect; TODO confirm.
# Fixes: quote "$@" so extra CLI args with spaces/globs survive word-splitting; $(...) over deprecated backticks.
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py "$@" \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_$(date +%y%m%d-%H%M)" --logging_dir wandb
Trainingtext
# Launch SDXL full fine-tune (kohya-ss sdxl_train.py) via accelerate.
# NCCL P2P/IB disabled — presumably a workaround for this host's GPU interconnect; TODO confirm.
# Fixes: quote "$@" so extra CLI args with spaces/globs survive word-splitting; $(...) over deprecated backticks.
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py "$@" \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_$(date +%y%m%d-%H%M)" --logging_dir wandb
Trainingtext
# Launch SDXL full fine-tune (kohya-ss sdxl_train.py) via accelerate.
# NCCL P2P/IB disabled — presumably a workaround for this host's GPU interconnect; TODO confirm.
# Fixes: quote "$@" so extra CLI args with spaces/globs survive word-splitting; $(...) over deprecated backticks.
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py "$@" \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_$(date +%y%m%d-%H%M)" --logging_dir wandb
Trainingtext
# Launch SDXL full fine-tune (kohya-ss sdxl_train.py) via accelerate.
# NCCL P2P/IB disabled — presumably a workaround for this host's GPU interconnect; TODO confirm.
# Fixes: quote "$@" so extra CLI args with spaces/globs survive word-splitting; $(...) over deprecated backticks.
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py "$@" \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_$(date +%y%m%d-%H%M)" --logging_dir wandb
Trainingtext
# Launch SDXL full fine-tune (kohya-ss sdxl_train.py) via accelerate.
# NCCL P2P/IB disabled — presumably a workaround for this host's GPU interconnect; TODO confirm.
# Fixes: quote "$@" so extra CLI args with spaces/globs survive word-splitting; $(...) over deprecated backticks.
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 8 sdxl_train.py "$@" \
        --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/noobaiXLNAIXL_epsilonPred10Version.safetensors" \
        --dataset_config="arca_nbxl.toml" \
        --output_dir="results/ckpt" --output_name="arcaillous-nbxl-v10b" \
        --save_model_as="safetensors" \
        --train_batch_size 4 --gradient_accumulation_steps 64 \
        --learning_rate=1e-5 --optimizer_type="Lion8bit" \
        --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
        --sdpa \
        --no_half_vae \
        --cache_latents --cache_latents_to_disk \
        --gradient_checkpointing \
        --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
        --ddp_timeout=10000000 \
        --max_train_epochs 4 --save_every_n_epochs 1 \
        --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arca_nbxl_$(date +%y%m%d-%H%M)" --logging_dir wandb

Deploy This Model

Production-ready deployment in minutes

Together.ai

Instant API access to this model

Fastest API

Production-ready inference API. Start free, scale to millions.

Try Free API

Replicate

One-click model deployment

Easiest Setup

Run models in the cloud with simple API. No DevOps required.

Deploy Now

Disclosure: We may earn a commission from these partners. This helps keep LLMYourWay free.