Skip to content

Commit

Permalink
Make blocks_to_swap common
Browse files Browse the repository at this point in the history
  • Loading branch information
bmaltais committed Dec 31, 2024
1 parent 3eec4c9 commit 3c860c4
Show file tree
Hide file tree
Showing 5 changed files with 21 additions and 12 deletions.
9 changes: 9 additions & 0 deletions kohya_gui/class_advanced_training.py
Original file line number Diff line number Diff line change
Expand Up @@ -493,6 +493,15 @@ def full_options_update(full_fp16, full_bf16):
value=self.config.get("advanced.vae_batch_size", 0),
step=1,
)
self.blocks_to_swap = gr.Slider(
label="Blocks to swap",
value=self.config.get("advanced.blocks_to_swap", 0),
info="The number of blocks to swap. The default is None (no swap). These options must be combined with --fused_backward_pass or --blockwise_fused_optimizers. The recommended maximum value is 36.",
minimum=0,
maximum=57,
step=1,
interactive=True,
)
with gr.Group(), gr.Row():
self.save_state = gr.Checkbox(
label="Save training state",
Expand Down
18 changes: 9 additions & 9 deletions kohya_gui/class_flux1.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,15 +202,15 @@ def noise_offset_type_change(
)

with gr.Row():
self.blocks_to_swap = gr.Slider(
label="Blocks to swap",
value=self.config.get("flux1.blocks_to_swap", 0),
info="The number of blocks to swap. The default is None (no swap). These options must be combined with --fused_backward_pass or --blockwise_fused_optimizers. The recommended maximum value is 36.",
minimum=0,
maximum=57,
step=1,
interactive=True,
)
# self.blocks_to_swap = gr.Slider(
# label="Blocks to swap",
# value=self.config.get("flux1.blocks_to_swap", 0),
# info="The number of blocks to swap. The default is None (no swap). These options must be combined with --fused_backward_pass or --blockwise_fused_optimizers. The recommended maximum value is 36.",
# minimum=0,
# maximum=57,
# step=1,
# interactive=True,
# )
self.single_blocks_to_swap = gr.Slider(
                label="Single Blocks to swap (deprecated)",

Check warning on line 215 in kohya_gui/class_flux1.py

View workflow job for this annotation

GitHub Actions / build

"depercated" should be "deprecated".
value=self.config.get("flux1.single_blocks_to_swap", 0),
Expand Down
2 changes: 1 addition & 1 deletion kohya_gui/dreambooth_gui.py
Original file line number Diff line number Diff line change
Expand Up @@ -1415,7 +1415,7 @@ def dreambooth_tab(
flux1_training.blockwise_fused_optimizers,
flux1_training.flux_fused_backward_pass,
flux1_training.cpu_offload_checkpointing,
flux1_training.blocks_to_swap,
advanced_training.blocks_to_swap,
flux1_training.single_blocks_to_swap,
flux1_training.double_blocks_to_swap,
flux1_training.mem_eff_save,
Expand Down
2 changes: 1 addition & 1 deletion kohya_gui/finetune_gui.py
Original file line number Diff line number Diff line change
Expand Up @@ -1550,7 +1550,7 @@ def list_presets(path):
flux1_training.blockwise_fused_optimizers,
flux1_training.flux_fused_backward_pass,
flux1_training.cpu_offload_checkpointing,
flux1_training.blocks_to_swap,
advanced_training.blocks_to_swap,
flux1_training.single_blocks_to_swap,
flux1_training.double_blocks_to_swap,
flux1_training.mem_eff_save,
Expand Down
2 changes: 1 addition & 1 deletion kohya_gui/lora_gui.py
Original file line number Diff line number Diff line change
Expand Up @@ -2859,7 +2859,7 @@ def update_LoRA_settings(
flux1_training.split_qkv,
flux1_training.train_t5xxl,
flux1_training.cpu_offload_checkpointing,
flux1_training.blocks_to_swap,
advanced_training.blocks_to_swap,
flux1_training.single_blocks_to_swap,
flux1_training.double_blocks_to_swap,
flux1_training.img_attn_dim,
Expand Down

0 comments on commit 3c860c4

Please sign in to comment.