From 34f9f3867ed8143c6b5f07e140c224fd6f1a7654 Mon Sep 17 00:00:00 2001
From: rromb
Date: Fri, 10 Jun 2022 12:27:35 +0200
Subject: [PATCH] make it ready

---
 .../txt2img-multinode-clip-encoder-f16-256-pretraining.yaml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/configs/stable-diffusion/txt2img-multinode-clip-encoder-f16-256-pretraining.yaml b/configs/stable-diffusion/txt2img-multinode-clip-encoder-f16-256-pretraining.yaml
index 19b498f..68c633a 100644
--- a/configs/stable-diffusion/txt2img-multinode-clip-encoder-f16-256-pretraining.yaml
+++ b/configs/stable-diffusion/txt2img-multinode-clip-encoder-f16-256-pretraining.yaml
@@ -70,7 +70,7 @@ data:
   target: ldm.data.laion.WebDataModuleFromConfig
   params:
     tar_base: "pipe:aws s3 cp s3://s-datasets/laion5b/laion2B-data/"
-    batch_size: 50 # TODO: max out
+    batch_size: 55
     num_workers: 4
     multinode: True
     min_size: 256 # TODO: experiment. Note: for 2B, images are stored at max 384 resolution
@@ -124,4 +124,4 @@ lightning:
     benchmark: True
     val_check_interval: 5000000 # really sorry
     num_sanity_val_steps: 0
-    accumulate_grad_batches: 2 # TODO: want accumulate on? --> wait for final batch-size
+    accumulate_grad_batches: 2
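
For context, a minimal sketch of how the two settings touched by this patch combine under PyTorch Lightning's gradient accumulation: the optimizer-step batch size is the per-GPU batch_size times accumulate_grad_batches times the number of GPUs across nodes. The GPU and node counts below are illustrative assumptions, not values taken from this config.

# Rough effective-batch-size arithmetic for the values in this patch.
# gpus_per_node and num_nodes are placeholders for illustration; the real
# values come from the launch setup, not from this diff.
batch_size = 55               # per-GPU batch size after this change (was 50)
accumulate_grad_batches = 2   # unchanged by this patch
gpus_per_node = 8             # assumption for illustration
num_nodes = 1                 # assumption for illustration

effective_batch = batch_size * accumulate_grad_batches * gpus_per_node * num_nodes
print(effective_batch)        # 880 with the placeholder counts above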