From 4155b51d1fa9daac1a4eb29cb5b94f86336d99a8 Mon Sep 17 00:00:00 2001
From: Patrick Esser
Date: Tue, 14 Jun 2022 14:42:11 +0000
Subject: [PATCH] improve efficiency for upscaler

---
 .../txt2img-upscale-clip-encoder-f16-1024.yaml | 13 ++++++++-----
 ldm/modules/attention.py                       |  6 +++---
 main.py                                        |  3 +++
 3 files changed, 14 insertions(+), 8 deletions(-)

diff --git a/configs/stable-diffusion/txt2img-upscale-clip-encoder-f16-1024.yaml b/configs/stable-diffusion/txt2img-upscale-clip-encoder-f16-1024.yaml
index 325abc2..053238d 100644
--- a/configs/stable-diffusion/txt2img-upscale-clip-encoder-f16-1024.yaml
+++ b/configs/stable-diffusion/txt2img-upscale-clip-encoder-f16-1024.yaml
@@ -1,5 +1,5 @@
 model:
-  base_learning_rate: 1.0e-04
+  base_learning_rate: 5.0e-05
   target: ldm.models.diffusion.ddpm.LatentUpscaleDiffusion
   params:
     low_scale_key: "lr"
@@ -66,10 +66,11 @@ model:
         image_size: 64 # not really needed
         in_channels: 20
         out_channels: 16
-        model_channels: 192
-        attention_resolutions: [ 4, 2, 1 ]
+        model_channels: 96
+        attention_resolutions: [ 8, 4, 2 ]  # -> at 32, 16, 8
         num_res_blocks: 2
-        channel_mult: [ 1, 2, 4, 4 ]
+        channel_mult: [ 1, 2, 4, 8, 8 ]
+        # -> res, ds: (64, 1), (32, 2), (16, 4), (8, 8), (4, 16)
         num_heads: 8
         use_spatial_transformer: True
         transformer_depth: 1
@@ -105,7 +106,7 @@ data:
   target: ldm.data.laion.WebDataModuleFromConfig
   params:
     tar_base: "pipe:aws s3 cp s3://s-datasets/laion-high-resolution/"
-    batch_size: 8
+    batch_size: 10
     num_workers: 4
     train:
       shards: '{00000..17279}.tar -'
@@ -143,6 +144,8 @@ data:
           factor: 4
 
 lightning:
+  find_unused_parameters: False
+
   callbacks:
     image_logger:
       target: main.ImageLogger
diff --git a/ldm/modules/attention.py b/ldm/modules/attention.py
index f4eff39..f0f99c4 100644
--- a/ldm/modules/attention.py
+++ b/ldm/modules/attention.py
@@ -253,9 +253,9 @@ class SpatialTransformer(nn.Module):
         x_in = x
         x = self.norm(x)
         x = self.proj_in(x)
-        x = rearrange(x, 'b c h w -> b (h w) c')
+        x = rearrange(x, 'b c h w -> b (h w) c').contiguous()
         for block in self.transformer_blocks:
             x = block(x, context=context)
-        x = rearrange(x, 'b (h w) c -> b c h w', h=h, w=w)
+        x = rearrange(x, 'b (h w) c -> b c h w', h=h, w=w).contiguous()
         x = self.proj_out(x)
-        return x + x_in
\ No newline at end of file
+        return x + x_in
diff --git a/main.py b/main.py
index 10624d6..e8946a5 100644
--- a/main.py
+++ b/main.py
@@ -759,6 +759,9 @@ if __name__ == "__main__":
             del callbacks_cfg['ignore_keys_callback']
 
         trainer_kwargs["callbacks"] = [instantiate_from_config(callbacks_cfg[k]) for k in callbacks_cfg]
+        if not lightning_config.get("find_unused_parameters", True):
+            from pytorch_lightning.plugins import DDPPlugin
+            trainer_kwargs["plugins"] = DDPPlugin(find_unused_parameters=False)
 
         trainer = Trainer.from_argparse_args(trainer_opt, **trainer_kwargs)
         trainer.logdir = logdir  ###
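
Note on the .contiguous() calls in ldm/modules/attention.py: the 'b c h w -> b (h w) c' rearrange is a permute followed by a reshape, which typically hands back a non-contiguous strided view; adding .contiguous() materializes the tensor once so the transformer blocks and the final proj_out convolution run on a densely laid-out buffer. A minimal sketch to observe this (the shape below is illustrative only and not taken from the patch):

    import torch
    from einops import rearrange

    # Any (b, c, h, w) tensor with c > 1 shows the same effect; the shape is arbitrary.
    x = torch.randn(2, 16, 8, 8)

    flat = rearrange(x, 'b c h w -> b (h w) c')
    print(flat.is_contiguous())               # typically False: permute + reshape returns a strided view
    print(flat.contiguous().is_contiguous())  # True: one copy into a dense row-major layout

The lightning.find_unused_parameters: False flag is read in main.py and forwarded as DDPPlugin(find_unused_parameters=False), which skips DDP's per-step search of the autograd graph for unused parameters; this is safe only while every parameter receives a gradient on every step.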