improve efficiency for upscaler

Patrick Esser 2022-06-14 14:42:11 +00:00 committed by root
parent fe081960ca
commit 4155b51d1f
3 changed files with 14 additions and 8 deletions


@@ -1,5 +1,5 @@
 model:
-  base_learning_rate: 1.0e-04
+  base_learning_rate: 5.0e-05
   target: ldm.models.diffusion.ddpm.LatentUpscaleDiffusion
   params:
     low_scale_key: "lr"
@@ -66,10 +66,11 @@ model:
         image_size: 64 # not really needed
         in_channels: 20
         out_channels: 16
-        model_channels: 192
-        attention_resolutions: [ 4, 2, 1 ]
+        model_channels: 96
+        attention_resolutions: [ 8, 4, 2 ] # -> at 32, 16, 8
         num_res_blocks: 2
-        channel_mult: [ 1, 2, 4, 4 ]
+        channel_mult: [ 1, 2, 4, 8, 8 ]
+        # -> res, ds: (64, 1), (32, 2), (16, 4), (8, 8), (4, 16)
         num_heads: 8
         use_spatial_transformer: True
         transformer_depth: 1
@@ -105,7 +106,7 @@ data:
   target: ldm.data.laion.WebDataModuleFromConfig
   params:
     tar_base: "pipe:aws s3 cp s3://s-datasets/laion-high-resolution/"
-    batch_size: 8
+    batch_size: 10
     num_workers: 4
     train:
       shards: '{00000..17279}.tar -'
@@ -143,6 +144,8 @@ data:
       factor: 4
 
 lightning:
+  find_unused_parameters: False
   callbacks:
     image_logger:
       target: main.ImageLogger
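The config hunk above trades width for depth: model_channels drops from 192 to 96, channel_mult gains a fifth level, and attention_resolutions shifts from [4, 2, 1] to [8, 4, 2]. These values are downsample factors, so the old setting ran self-attention on the full 64x64 latent (4096 tokens, quadratic cost per block), while the new one keeps attention only on the 32x32, 16x16, and 8x8 feature maps. A minimal sketch of how the keys interact, assuming the convention of the UNet in ldm/modules/diffusionmodules/openaimodel.py (level i runs at downsample factor 2**i; attention is inserted where that factor appears in attention_resolutions):

    # Per-level shapes of the new upscaler UNet (sketch, not part of the commit).
    image_size = 64                    # latent resolution
    model_channels = 96
    channel_mult = [1, 2, 4, 8, 8]
    attention_resolutions = [8, 4, 2]  # downsample factors, not pixel sizes

    ds = 1
    for level, mult in enumerate(channel_mult):
        print(f"level {level}: {image_size // ds}x{image_size // ds}, ds={ds}, "
              f"channels={model_channels * mult}, attn={ds in attention_resolutions}")
        if level < len(channel_mult) - 1:
            ds *= 2                    # one downsample between levels
    # -> (64,1) no attn, (32,2) attn, (16,4) attn, (8,8) attn, (4,16) no attn,
    #    matching the "# -> res, ds: ..." comment in the config.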


@@ -253,9 +253,9 @@ class SpatialTransformer(nn.Module):
         x_in = x
         x = self.norm(x)
         x = self.proj_in(x)
-        x = rearrange(x, 'b c h w -> b (h w) c')
+        x = rearrange(x, 'b c h w -> b (h w) c').contiguous()
         for block in self.transformer_blocks:
             x = block(x, context=context)
-        x = rearrange(x, 'b (h w) c -> b c h w', h=h, w=w)
+        x = rearrange(x, 'b (h w) c -> b c h w', h=h, w=w).contiguous()
         x = self.proj_out(x)
-        return x + x_in
+        return x + x_in
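In SpatialTransformer.forward, rearrange is a pure permute-and-view, so it hands back a non-contiguous tensor; the projections and attention matmuls that follow then run on strided memory or trigger implicit copies inside every transformer block. Appending .contiguous() pays for one explicit copy up front instead. An illustrative check (not repo code):

    import torch
    from einops import rearrange

    x = torch.randn(2, 16, 32, 32)            # b c h w, contiguous
    y = rearrange(x, 'b c h w -> b (h w) c')  # permute + view, no copy yet
    print(y.is_contiguous())                  # False; strides are (16384, 1, 1024)
    print(y.contiguous().is_contiguous())     # True after one explicit copy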


@@ -759,6 +759,9 @@ if __name__ == "__main__":
                 del callbacks_cfg['ignore_keys_callback']
 
         trainer_kwargs["callbacks"] = [instantiate_from_config(callbacks_cfg[k]) for k in callbacks_cfg]
+        if not lightning_config.get("find_unused_parameters", True):
+            from pytorch_lightning.plugins import DDPPlugin
+            trainer_kwargs["plugins"] = DDPPlugin(find_unused_parameters=False)
         trainer = Trainer.from_argparse_args(trainer_opt, **trainer_kwargs)
         trainer.logdir = logdir  ###
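The new main.py branch reads the lightning.find_unused_parameters key added to the config above. With the PyTorch Lightning default of True, DistributedDataParallel walks the autograd graph after every backward pass hunting for parameters that received no gradient; a model that uses all of its parameters each step can skip that per-iteration scan. A standalone sketch of the same setting, assuming the pytorch-lightning 1.4.x-era API this repo uses (the gpus value is illustrative):

    from pytorch_lightning import Trainer
    from pytorch_lightning.plugins import DDPPlugin

    # Equivalent effect, hard-coded: promise DDP that every parameter
    # receives a gradient, so it skips the unused-parameter traversal.
    trainer = Trainer(
        gpus=2,
        accelerator="ddp",
        plugins=DDPPlugin(find_unused_parameters=False),
    )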