[2025-02-26 18:56:16] Model: DistributedDataParallel(
  (module): FlowAE(
    (flow): FlowDecoder(
      (conv_in): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
      (mid): Module(
        (block_1): ResnetBlock(
          (norm1): RMSNorm()
          (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (temb_proj): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1))
          (norm2): RMSNorm()
          (dropout): Dropout(p=0.0, inplace=False)
          (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
        )
        (attn_1): AttnBlock(
          (norm): RMSNorm()
          (q): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1))
          (k): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1))
          (v): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1))
          (proj_out): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1))
        )
        (block_2): ResnetBlock(
          (norm1): RMSNorm()
          (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          (temb_proj): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1))
          (norm2): RMSNorm()
          (dropout): Dropout(p=0.0, inplace=False)
          (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
        )
      )
      (up): ModuleList(
        (0): Module(
          (block): ModuleList(
            (0): ResnetBlock(
              (norm1): RMSNorm()
              (conv1): Conv2d(256, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
              (temb_proj): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1))
              (norm2): RMSNorm()
              (dropout): Dropout(p=0.0, inplace=False)
              (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
              (nin_shortcut): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1))
            )
            (1-2): 2 x ResnetBlock(
              (norm1): RMSNorm()
              (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
              (temb_proj): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1))
              (norm2): RMSNorm()
              (dropout): Dropout(p=0.0, inplace=False)
              (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
            )
          )
          (attn): ModuleList()
        )
        (1): Module(
          (block): ModuleList(
            (0): ResnetBlock(
              (norm1): RMSNorm()
              (conv1): Conv2d(512, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
              (temb_proj): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1))
              (norm2): RMSNorm()
              (dropout): Dropout(p=0.0, inplace=False)
              (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
              (nin_shortcut): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1))
            )
            (1-2): 2 x ResnetBlock(
              (norm1): RMSNorm()
              (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
              (temb_proj): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1))
              (norm2): RMSNorm()
              (dropout): Dropout(p=0.0, inplace=False)
              (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
            )
          )
          (attn): ModuleList()
          (upsample): Upsample(
            (conv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          )
        )
        (2-3): 2 x Module(
          (block): ModuleList(
            (0-2): 3 x ResnetBlock(
              (norm1): RMSNorm()
              (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
              (temb_proj): Conv2d(512, 512, kernel_size=(1, 1), stride=(1, 1))
              (norm2): RMSNorm()
              (dropout): Dropout(p=0.0, inplace=False)
              (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
            )
          )
          (attn): ModuleList()
          (upsample): Upsample(
            (conv): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
          )
        )
      )
      (norm_out): RMSNorm()
      (conv_out): Conv2d(128, 3, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    )
    (t_embedder): TimestepEmbedder(
      (mlp): Sequential(
        (0): Linear(in_features=256, out_features=512, bias=True)
        (1): SiLU()
        (2): Linear(in_features=512, out_features=512, bias=True)
      )
    )
    (y_embedder): Conv2d(4, 512, kernel_size=(1, 1), stride=(1, 1))
    (x_embedder): PatchEmbed(
      (proj): Conv2d(3, 512, kernel_size=(8, 8), stride=(8, 8))
      (norm): Identity()
    )
  )
)
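
For reference, below is a minimal, self-contained PyTorch sketch of the ResnetBlock pattern that repeats throughout the decoder repr above (pre-norm 3x3 convs, a 1x1 temb_proj for the timestep embedding, and a 1x1 nin_shortcut when the channel count changes). The channel-wise RMSNorm, the SiLU nonlinearity, and the broadcast addition of the timestep embedding are assumptions inferred from the printed module names, not taken from the training code.

import torch
import torch.nn as nn
import torch.nn.functional as F

class ChannelRMSNorm(nn.Module):
    """RMS normalization over the channel dimension of an NCHW tensor (assumed layout)."""
    def __init__(self, num_channels, eps=1e-6):
        super().__init__()
        self.eps = eps
        self.weight = nn.Parameter(torch.ones(num_channels))

    def forward(self, x):
        inv_rms = x.pow(2).mean(dim=1, keepdim=True).add(self.eps).rsqrt()
        return x * inv_rms * self.weight.view(1, -1, 1, 1)

class ResnetBlock(nn.Module):
    def __init__(self, in_ch, out_ch, temb_ch=512, dropout=0.0):
        super().__init__()
        self.norm1 = ChannelRMSNorm(in_ch)
        self.conv1 = nn.Conv2d(in_ch, out_ch, 3, padding=1)
        self.temb_proj = nn.Conv2d(temb_ch, out_ch, 1)   # 1x1 projection of the timestep embedding
        self.norm2 = ChannelRMSNorm(out_ch)
        self.dropout = nn.Dropout(dropout)
        self.conv2 = nn.Conv2d(out_ch, out_ch, 3, padding=1)
        self.nin_shortcut = nn.Conv2d(in_ch, out_ch, 1) if in_ch != out_ch else nn.Identity()

    def forward(self, x, temb):                          # temb: (N, temb_ch, 1, 1)
        h = self.conv1(F.silu(self.norm1(x)))
        h = h + self.temb_proj(F.silu(temb))             # broadcast over H and W
        h = self.conv2(self.dropout(F.silu(self.norm2(h))))
        return self.nin_shortcut(x) + h

# Example: the first block of up.0 in the repr maps 256 -> 128 channels; the spatial size here is arbitrary.
block = ResnetBlock(256, 128)
out = block(torch.randn(2, 256, 32, 32), torch.randn(2, 512, 1, 1))
print(out.shape)                                         # torch.Size([2, 128, 32, 32])
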
[2025-02-26 18:56:16] FlowVAE Parameters: 55.53M
[2025-02-26 18:56:16] FlowVAE Trainable Parameters: 55.01M
[2025-02-26 18:56:16] Optimizer: AdamW, lr=0.0002, beta2=0.95
[2025-02-26 18:56:16] module.pos_embed.requires_grad : False
[2025-02-26 18:56:16] module.flow.conv_in.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.conv_in.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.block_1.norm1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.block_1.conv1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.block_1.conv1.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.block_1.temb_proj.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.block_1.temb_proj.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.block_1.norm2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.block_1.conv2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.block_1.conv2.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.attn_1.norm.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.attn_1.q.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.attn_1.q.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.attn_1.k.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.attn_1.k.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.attn_1.v.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.attn_1.v.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.attn_1.proj_out.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.attn_1.proj_out.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.block_2.norm1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.block_2.conv1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.block_2.conv1.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.block_2.temb_proj.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.block_2.temb_proj.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.block_2.norm2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.block_2.conv2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.mid.block_2.conv2.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.0.norm1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.0.conv1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.0.conv1.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.0.temb_proj.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.0.temb_proj.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.0.norm2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.0.conv2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.0.conv2.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.0.nin_shortcut.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.0.nin_shortcut.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.1.norm1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.1.conv1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.1.conv1.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.1.temb_proj.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.1.temb_proj.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.1.norm2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.1.conv2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.1.conv2.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.2.norm1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.2.conv1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.2.conv1.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.2.temb_proj.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.2.temb_proj.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.2.norm2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.2.conv2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.0.block.2.conv2.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.0.norm1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.0.conv1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.0.conv1.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.0.temb_proj.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.0.temb_proj.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.0.norm2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.0.conv2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.0.conv2.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.0.nin_shortcut.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.0.nin_shortcut.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.1.norm1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.1.conv1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.1.conv1.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.1.temb_proj.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.1.temb_proj.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.1.norm2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.1.conv2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.1.conv2.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.2.norm1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.2.conv1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.2.conv1.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.2.temb_proj.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.2.temb_proj.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.2.norm2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.2.conv2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.block.2.conv2.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.upsample.conv.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.1.upsample.conv.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.0.norm1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.0.conv1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.0.conv1.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.0.temb_proj.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.0.temb_proj.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.0.norm2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.0.conv2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.0.conv2.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.1.norm1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.1.conv1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.1.conv1.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.1.temb_proj.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.1.temb_proj.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.1.norm2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.1.conv2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.1.conv2.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.2.norm1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.2.conv1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.2.conv1.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.2.temb_proj.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.2.temb_proj.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.2.norm2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.2.conv2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.block.2.conv2.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.upsample.conv.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.2.upsample.conv.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.0.norm1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.0.conv1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.0.conv1.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.0.temb_proj.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.0.temb_proj.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.0.norm2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.0.conv2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.0.conv2.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.1.norm1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.1.conv1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.1.conv1.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.1.temb_proj.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.1.temb_proj.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.1.norm2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.1.conv2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.1.conv2.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.2.norm1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.2.conv1.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.2.conv1.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.2.temb_proj.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.2.temb_proj.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.2.norm2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.2.conv2.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.block.2.conv2.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.upsample.conv.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.up.3.upsample.conv.bias.requires_grad : True
[2025-02-26 18:56:16] module.flow.norm_out.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.conv_out.weight.requires_grad : True
[2025-02-26 18:56:16] module.flow.conv_out.bias.requires_grad : True
[2025-02-26 18:56:16] module.t_embedder.mlp.0.weight.requires_grad : True
[2025-02-26 18:56:16] module.t_embedder.mlp.0.bias.requires_grad : True
[2025-02-26 18:56:16] module.t_embedder.mlp.2.weight.requires_grad : True
[2025-02-26 18:56:16] module.t_embedder.mlp.2.bias.requires_grad : True
[2025-02-26 18:56:16] module.y_embedder.weight.requires_grad : True
[2025-02-26 18:56:16] module.y_embedder.bias.requires_grad : True
[2025-02-26 18:56:16] module.x_embedder.proj.weight.requires_grad : True
[2025-02-26 18:56:16] module.x_embedder.proj.bias.requires_grad : True
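
The optimizer line and the parameter dump above can be reproduced with a short, self-contained sketch like the one below: freeze the positional embedding, build AdamW over the remaining parameters with lr=2e-4 and beta2=0.95 (beta1=0.9 is assumed, the PyTorch default), and print each parameter's requires_grad flag. TinyFlowAE is a hypothetical stand-in, not the real FlowAE.

import torch
import torch.nn as nn

class TinyFlowAE(nn.Module):                    # hypothetical stand-in module
    def __init__(self):
        super().__init__()
        self.pos_embed = nn.Parameter(torch.zeros(1, 1024, 512))
        self.conv_in = nn.Conv2d(512, 512, 3, padding=1)

model = TinyFlowAE()
model.pos_embed.requires_grad_(False)           # matches "module.pos_embed.requires_grad : False"

optimizer = torch.optim.AdamW(
    (p for p in model.parameters() if p.requires_grad),
    lr=2e-4,
    betas=(0.9, 0.95),                          # beta2=0.95 as logged; beta1 assumed to be the default
)

for name, p in model.named_parameters():
    print(f"{name}.requires_grad : {p.requires_grad}")
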
[2025-02-26 18:56:17] Dataset contains 1,281,168 images (/data/checkpoints/LanguageBind/offline_feature/offline_sdvae_256_path/imagenet_train_256)
[2025-02-26 18:56:17] Batch size 64 per GPU, with a global batch size of 512
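
Back-of-the-envelope numbers implied by the dataset and batch-size lines above, assuming one training process per GPU and the 50,000-step budget from the config that follows:

global_batch, per_gpu_batch = 512, 64
num_gpus = global_batch // per_gpu_batch        # 8 GPUs
num_images = 1_281_168
samples_seen = 50_000 * global_batch            # 25,600,000 samples over the full run
epochs = samples_seen / num_images              # roughly 20 passes over ImageNet-1K train
print(num_gpus, samples_seen, round(epochs, 1))
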
[2025-02-26 18:56:17] Train config: {'ckpt_path': '/data/logs/flow/flowsdvae_50kx512_lgnm0p5/checkpoints/0050000.pt', 'data': {'raw_data_dir': '/data/OpenDataLab___ImageNet-1K/raw/ImageNet-1K/train', 'raw_val_data_dir': '/data/OpenDataLab___ImageNet-1K/raw/ImageNet-1K/val', 'data_path': '/data/checkpoints/LanguageBind/offline_feature/offline_sdvae_256_path/imagenet_train_256', 'fid_reference_file': '/data/checkpoints/VIRTUAL_imagenet256_labeled.npz', 'image_size': 256, 'num_classes': 1000, 'num_workers': 16, 'latent_norm': False, 'latent_multiplier': 0.18215}, 'vae': {'vae_type': 'FlowSDVAE', 'model_path': '/data/checkpoints/stabilityai/sd-vae-ft-ema/vae-ft-ema-560000-ema-pruned.safetensors', 'downsample_ratio': 8, 'multi_latent': False, 'add_y_to_x': False, 'norm_type': 'rmsnorm'}, 'model': {'model_type': 'DiT-S/2', 'use_qknorm': True, 'use_swiglu': True, 'use_rope': True, 'use_rmsnorm': True, 'in_chans': 4, 'use_checkpoint': False}, 'train': {'max_steps': 50000, 'global_batch_size': 512, 'global_seed': 0, 'output_dir': '../logs/flow/flowsdvae_50kx512_lgnm0p5', 'ckpt': None, 'log_every': 50, 'ckpt_every': 10000, 'eval_every': 10000, 'wandb': True, 'seed': 1234, 'precision': 'bf16', 'resume': False}, 'optimizer': {'lr': 0.0002, 'beta2': 0.95}, 'wandb': {'proj_name': 'flow', 'log_name': 'flowsdvae_50kx512_lgnm0p5', 'key': '953e958793b218efb850fa194e85843e2c3bd88b'}, 'scheduler': {'diffusion': False, 'transport': True}, 'diffusion': {'learn_sigma': True, 'diffusion_steps': 1000}, 'transport': {'path_type': 'Linear', 'prediction': 'velocity', 'loss_weight': None, 'sample_eps': None, 'train_eps': None, 'use_cosine_loss': True, 'use_lognorm': True}, 'sample': {'mode': 'ODE', 'sampling_method': 'euler', 'atol': 1e-06, 'rtol': 0.001, 'reverse': False, 'likelihood': False, 'num_sampling_steps': 250, 'cfg_scale': 1.0, 'per_proc_batch_size': 64, 'fid_num': 50000, 'cfg_interval_start': 0.0, 'timestep_shift': 0.0}, 'flowvae_transport': {'path_type': 'Linear', 'prediction': 'velocity', 'loss_weight': None, 'sample_eps': None, 'train_eps': None, 'use_cosine_loss': False, 'use_lognorm': True, 'l2_loss': True, 'shift_lg': True, 'shifted_mu': -0.5, 'timestep_sampling': 'lognorm', 'beta_alpha': None, 'beta_beta': None, 'pareto_alpha': None}, 'flowvae_sample': {'mode': 'ODE', 'sampling_method': 'euler', 'atol': 1e-06, 'rtol': 0.001, 'reverse': False, 'likelihood': False, 'num_sampling_steps': 25, 'cfg_scale': 1.0, 'per_proc_batch_size': 64, 'fid_num': 50000, 'cfg_interval_start': 0.0, 'timestep_shift': 0.0}}
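
The run name suffix "lgnm0p5" and the flowvae_transport settings above (timestep_sampling='lognorm', shift_lg=True, shifted_mu=-0.5) suggest timesteps are drawn from a shifted "lognorm" distribution. A hedged sketch under the logit-normal reading (t = sigmoid(x) with x ~ N(mu, sigma^2); sigma=1.0 is assumed, not read from the config):

import torch

def sample_t_lognorm(batch_size, mu=-0.5, sigma=1.0):
    """Draw t in (0, 1) from a logit-normal; a negative mu shifts mass toward small t."""
    x = mu + sigma * torch.randn(batch_size)
    return torch.sigmoid(x)

t = sample_t_lognorm(512)                       # one timestep per sample in a 512-item global batch
print(t.min().item(), t.mean().item(), t.max().item())
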
[2025-02-26 18:57:45] (step=0000050) Train Loss: 1.1629, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.57, Grad Norm: 3.2443
[2025-02-26 18:58:43] (step=0000100) Train Loss: 1.0715, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.9357
[2025-02-26 18:59:41] (step=0000150) Train Loss: 1.0476, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6402
[2025-02-26 19:00:39] (step=0000200) Train Loss: 1.0345, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3709
[2025-02-26 19:01:38] (step=0000250) Train Loss: 1.0280, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4170
[2025-02-26 19:02:36] (step=0000300) Train Loss: 1.0225, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4500
[2025-02-26 19:03:34] (step=0000350) Train Loss: 1.0107, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4733
[2025-02-26 19:04:32] (step=0000400) Train Loss: 0.9939, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5135
[2025-02-26 19:05:30] (step=0000450) Train Loss: 0.9489, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5852
[2025-02-26 19:06:28] (step=0000500) Train Loss: 0.8884, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.8405
[2025-02-26 19:07:26] (step=0000550) Train Loss: 0.7962, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.2389
[2025-02-26 19:08:25] (step=0000600) Train Loss: 0.7199, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.1532
[2025-02-26 19:09:23] (step=0000650) Train Loss: 0.6507, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.1803
[2025-02-26 19:10:21] (step=0000700) Train Loss: 0.5823, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.2698
[2025-02-26 19:11:19] (step=0000750) Train Loss: 0.5187, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.4629
[2025-02-26 19:12:17] (step=0000800) Train Loss: 0.4757, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.3941
[2025-02-26 19:13:15] (step=0000850) Train Loss: 0.4401, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.6051
[2025-02-26 19:14:13] (step=0000900) Train Loss: 0.3983, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.4758
[2025-02-26 19:15:11] (step=0000950) Train Loss: 0.3570, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.6150
[2025-02-26 19:16:10] (step=0001000) Train Loss: 0.3249, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.6819
[2025-02-26 19:17:08] (step=0001050) Train Loss: 0.3055, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.5646
[2025-02-26 19:18:06] (step=0001100) Train Loss: 0.2782, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.6182
[2025-02-26 19:19:04] (step=0001150) Train Loss: 0.2662, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.6245
[2025-02-26 19:20:02] (step=0001200) Train Loss: 0.2575, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.7183
[2025-02-26 19:21:00] (step=0001250) Train Loss: 0.2582, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.5557
[2025-02-26 19:21:58] (step=0001300) Train Loss: 0.2434, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.6552
[2025-02-26 19:22:56] (step=0001350) Train Loss: 0.2364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.3161
[2025-02-26 19:23:55] (step=0001400) Train Loss: 0.2348, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.5042
[2025-02-26 19:24:53] (step=0001450) Train Loss: 0.2277, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.4224
[2025-02-26 19:25:51] (step=0001500) Train Loss: 0.2259, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.5836
[2025-02-26 19:26:49] (step=0001550) Train Loss: 0.2221, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.3495
[2025-02-26 19:27:47] (step=0001600) Train Loss: 0.2186, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.4405
[2025-02-26 19:28:45] (step=0001650) Train Loss: 0.2178, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.4377
[2025-02-26 19:29:43] (step=0001700) Train Loss: 0.2125, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.3068
[2025-02-26 19:30:41] (step=0001750) Train Loss: 0.2073, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.3426
[2025-02-26 19:31:40] (step=0001800) Train Loss: 0.2060, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.3475
[2025-02-26 19:32:38] (step=0001850) Train Loss: 0.2193, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.4895
[2025-02-26 19:33:36] (step=0001900) Train Loss: 0.2020, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.2940
[2025-02-26 19:34:34] (step=0001950) Train Loss: 0.2011, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.3538
[2025-02-26 19:35:32] (step=0002000) Train Loss: 0.2023, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.5260
[2025-02-26 19:36:30] (step=0002050) Train Loss: 0.1970, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.1735
[2025-02-26 19:37:28] (step=0002100) Train Loss: 0.1946, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.2574
[2025-02-26 19:38:26] (step=0002150) Train Loss: 0.1945, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.1754
[2025-02-26 19:39:25] (step=0002200) Train Loss: 0.1892, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.2866
[2025-02-26 19:40:23] (step=0002250) Train Loss: 0.1865, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.2055
[2025-02-26 19:41:21] (step=0002300) Train Loss: 0.1848, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.2157
[2025-02-26 19:42:19] (step=0002350) Train Loss: 0.1827, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.2036
[2025-02-26 19:43:17] (step=0002400) Train Loss: 0.1819, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.1909
[2025-02-26 19:44:15] (step=0002450) Train Loss: 0.1816, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.2173
[2025-02-26 19:45:13] (step=0002500) Train Loss: 0.1791, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.1995
[2025-02-26 19:46:13] (step=0002550) Train Loss: 0.1765, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.83, Grad Norm: 1.1786
[2025-02-26 19:47:12] (step=0002600) Train Loss: 0.1795, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.1944
[2025-02-26 19:48:10] (step=0002650) Train Loss: 0.1764, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.1169
[2025-02-26 19:49:08] (step=0002700) Train Loss: 0.1726, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.0733
[2025-02-26 19:50:06] (step=0002750) Train Loss: 0.1711, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.1132
[2025-02-26 19:51:04] (step=0002800) Train Loss: 0.1704, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.1612
[2025-02-26 19:52:02] (step=0002850) Train Loss: 0.1700, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.0728
[2025-02-26 19:53:00] (step=0002900) Train Loss: 0.1674, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.0714
[2025-02-26 19:53:58] (step=0002950) Train Loss: 0.1661, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.1566
[2025-02-26 19:54:56] (step=0003000) Train Loss: 0.1642, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.1431
[2025-02-26 19:55:55] (step=0003050) Train Loss: 0.1621, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.0572
[2025-02-26 19:56:53] (step=0003100) Train Loss: 0.1599, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.1324
[2025-02-26 19:57:51] (step=0003150) Train Loss: 0.1577, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.0678
[2025-02-26 19:58:49] (step=0003200) Train Loss: 0.1543, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.0538
[2025-02-26 19:59:47] (step=0003250) Train Loss: 0.1563, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.2765
[2025-02-26 20:00:45] (step=0003300) Train Loss: 0.1600, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.2078
[2025-02-26 20:01:43] (step=0003350) Train Loss: 0.1507, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.0590
[2025-02-26 20:02:41] (step=0003400) Train Loss: 0.1517, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.1280
[2025-02-26 20:03:39] (step=0003450) Train Loss: 0.1480, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.0681
[2025-02-26 20:04:37] (step=0003500) Train Loss: 0.1480, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.0666
[2025-02-26 20:05:36] (step=0003550) Train Loss: 0.1466, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.0098
[2025-02-26 20:06:34] (step=0003600) Train Loss: 0.1452, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.9963
[2025-02-26 20:07:32] (step=0003650) Train Loss: 0.1504, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.1434
[2025-02-26 20:08:30] (step=0003700) Train Loss: 0.1447, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.9537
[2025-02-26 20:09:28] (step=0003750) Train Loss: 0.1440, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.0469
[2025-02-26 20:10:26] (step=0003800) Train Loss: 0.1410, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.9429
[2025-02-26 20:11:24] (step=0003850) Train Loss: 0.1415, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.0595
[2025-02-26 20:12:22] (step=0003900) Train Loss: 0.1394, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.9024
[2025-02-26 20:13:20] (step=0003950) Train Loss: 0.1386, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.0402
[2025-02-26 20:14:18] (step=0004000) Train Loss: 0.1359, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.0840
[2025-02-26 20:15:17] (step=0004050) Train Loss: 0.1298, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.9310
[2025-02-26 20:16:15] (step=0004100) Train Loss: 0.1266, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.9506
[2025-02-26 20:17:13] (step=0004150) Train Loss: 0.1210, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 1.0007
[2025-02-26 20:18:11] (step=0004200) Train Loss: 0.1170, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.8520
[2025-02-26 20:19:09] (step=0004250) Train Loss: 0.1140, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.8831
[2025-02-26 20:20:07] (step=0004300) Train Loss: 0.1130, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.9171
[2025-02-26 20:21:05] (step=0004350) Train Loss: 0.1107, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.9133
[2025-02-26 20:22:03] (step=0004400) Train Loss: 0.1102, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.8802
[2025-02-26 20:23:01] (step=0004450) Train Loss: 0.1070, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.8510
[2025-02-26 20:23:59] (step=0004500) Train Loss: 0.1058, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.9116
[2025-02-26 20:24:58] (step=0004550) Train Loss: 0.1064, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.7968
[2025-02-26 20:25:56] (step=0004600) Train Loss: 0.1035, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.9315
[2025-02-26 20:26:54] (step=0004650) Train Loss: 0.1007, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.8342
[2025-02-26 20:27:52] (step=0004700) Train Loss: 0.0987, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.8616
[2025-02-26 20:28:50] (step=0004750) Train Loss: 0.0973, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.8313
[2025-02-26 20:29:48] (step=0004800) Train Loss: 0.0961, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.8699
[2025-02-26 20:30:46] (step=0004850) Train Loss: 0.0947, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.8303
[2025-02-26 20:31:44] (step=0004900) Train Loss: 0.0943, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.8136
[2025-02-26 20:32:42] (step=0004950) Train Loss: 0.0922, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.8209
[2025-02-26 20:33:41] (step=0005000) Train Loss: 0.0903, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.7247
[2025-02-26 20:34:41] (step=0005050) Train Loss: 0.0905, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.83, Grad Norm: 0.7793
[2025-02-26 20:35:39] (step=0005100) Train Loss: 0.0900, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.7754
[2025-02-26 20:36:37] (step=0005150) Train Loss: 0.0895, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.8683
[2025-02-26 20:37:35] (step=0005200) Train Loss: 0.0872, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.7289
[2025-02-26 20:38:33] (step=0005250) Train Loss: 0.0862, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.7395
[2025-02-26 20:39:31] (step=0005300) Train Loss: 0.0862, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.7721
[2025-02-26 20:40:29] (step=0005350) Train Loss: 0.0857, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.7593
[2025-02-26 20:41:27] (step=0005400) Train Loss: 0.0850, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.8029
[2025-02-26 20:42:25] (step=0005450) Train Loss: 0.0843, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.7242
[2025-02-26 20:43:24] (step=0005500) Train Loss: 0.0851, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6754
[2025-02-26 20:44:22] (step=0005550) Train Loss: 0.0826, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.7181
[2025-02-26 20:45:20] (step=0005600) Train Loss: 0.0835, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.7320
[2025-02-26 20:46:18] (step=0005650) Train Loss: 0.0831, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.7125
[2025-02-26 20:47:16] (step=0005700) Train Loss: 0.0818, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.7122
[2025-02-26 20:48:14] (step=0005750) Train Loss: 0.0784, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.7258
[2025-02-26 20:49:12] (step=0005800) Train Loss: 0.0754, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6746
[2025-02-26 20:50:10] (step=0005850) Train Loss: 0.0717, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6990
[2025-02-26 20:51:09] (step=0005900) Train Loss: 0.0687, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6755
[2025-02-26 20:52:07] (step=0005950) Train Loss: 0.0664, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6843
[2025-02-26 20:53:05] (step=0006000) Train Loss: 0.0652, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6628
[2025-02-26 20:54:03] (step=0006050) Train Loss: 0.0641, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6740
[2025-02-26 20:55:01] (step=0006100) Train Loss: 0.0627, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6522
[2025-02-26 20:55:59] (step=0006150) Train Loss: 0.0615, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6744
[2025-02-26 20:56:57] (step=0006200) Train Loss: 0.0602, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6427
[2025-02-26 20:57:56] (step=0006250) Train Loss: 0.0597, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6698
[2025-02-26 20:58:54] (step=0006300) Train Loss: 0.0596, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6330
[2025-02-26 20:59:52] (step=0006350) Train Loss: 0.0590, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6499
[2025-02-26 21:00:50] (step=0006400) Train Loss: 0.0589, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6785
[2025-02-26 21:01:48] (step=0006450) Train Loss: 0.0575, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6372
[2025-02-26 21:02:46] (step=0006500) Train Loss: 0.0568, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6445
[2025-02-26 21:03:44] (step=0006550) Train Loss: 0.0569, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6394
[2025-02-26 21:04:42] (step=0006600) Train Loss: 0.0565, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6586
[2025-02-26 21:05:41] (step=0006650) Train Loss: 0.0559, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6470
[2025-02-26 21:06:39] (step=0006700) Train Loss: 0.0550, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6304
[2025-02-26 21:07:37] (step=0006750) Train Loss: 0.0551, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6592
[2025-02-26 21:08:35] (step=0006800) Train Loss: 0.0543, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6052
[2025-02-26 21:09:33] (step=0006850) Train Loss: 0.0546, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6265
[2025-02-26 21:10:31] (step=0006900) Train Loss: 0.0538, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6321
[2025-02-26 21:11:29] (step=0006950) Train Loss: 0.0539, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6415
[2025-02-26 21:12:28] (step=0007000) Train Loss: 0.0532, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6089
[2025-02-26 21:13:26] (step=0007050) Train Loss: 0.0535, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6583
[2025-02-26 21:14:24] (step=0007100) Train Loss: 0.0529, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6382
[2025-02-26 21:15:22] (step=0007150) Train Loss: 0.0533, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6301
[2025-02-26 21:16:20] (step=0007200) Train Loss: 0.0524, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6056
[2025-02-26 21:17:18] (step=0007250) Train Loss: 0.0530, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6062
[2025-02-26 21:18:16] (step=0007300) Train Loss: 0.0523, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6458
[2025-02-26 21:19:15] (step=0007350) Train Loss: 0.0522, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5763
[2025-02-26 21:20:13] (step=0007400) Train Loss: 0.0519, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6197
[2025-02-26 21:21:11] (step=0007450) Train Loss: 0.0515, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5912
[2025-02-26 21:22:09] (step=0007500) Train Loss: 0.0510, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6195
[2025-02-26 21:23:09] (step=0007550) Train Loss: 0.0507, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.83, Grad Norm: 0.6257
[2025-02-26 21:24:07] (step=0007600) Train Loss: 0.0516, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6095
[2025-02-26 21:25:05] (step=0007650) Train Loss: 0.0506, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5868
[2025-02-26 21:26:03] (step=0007700) Train Loss: 0.0500, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5932
[2025-02-26 21:27:02] (step=0007750) Train Loss: 0.0503, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6129
[2025-02-26 21:28:00] (step=0007800) Train Loss: 0.0496, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6134
[2025-02-26 21:28:58] (step=0007850) Train Loss: 0.0498, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5728
[2025-02-26 21:29:56] (step=0007900) Train Loss: 0.0501, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6074
[2025-02-26 21:30:54] (step=0007950) Train Loss: 0.0496, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.6080
[2025-02-26 21:31:52] (step=0008000) Train Loss: 0.0493, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5980
[2025-02-26 21:32:50] (step=0008050) Train Loss: 0.0487, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5823
[2025-02-26 21:33:48] (step=0008100) Train Loss: 0.0493, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5869
[2025-02-26 21:34:47] (step=0008150) Train Loss: 0.0490, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5740
[2025-02-26 21:35:45] (step=0008200) Train Loss: 0.0489, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5687
[2025-02-26 21:36:43] (step=0008250) Train Loss: 0.0491, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5443
[2025-02-26 21:37:41] (step=0008300) Train Loss: 0.0485, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5606
[2025-02-26 21:38:40] (step=0008350) Train Loss: 0.0490, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5680
[2025-02-26 21:39:38] (step=0008400) Train Loss: 0.0483, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5951
[2025-02-26 21:40:36] (step=0008450) Train Loss: 0.0484, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5601
[2025-02-26 21:41:35] (step=0008500) Train Loss: 0.0481, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5766
[2025-02-26 21:42:33] (step=0008550) Train Loss: 0.0481, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5747
[2025-02-26 21:43:31] (step=0008600) Train Loss: 0.0480, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5568
[2025-02-26 21:44:29] (step=0008650) Train Loss: 0.0480, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5806
[2025-02-26 21:45:28] (step=0008700) Train Loss: 0.0478, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5532
[2025-02-26 21:46:26] (step=0008750) Train Loss: 0.0482, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5727
[2025-02-26 21:47:24] (step=0008800) Train Loss: 0.0476, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5353
[2025-02-26 21:48:23] (step=0008850) Train Loss: 0.0477, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5958
[2025-02-26 21:49:21] (step=0008900) Train Loss: 0.0475, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5398
[2025-02-26 21:50:19] (step=0008950) Train Loss: 0.0474, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5373
[2025-02-26 21:51:17] (step=0009000) Train Loss: 0.0472, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5413
[2025-02-26 21:52:16] (step=0009050) Train Loss: 0.0473, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5398
[2025-02-26 21:53:14] (step=0009100) Train Loss: 0.0474, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5584
[2025-02-26 21:54:12] (step=0009150) Train Loss: 0.0467, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4958
[2025-02-26 21:55:11] (step=0009200) Train Loss: 0.0465, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5409
[2025-02-26 21:56:09] (step=0009250) Train Loss: 0.0468, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5439
[2025-02-26 21:57:07] (step=0009300) Train Loss: 0.0462, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5189
[2025-02-26 21:58:06] (step=0009350) Train Loss: 0.0472, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5177
[2025-02-26 21:59:04] (step=0009400) Train Loss: 0.0470, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5690
[2025-02-26 22:00:02] (step=0009450) Train Loss: 0.0461, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5096
[2025-02-26 22:01:00] (step=0009500) Train Loss: 0.0456, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5387
[2025-02-26 22:01:59] (step=0009550) Train Loss: 0.0467, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5180
[2025-02-26 22:02:57] (step=0009600) Train Loss: 0.0458, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5554
[2025-02-26 22:03:55] (step=0009650) Train Loss: 0.0460, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5097
[2025-02-26 22:04:54] (step=0009700) Train Loss: 0.0453, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5227
[2025-02-26 22:05:52] (step=0009750) Train Loss: 0.0463, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5070
[2025-02-26 22:06:50] (step=0009800) Train Loss: 0.0459, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5167
[2025-02-26 22:07:48] (step=0009850) Train Loss: 0.0461, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5014
[2025-02-26 22:08:47] (step=0009900) Train Loss: 0.0455, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5532
[2025-02-26 22:09:45] (step=0009950) Train Loss: 0.0460, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4872
[2025-02-26 22:10:43] (step=0010000) Train Loss: 0.0463, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5271
[2025-02-26 22:10:46] Saved checkpoint to ../logs/flow/flowsdvae_50kx512_lgnm0p5/checkpoints/0010000.pt
[2025-02-26 22:32:36] (step=0010000), Fid=153.80793313594518, PSNR=9.608041372811794, LPIPS=0.796875, SSIM=0.02934611588716507
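
Of the metrics reported in the evaluation line above, PSNR is simple enough to sketch directly; SSIM, LPIPS, and FID come from dedicated packages in practice and are not reimplemented here. The tensors below are random placeholders, not actual reconstructions.

import torch

def psnr(x, y, max_val=1.0):
    """Peak signal-to-noise ratio (dB) between image batches scaled to [0, max_val]."""
    mse = torch.mean((x - y) ** 2)
    return 10.0 * torch.log10(max_val ** 2 / mse)

recon = torch.rand(8, 3, 256, 256)              # placeholder reconstructions
ref = torch.rand(8, 3, 256, 256)                # placeholder references
print(f"PSNR={psnr(recon, ref).item():.2f} dB")
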
[2025-02-26 22:33:37] (step=0010050) Train Loss: 0.0459, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.04, Grad Norm: 0.5262
[2025-02-26 22:34:35] (step=0010100) Train Loss: 0.0454, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.85, Grad Norm: 0.4986
[2025-02-26 22:35:34] (step=0010150) Train Loss: 0.0453, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.85, Grad Norm: 0.5115
[2025-02-26 22:36:32] (step=0010200) Train Loss: 0.0456, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.85, Grad Norm: 0.4983
[2025-02-26 22:37:31] (step=0010250) Train Loss: 0.0452, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5066
[2025-02-26 22:38:29] (step=0010300) Train Loss: 0.0455, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4868
[2025-02-26 22:39:28] (step=0010350) Train Loss: 0.0447, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5167
[2025-02-26 22:40:26] (step=0010400) Train Loss: 0.0456, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4831
[2025-02-26 22:41:24] (step=0010450) Train Loss: 0.0446, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5118
[2025-02-26 22:42:23] (step=0010500) Train Loss: 0.0450, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4823
[2025-02-26 22:43:21] (step=0010550) Train Loss: 0.0449, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5177
[2025-02-26 22:44:20] (step=0010600) Train Loss: 0.0450, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4810
[2025-02-26 22:45:18] (step=0010650) Train Loss: 0.0444, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4944
[2025-02-26 22:46:16] (step=0010700) Train Loss: 0.0449, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5213
[2025-02-26 22:47:15] (step=0010750) Train Loss: 0.0447, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4987
[2025-02-26 22:48:13] (step=0010800) Train Loss: 0.0450, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4801
[2025-02-26 22:49:12] (step=0010850) Train Loss: 0.0444, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4888
[2025-02-26 22:50:10] (step=0010900) Train Loss: 0.0450, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4830
[2025-02-26 22:51:08] (step=0010950) Train Loss: 0.0448, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4766
[2025-02-26 22:52:07] (step=0011000) Train Loss: 0.0445, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4792
[2025-02-26 22:53:05] (step=0011050) Train Loss: 0.0446, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5139
[2025-02-26 22:54:04] (step=0011100) Train Loss: 0.0444, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4613
[2025-02-26 22:55:02] (step=0011150) Train Loss: 0.0441, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4785
[2025-02-26 22:56:00] (step=0011200) Train Loss: 0.0448, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5055
[2025-02-26 22:56:59] (step=0011250) Train Loss: 0.0442, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4823
[2025-02-26 22:57:57] (step=0011300) Train Loss: 0.0439, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4632
[2025-02-26 22:58:56] (step=0011350) Train Loss: 0.0440, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4690
[2025-02-26 22:59:54] (step=0011400) Train Loss: 0.0441, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4554
[2025-02-26 23:00:52] (step=0011450) Train Loss: 0.0436, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4905
[2025-02-26 23:01:51] (step=0011500) Train Loss: 0.0440, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4662
[2025-02-26 23:02:49] (step=0011550) Train Loss: 0.0437, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4514
[2025-02-26 23:03:48] (step=0011600) Train Loss: 0.0440, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4759
[2025-02-26 23:04:46] (step=0011650) Train Loss: 0.0438, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4346
[2025-02-26 23:05:44] (step=0011700) Train Loss: 0.0439, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4704
[2025-02-26 23:06:43] (step=0011750) Train Loss: 0.0439, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.5000
[2025-02-26 23:07:41] (step=0011800) Train Loss: 0.0437, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4593
[2025-02-26 23:08:40] (step=0011850) Train Loss: 0.0440, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4541
[2025-02-26 23:09:38] (step=0011900) Train Loss: 0.0435, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4369
[2025-02-26 23:10:37] (step=0011950) Train Loss: 0.0437, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4618
[2025-02-26 23:11:35] (step=0012000) Train Loss: 0.0437, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4826
[2025-02-26 23:12:33] (step=0012050) Train Loss: 0.0432, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4776
[2025-02-26 23:13:32] (step=0012100) Train Loss: 0.0435, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4421
[2025-02-26 23:14:30] (step=0012150) Train Loss: 0.0439, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4671
[2025-02-26 23:15:29] (step=0012200) Train Loss: 0.0429, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4612
[2025-02-26 23:16:27] (step=0012250) Train Loss: 0.0429, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4314
[2025-02-26 23:17:25] (step=0012300) Train Loss: 0.0430, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4784
[2025-02-26 23:18:24] (step=0012350) Train Loss: 0.0433, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4373
[2025-02-26 23:19:22] (step=0012400) Train Loss: 0.0431, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4537
[2025-02-26 23:20:21] (step=0012450) Train Loss: 0.0432, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4564
[2025-02-26 23:21:19] (step=0012500) Train Loss: 0.0429, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4467
[2025-02-26 23:22:20] (step=0012550) Train Loss: 0.0429, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.83, Grad Norm: 0.4076
[2025-02-26 23:23:18] (step=0012600) Train Loss: 0.0430, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4387
[2025-02-26 23:24:16] (step=0012650) Train Loss: 0.0426, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4192
[2025-02-26 23:25:15] (step=0012700) Train Loss: 0.0434, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4632
[2025-02-26 23:26:13] (step=0012750) Train Loss: 0.0431, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4622
[2025-02-26 23:27:12] (step=0012800) Train Loss: 0.0433, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4512
[2025-02-26 23:28:10] (step=0012850) Train Loss: 0.0429, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4312
[2025-02-26 23:29:08] (step=0012900) Train Loss: 0.0424, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4470
[2025-02-26 23:30:07] (step=0012950) Train Loss: 0.0429, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4344
[2025-02-26 23:31:05] (step=0013000) Train Loss: 0.0430, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4397
[2025-02-26 23:32:04] (step=0013050) Train Loss: 0.0427, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4280
[2025-02-26 23:33:02] (step=0013100) Train Loss: 0.0424, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4210
[2025-02-26 23:34:00] (step=0013150) Train Loss: 0.0433, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4395
[2025-02-26 23:34:59] (step=0013200) Train Loss: 0.0424, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4414
[2025-02-26 23:35:57] (step=0013250) Train Loss: 0.0421, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4437
[2025-02-26 23:36:56] (step=0013300) Train Loss: 0.0422, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3912
[2025-02-26 23:37:54] (step=0013350) Train Loss: 0.0426, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4408
[2025-02-26 23:38:52] (step=0013400) Train Loss: 0.0425, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4200
[2025-02-26 23:39:51] (step=0013450) Train Loss: 0.0424, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4088
[2025-02-26 23:40:49] (step=0013500) Train Loss: 0.0421, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4519
[2025-02-26 23:41:48] (step=0013550) Train Loss: 0.0421, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4324
[2025-02-26 23:42:46] (step=0013600) Train Loss: 0.0423, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4277
[2025-02-26 23:43:44] (step=0013650) Train Loss: 0.0425, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4424
[2025-02-26 23:44:43] (step=0013700) Train Loss: 0.0427, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4128
[2025-02-26 23:45:41] (step=0013750) Train Loss: 0.0424, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4282
[2025-02-26 23:46:40] (step=0013800) Train Loss: 0.0423, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4108
[2025-02-26 23:47:38] (step=0013850) Train Loss: 0.0422, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4321
[2025-02-26 23:48:36] (step=0013900) Train Loss: 0.0422, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3989
[2025-02-26 23:49:35] (step=0013950) Train Loss: 0.0424, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4103
[2025-02-26 23:50:33] (step=0014000) Train Loss: 0.0416, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4256
[2025-02-26 23:51:32] (step=0014050) Train Loss: 0.0419, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3907
[2025-02-26 23:52:30] (step=0014100) Train Loss: 0.0425, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4475
[2025-02-26 23:53:28] (step=0014150) Train Loss: 0.0420, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3960
[2025-02-26 23:54:27] (step=0014200) Train Loss: 0.0420, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4109
[2025-02-26 23:55:25] (step=0014250) Train Loss: 0.0415, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4212
[2025-02-26 23:56:24] (step=0014300) Train Loss: 0.0420, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4180
[2025-02-26 23:57:22] (step=0014350) Train Loss: 0.0417, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3948
[2025-02-26 23:58:20] (step=0014400) Train Loss: 0.0413, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4294
[2025-02-26 23:59:19] (step=0014450) Train Loss: 0.0422, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4113
[2025-02-27 00:00:17] (step=0014500) Train Loss: 0.0417, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4005
[2025-02-27 00:01:16] (step=0014550) Train Loss: 0.0414, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3997
[2025-02-27 00:02:14] (step=0014600) Train Loss: 0.0421, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4155
[2025-02-27 00:03:13] (step=0014650) Train Loss: 0.0420, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4327
[2025-02-27 00:04:11] (step=0014700) Train Loss: 0.0415, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3945
[2025-02-27 00:05:09] (step=0014750) Train Loss: 0.0420, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4190
[2025-02-27 00:06:08] (step=0014800) Train Loss: 0.0416, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4114
[2025-02-27 00:07:06] (step=0014850) Train Loss: 0.0422, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4042
[2025-02-27 00:08:05] (step=0014900) Train Loss: 0.0415, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4031
[2025-02-27 00:09:03] (step=0014950) Train Loss: 0.0418, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4001
[2025-02-27 00:10:01] (step=0015000) Train Loss: 0.0414, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4256
[2025-02-27 00:11:02] (step=0015050) Train Loss: 0.0417, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.83, Grad Norm: 0.3731
[2025-02-27 00:12:00] (step=0015100) Train Loss: 0.0412, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3965
[2025-02-27 00:12:58] (step=0015150) Train Loss: 0.0413, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3801
[2025-02-27 00:13:56] (step=0015200) Train Loss: 0.0418, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4173
[2025-02-27 00:14:55] (step=0015250) Train Loss: 0.0414, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4077
[2025-02-27 00:15:53] (step=0015300) Train Loss: 0.0415, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3939
[2025-02-27 00:16:51] (step=0015350) Train Loss: 0.0413, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3843
[2025-02-27 00:17:49] (step=0015400) Train Loss: 0.0410, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4150
[2025-02-27 00:18:47] (step=0015450) Train Loss: 0.0416, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3772
[2025-02-27 00:19:46] (step=0015500) Train Loss: 0.0411, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3739
[2025-02-27 00:20:44] (step=0015550) Train Loss: 0.0418, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4028
[2025-02-27 00:21:42] (step=0015600) Train Loss: 0.0414, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4036
[2025-02-27 00:22:40] (step=0015650) Train Loss: 0.0411, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3839
[2025-02-27 00:23:38] (step=0015700) Train Loss: 0.0409, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3843
[2025-02-27 00:24:37] (step=0015750) Train Loss: 0.0414, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4007
[2025-02-27 00:25:35] (step=0015800) Train Loss: 0.0416, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3758
[2025-02-27 00:26:33] (step=0015850) Train Loss: 0.0415, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3909
[2025-02-27 00:27:31] (step=0015900) Train Loss: 0.0410, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3906
[2025-02-27 00:28:29] (step=0015950) Train Loss: 0.0412, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3800
[2025-02-27 00:29:28] (step=0016000) Train Loss: 0.0414, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3763
[2025-02-27 00:30:26] (step=0016050) Train Loss: 0.0407, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3918
[2025-02-27 00:31:24] (step=0016100) Train Loss: 0.0415, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3815
[2025-02-27 00:32:22] (step=0016150) Train Loss: 0.0410, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3733
[2025-02-27 00:33:20] (step=0016200) Train Loss: 0.0416, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3757
[2025-02-27 00:34:19] (step=0016250) Train Loss: 0.0407, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3752
[2025-02-27 00:35:17] (step=0016300) Train Loss: 0.0411, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3680
[2025-02-27 00:36:15] (step=0016350) Train Loss: 0.0411, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3717
[2025-02-27 00:37:13] (step=0016400) Train Loss: 0.0406, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3827
[2025-02-27 00:38:11] (step=0016450) Train Loss: 0.0410, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3833
[2025-02-27 00:39:09] (step=0016500) Train Loss: 0.0412, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3512
[2025-02-27 00:40:08] (step=0016550) Train Loss: 0.0407, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3651
[2025-02-27 00:41:06] (step=0016600) Train Loss: 0.0412, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.4038
[2025-02-27 00:42:04] (step=0016650) Train Loss: 0.0413, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3708
[2025-02-27 00:43:02] (step=0016700) Train Loss: 0.0410, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3914
[2025-02-27 00:44:00] (step=0016750) Train Loss: 0.0405, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3610
[2025-02-27 00:44:59] (step=0016800) Train Loss: 0.0406, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3677
[2025-02-27 00:45:57] (step=0016850) Train Loss: 0.0413, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3597
[2025-02-27 00:46:55] (step=0016900) Train Loss: 0.0407, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3675
[2025-02-27 00:47:53] (step=0016950) Train Loss: 0.0409, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3616
[2025-02-27 00:48:51] (step=0017000) Train Loss: 0.0408, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3597
[2025-02-27 00:49:49] (step=0017050) Train Loss: 0.0406, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3676
[2025-02-27 00:50:48] (step=0017100) Train Loss: 0.0410, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3636
[2025-02-27 00:51:46] (step=0017150) Train Loss: 0.0406, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3848
[2025-02-27 00:52:44] (step=0017200) Train Loss: 0.0410, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3593
[2025-02-27 00:53:42] (step=0017250) Train Loss: 0.0406, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3604
[2025-02-27 00:54:40] (step=0017300) Train Loss: 0.0405, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3744
[2025-02-27 00:55:38] (step=0017350) Train Loss: 0.0404, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3702
[2025-02-27 00:56:36] (step=0017400) Train Loss: 0.0410, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3886
[2025-02-27 00:57:35] (step=0017450) Train Loss: 0.0410, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3538
[2025-02-27 00:58:33] (step=0017500) Train Loss: 0.0404, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3755
[2025-02-27 00:59:33] (step=0017550) Train Loss: 0.0405, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.83, Grad Norm: 0.3359
[2025-02-27 01:00:32] (step=0017600) Train Loss: 0.0403, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3675
[2025-02-27 01:01:30] (step=0017650) Train Loss: 0.0407, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3586
[2025-02-27 01:02:28] (step=0017700) Train Loss: 0.0403, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3682
[2025-02-27 01:03:26] (step=0017750) Train Loss: 0.0407, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3525
[2025-02-27 01:04:24] (step=0017800) Train Loss: 0.0404, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3841
[2025-02-27 01:05:23] (step=0017850) Train Loss: 0.0407, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3530
[2025-02-27 01:06:21] (step=0017900) Train Loss: 0.0406, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3534
[2025-02-27 01:07:19] (step=0017950) Train Loss: 0.0407, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3737
[2025-02-27 01:08:17] (step=0018000) Train Loss: 0.0407, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3361
[2025-02-27 01:09:15] (step=0018050) Train Loss: 0.0409, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3583
[2025-02-27 01:10:14] (step=0018100) Train Loss: 0.0403, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3610
[2025-02-27 01:11:12] (step=0018150) Train Loss: 0.0409, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3627
[2025-02-27 01:12:10] (step=0018200) Train Loss: 0.0405, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3363
[2025-02-27 01:13:08] (step=0018250) Train Loss: 0.0404, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3372
[2025-02-27 01:14:06] (step=0018300) Train Loss: 0.0405, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3427
[2025-02-27 01:15:04] (step=0018350) Train Loss: 0.0406, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3523
[2025-02-27 01:16:03] (step=0018400) Train Loss: 0.0400, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3303
[2025-02-27 01:17:01] (step=0018450) Train Loss: 0.0409, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3501
[2025-02-27 01:17:59] (step=0018500) Train Loss: 0.0403, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3526
[2025-02-27 01:18:57] (step=0018550) Train Loss: 0.0400, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3347
[2025-02-27 01:19:55] (step=0018600) Train Loss: 0.0404, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3235
[2025-02-27 01:20:53] (step=0018650) Train Loss: 0.0401, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3480
[2025-02-27 01:21:52] (step=0018700) Train Loss: 0.0398, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3592
[2025-02-27 01:22:50] (step=0018750) Train Loss: 0.0399, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3238
[2025-02-27 01:23:48] (step=0018800) Train Loss: 0.0399, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3390
[2025-02-27 01:24:46] (step=0018850) Train Loss: 0.0401, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3674
[2025-02-27 01:25:44] (step=0018900) Train Loss: 0.0398, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3580
[2025-02-27 01:26:42] (step=0018950) Train Loss: 0.0404, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3365
[2025-02-27 01:27:40] (step=0019000) Train Loss: 0.0405, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3822
[2025-02-27 01:28:39] (step=0019050) Train Loss: 0.0402, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3528
[2025-02-27 01:29:37] (step=0019100) Train Loss: 0.0400, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3446
[2025-02-27 01:30:35] (step=0019150) Train Loss: 0.0402, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3431
[2025-02-27 01:31:33] (step=0019200) Train Loss: 0.0401, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3450
[2025-02-27 01:32:31] (step=0019250) Train Loss: 0.0397, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3238
[2025-02-27 01:33:29] (step=0019300) Train Loss: 0.0401, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3503
[2025-02-27 01:34:27] (step=0019350) Train Loss: 0.0401, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3494
[2025-02-27 01:35:25] (step=0019400) Train Loss: 0.0399, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3330
[2025-02-27 01:36:24] (step=0019450) Train Loss: 0.0403, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3426
[2025-02-27 01:37:22] (step=0019500) Train Loss: 0.0394, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3169
[2025-02-27 01:38:20] (step=0019550) Train Loss: 0.0398, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3495
[2025-02-27 01:39:18] (step=0019600) Train Loss: 0.0399, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3264
[2025-02-27 01:40:16] (step=0019650) Train Loss: 0.0395, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3467
[2025-02-27 01:41:14] (step=0019700) Train Loss: 0.0400, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3462
[2025-02-27 01:42:12] (step=0019750) Train Loss: 0.0394, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3373
[2025-02-27 01:43:11] (step=0019800) Train Loss: 0.0396, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3238
[2025-02-27 01:44:09] (step=0019850) Train Loss: 0.0393, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3521
[2025-02-27 01:45:07] (step=0019900) Train Loss: 0.0396, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3231
[2025-02-27 01:46:05] (step=0019950) Train Loss: 0.0397, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3335
[2025-02-27 01:47:03] (step=0020000) Train Loss: 0.0402, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3363
[2025-02-27 01:47:06] Saved checkpoint to ../logs/flow/flowsdvae_50kx512_lgnm0p5/checkpoints/0020000.pt
[2025-02-27 02:05:34] (step=0020000), Fid=36.15707260139334, PSNR=15.6144161683321, LPIPS=0.6015625, SSIM=0.12831248342990875
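Note: the lines above show one full checkpoint/eval cycle — a checkpoint is written at step 20000 and the held-out metrics (FID, PSNR, LPIPS, SSIM) are reported roughly eighteen minutes later, which is why the next entry's Train Steps/Sec briefly drops to 0.04. The snippet below is a minimal, hypothetical helper (not part of the training code) for turning log lines in the format shown above into numeric series; the file path and function names are illustrative only.

# Hypothetical helper, assuming only the line format visible in this log.
# The default path "log.txt" is illustrative.
import re

LINE_RE = re.compile(r"\(step=(\d+)\) Train Loss: ([0-9.]+).*Grad Norm: ([0-9.]+)")

def parse_train_log(path="log.txt"):
    """Return (step, train_loss, grad_norm) tuples from lines in this log format."""
    records = []
    with open(path) as f:
        for line in f:
            m = LINE_RE.search(line)
            if m:  # eval/checkpoint lines do not match and are skipped
                records.append((int(m.group(1)), float(m.group(2)), float(m.group(3))))
    return records

if __name__ == "__main__":
    for step, loss, gnorm in parse_train_log()[-5:]:
        print(f"step {step}: loss={loss:.4f}, grad_norm={gnorm:.4f}")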
[2025-02-27 02:06:34] (step=0020050) Train Loss: 0.0396, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.04, Grad Norm: 0.3179
[2025-02-27 02:07:33] (step=0020100) Train Loss: 0.0402, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.85, Grad Norm: 0.3525
[2025-02-27 02:08:31] (step=0020150) Train Loss: 0.0398, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3031
[2025-02-27 02:09:29] (step=0020200) Train Loss: 0.0394, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3369
[2025-02-27 02:10:28] (step=0020250) Train Loss: 0.0396, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3369
[2025-02-27 02:11:26] (step=0020300) Train Loss: 0.0398, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2979
[2025-02-27 02:12:24] (step=0020350) Train Loss: 0.0399, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3288
[2025-02-27 02:13:23] (step=0020400) Train Loss: 0.0397, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3422
[2025-02-27 02:14:21] (step=0020450) Train Loss: 0.0396, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3237
[2025-02-27 02:15:19] (step=0020500) Train Loss: 0.0397, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3437
[2025-02-27 02:16:18] (step=0020550) Train Loss: 0.0398, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3126
[2025-02-27 02:17:16] (step=0020600) Train Loss: 0.0397, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3175
[2025-02-27 02:18:15] (step=0020650) Train Loss: 0.0396, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3438
[2025-02-27 02:19:13] (step=0020700) Train Loss: 0.0393, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3149
[2025-02-27 02:20:11] (step=0020750) Train Loss: 0.0390, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3412
[2025-02-27 02:21:10] (step=0020800) Train Loss: 0.0394, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3047
[2025-02-27 02:22:08] (step=0020850) Train Loss: 0.0396, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3348
[2025-02-27 02:23:07] (step=0020900) Train Loss: 0.0395, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3499
[2025-02-27 02:24:05] (step=0020950) Train Loss: 0.0395, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3109
[2025-02-27 02:25:03] (step=0021000) Train Loss: 0.0399, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3254
[2025-02-27 02:26:02] (step=0021050) Train Loss: 0.0392, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3398
[2025-02-27 02:27:00] (step=0021100) Train Loss: 0.0399, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3153
[2025-02-27 02:27:58] (step=0021150) Train Loss: 0.0393, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3212
[2025-02-27 02:28:57] (step=0021200) Train Loss: 0.0390, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3291
[2025-02-27 02:29:55] (step=0021250) Train Loss: 0.0395, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3173
[2025-02-27 02:30:54] (step=0021300) Train Loss: 0.0390, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3137
[2025-02-27 02:31:52] (step=0021350) Train Loss: 0.0397, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3371
[2025-02-27 02:32:50] (step=0021400) Train Loss: 0.0395, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3298
[2025-02-27 02:33:49] (step=0021450) Train Loss: 0.0395, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3126
[2025-02-27 02:34:47] (step=0021500) Train Loss: 0.0394, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2999
[2025-02-27 02:35:45] (step=0021550) Train Loss: 0.0396, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3259
[2025-02-27 02:36:44] (step=0021600) Train Loss: 0.0394, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3091
[2025-02-27 02:37:42] (step=0021650) Train Loss: 0.0397, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3249
[2025-02-27 02:38:41] (step=0021700) Train Loss: 0.0390, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3060
[2025-02-27 02:39:39] (step=0021750) Train Loss: 0.0398, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3030
[2025-02-27 02:40:37] (step=0021800) Train Loss: 0.0395, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3246
[2025-02-27 02:41:36] (step=0021850) Train Loss: 0.0395, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3391
[2025-02-27 02:42:34] (step=0021900) Train Loss: 0.0393, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2957
[2025-02-27 02:43:33] (step=0021950) Train Loss: 0.0396, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3199
[2025-02-27 02:44:31] (step=0022000) Train Loss: 0.0392, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3281
[2025-02-27 02:45:29] (step=0022050) Train Loss: 0.0395, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3079
[2025-02-27 02:46:28] (step=0022100) Train Loss: 0.0390, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3204
[2025-02-27 02:47:26] (step=0022150) Train Loss: 0.0395, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3233
[2025-02-27 02:48:24] (step=0022200) Train Loss: 0.0392, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3105
[2025-02-27 02:49:23] (step=0022250) Train Loss: 0.0393, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3053
[2025-02-27 02:50:21] (step=0022300) Train Loss: 0.0392, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3009
[2025-02-27 02:51:20] (step=0022350) Train Loss: 0.0392, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3199
[2025-02-27 02:52:18] (step=0022400) Train Loss: 0.0394, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3322
[2025-02-27 02:53:16] (step=0022450) Train Loss: 0.0397, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3013
[2025-02-27 02:54:15] (step=0022500) Train Loss: 0.0389, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3147
[2025-02-27 02:55:15] (step=0022550) Train Loss: 0.0391, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.82, Grad Norm: 0.3208
[2025-02-27 02:56:14] (step=0022600) Train Loss: 0.0388, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3163
[2025-02-27 02:57:12] (step=0022650) Train Loss: 0.0392, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3177
[2025-02-27 02:58:11] (step=0022700) Train Loss: 0.0393, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2903
[2025-02-27 02:59:09] (step=0022750) Train Loss: 0.0391, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3201
[2025-02-27 03:00:07] (step=0022800) Train Loss: 0.0389, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2971
[2025-02-27 03:01:06] (step=0022850) Train Loss: 0.0393, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3210
[2025-02-27 03:02:04] (step=0022900) Train Loss: 0.0392, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2910
[2025-02-27 03:03:03] (step=0022950) Train Loss: 0.0394, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3161
[2025-02-27 03:04:01] (step=0023000) Train Loss: 0.0389, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3247
[2025-02-27 03:04:59] (step=0023050) Train Loss: 0.0392, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3150
[2025-02-27 03:05:58] (step=0023100) Train Loss: 0.0394, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2970
[2025-02-27 03:06:56] (step=0023150) Train Loss: 0.0387, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2838
[2025-02-27 03:07:55] (step=0023200) Train Loss: 0.0394, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3119
[2025-02-27 03:08:53] (step=0023250) Train Loss: 0.0394, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2931
[2025-02-27 03:09:51] (step=0023300) Train Loss: 0.0394, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2915
[2025-02-27 03:10:50] (step=0023350) Train Loss: 0.0392, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3060
[2025-02-27 03:11:48] (step=0023400) Train Loss: 0.0387, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2786
[2025-02-27 03:12:47] (step=0023450) Train Loss: 0.0392, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3087
[2025-02-27 03:13:45] (step=0023500) Train Loss: 0.0391, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3150
[2025-02-27 03:14:43] (step=0023550) Train Loss: 0.0391, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2907
[2025-02-27 03:15:42] (step=0023600) Train Loss: 0.0390, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3201
[2025-02-27 03:16:40] (step=0023650) Train Loss: 0.0387, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3026
[2025-02-27 03:17:39] (step=0023700) Train Loss: 0.0389, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2837
[2025-02-27 03:18:37] (step=0023750) Train Loss: 0.0390, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3045
[2025-02-27 03:19:35] (step=0023800) Train Loss: 0.0385, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3150
[2025-02-27 03:20:34] (step=0023850) Train Loss: 0.0388, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3000
[2025-02-27 03:21:32] (step=0023900) Train Loss: 0.0390, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2844
[2025-02-27 03:22:31] (step=0023950) Train Loss: 0.0387, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3119
[2025-02-27 03:23:29] (step=0024000) Train Loss: 0.0386, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2874
[2025-02-27 03:24:27] (step=0024050) Train Loss: 0.0389, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3003
[2025-02-27 03:25:26] (step=0024100) Train Loss: 0.0391, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3110
[2025-02-27 03:26:24] (step=0024150) Train Loss: 0.0391, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3257
[2025-02-27 03:27:23] (step=0024200) Train Loss: 0.0387, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2934
[2025-02-27 03:28:21] (step=0024250) Train Loss: 0.0391, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3051
[2025-02-27 03:29:20] (step=0024300) Train Loss: 0.0389, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3078
[2025-02-27 03:30:18] (step=0024350) Train Loss: 0.0391, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2809
[2025-02-27 03:31:16] (step=0024400) Train Loss: 0.0388, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2970
[2025-02-27 03:32:15] (step=0024450) Train Loss: 0.0390, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3069
[2025-02-27 03:33:13] (step=0024500) Train Loss: 0.0392, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2946
[2025-02-27 03:34:12] (step=0024550) Train Loss: 0.0391, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2856
[2025-02-27 03:35:10] (step=0024600) Train Loss: 0.0390, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3012
[2025-02-27 03:36:08] (step=0024650) Train Loss: 0.0385, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3019
[2025-02-27 03:37:07] (step=0024700) Train Loss: 0.0386, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2998
[2025-02-27 03:38:05] (step=0024750) Train Loss: 0.0388, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2938
[2025-02-27 03:39:04] (step=0024800) Train Loss: 0.0388, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3094
[2025-02-27 03:40:02] (step=0024850) Train Loss: 0.0384, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2796
[2025-02-27 03:41:00] (step=0024900) Train Loss: 0.0388, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3023
[2025-02-27 03:41:59] (step=0024950) Train Loss: 0.0391, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2805
[2025-02-27 03:42:57] (step=0025000) Train Loss: 0.0387, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2997
[2025-02-27 03:43:58] (step=0025050) Train Loss: 0.0381, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.82, Grad Norm: 0.2891
[2025-02-27 03:44:56] (step=0025100) Train Loss: 0.0385, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2922
[2025-02-27 03:45:55] (step=0025150) Train Loss: 0.0387, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2912
[2025-02-27 03:46:53] (step=0025200) Train Loss: 0.0388, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2866
[2025-02-27 03:47:52] (step=0025250) Train Loss: 0.0382, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2957
[2025-02-27 03:48:50] (step=0025300) Train Loss: 0.0383, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2639
[2025-02-27 03:49:48] (step=0025350) Train Loss: 0.0389, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3125
[2025-02-27 03:50:47] (step=0025400) Train Loss: 0.0391, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2879
[2025-02-27 03:51:45] (step=0025450) Train Loss: 0.0388, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2735
[2025-02-27 03:52:44] (step=0025500) Train Loss: 0.0388, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2831
[2025-02-27 03:53:42] (step=0025550) Train Loss: 0.0388, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3013
[2025-02-27 03:54:40] (step=0025600) Train Loss: 0.0387, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2775
[2025-02-27 03:55:39] (step=0025650) Train Loss: 0.0390, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2888
[2025-02-27 03:56:37] (step=0025700) Train Loss: 0.0389, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2600
[2025-02-27 03:57:36] (step=0025750) Train Loss: 0.0386, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2919
[2025-02-27 03:58:34] (step=0025800) Train Loss: 0.0385, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3007
[2025-02-27 03:59:32] (step=0025850) Train Loss: 0.0383, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2914
[2025-02-27 04:00:31] (step=0025900) Train Loss: 0.0382, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2919
[2025-02-27 04:01:29] (step=0025950) Train Loss: 0.0386, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2597
[2025-02-27 04:02:28] (step=0026000) Train Loss: 0.0384, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3235
[2025-02-27 04:03:26] (step=0026050) Train Loss: 0.0389, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2882
[2025-02-27 04:04:24] (step=0026100) Train Loss: 0.0380, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2928
[2025-02-27 04:05:23] (step=0026150) Train Loss: 0.0385, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2730
[2025-02-27 04:06:21] (step=0026200) Train Loss: 0.0387, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2959
[2025-02-27 04:07:20] (step=0026250) Train Loss: 0.0385, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2864
[2025-02-27 04:08:18] (step=0026300) Train Loss: 0.0391, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.3057
[2025-02-27 04:09:16] (step=0026350) Train Loss: 0.0385, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2729
[2025-02-27 04:10:15] (step=0026400) Train Loss: 0.0383, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2983
[2025-02-27 04:11:13] (step=0026450) Train Loss: 0.0384, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2670
[2025-02-27 04:12:12] (step=0026500) Train Loss: 0.0387, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2763
[2025-02-27 04:13:10] (step=0026550) Train Loss: 0.0383, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2764
[2025-02-27 04:14:08] (step=0026600) Train Loss: 0.0384, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2800
[2025-02-27 04:15:07] (step=0026650) Train Loss: 0.0388, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2958
[2025-02-27 04:16:05] (step=0026700) Train Loss: 0.0386, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2519
[2025-02-27 04:17:03] (step=0026750) Train Loss: 0.0386, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2963
[2025-02-27 04:18:02] (step=0026800) Train Loss: 0.0387, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2669
[2025-02-27 04:19:00] (step=0026850) Train Loss: 0.0381, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2778
[2025-02-27 04:19:59] (step=0026900) Train Loss: 0.0382, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2768
[2025-02-27 04:20:57] (step=0026950) Train Loss: 0.0384, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2788
[2025-02-27 04:21:55] (step=0027000) Train Loss: 0.0382, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2847
[2025-02-27 04:22:54] (step=0027050) Train Loss: 0.0386, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2712
[2025-02-27 04:23:52] (step=0027100) Train Loss: 0.0380, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2797
[2025-02-27 04:24:51] (step=0027150) Train Loss: 0.0382, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2746
[2025-02-27 04:25:49] (step=0027200) Train Loss: 0.0387, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2811
[2025-02-27 04:26:48] (step=0027250) Train Loss: 0.0386, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2783
[2025-02-27 04:27:46] (step=0027300) Train Loss: 0.0381, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2791
[2025-02-27 04:28:45] (step=0027350) Train Loss: 0.0385, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2894
[2025-02-27 04:29:43] (step=0027400) Train Loss: 0.0382, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2725
[2025-02-27 04:30:42] (step=0027450) Train Loss: 0.0381, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2715
[2025-02-27 04:31:40] (step=0027500) Train Loss: 0.0383, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.85, Grad Norm: 0.2725
[2025-02-27 04:32:41] (step=0027550) Train Loss: 0.0384, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.82, Grad Norm: 0.2912
[2025-02-27 04:33:39] (step=0027600) Train Loss: 0.0384, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2892
[2025-02-27 04:34:37] (step=0027650) Train Loss: 0.0382, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2609
[2025-02-27 04:35:36] (step=0027700) Train Loss: 0.0381, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2945
[2025-02-27 04:36:34] (step=0027750) Train Loss: 0.0383, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2555
[2025-02-27 04:37:33] (step=0027800) Train Loss: 0.0382, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2725
[2025-02-27 04:38:31] (step=0027850) Train Loss: 0.0380, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2704
[2025-02-27 04:39:29] (step=0027900) Train Loss: 0.0383, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2724
[2025-02-27 04:40:28] (step=0027950) Train Loss: 0.0384, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2828
[2025-02-27 04:41:26] (step=0028000) Train Loss: 0.0378, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2795
[2025-02-27 04:42:25] (step=0028050) Train Loss: 0.0387, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2755
[2025-02-27 04:43:23] (step=0028100) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2790
[2025-02-27 04:44:22] (step=0028150) Train Loss: 0.0382, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2637
[2025-02-27 04:45:20] (step=0028200) Train Loss: 0.0381, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2651
[2025-02-27 04:46:18] (step=0028250) Train Loss: 0.0383, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2723
[2025-02-27 04:47:17] (step=0028300) Train Loss: 0.0378, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2787
[2025-02-27 04:48:15] (step=0028350) Train Loss: 0.0384, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2532
[2025-02-27 04:49:14] (step=0028400) Train Loss: 0.0380, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2959
[2025-02-27 04:50:12] (step=0028450) Train Loss: 0.0385, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2533
[2025-02-27 04:51:11] (step=0028500) Train Loss: 0.0380, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2615
[2025-02-27 04:52:09] (step=0028550) Train Loss: 0.0381, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2660
[2025-02-27 04:53:07] (step=0028600) Train Loss: 0.0378, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2592
[2025-02-27 04:54:06] (step=0028650) Train Loss: 0.0383, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2744
[2025-02-27 04:55:04] (step=0028700) Train Loss: 0.0378, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2602
[2025-02-27 04:56:03] (step=0028750) Train Loss: 0.0383, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2605
[2025-02-27 04:57:01] (step=0028800) Train Loss: 0.0381, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2777
[2025-02-27 04:57:59] (step=0028850) Train Loss: 0.0383, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2718
[2025-02-27 04:58:58] (step=0028900) Train Loss: 0.0384, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2677
[2025-02-27 04:59:56] (step=0028950) Train Loss: 0.0384, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2565
[2025-02-27 05:00:55] (step=0029000) Train Loss: 0.0375, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2672
[2025-02-27 05:01:53] (step=0029050) Train Loss: 0.0381, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2758
[2025-02-27 05:02:52] (step=0029100) Train Loss: 0.0380, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2596
[2025-02-27 05:03:50] (step=0029150) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2599
[2025-02-27 05:04:48] (step=0029200) Train Loss: 0.0378, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2718
[2025-02-27 05:05:47] (step=0029250) Train Loss: 0.0382, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2729
[2025-02-27 05:06:45] (step=0029300) Train Loss: 0.0380, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2426
[2025-02-27 05:07:44] (step=0029350) Train Loss: 0.0383, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2699
[2025-02-27 05:08:42] (step=0029400) Train Loss: 0.0381, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2711
[2025-02-27 05:09:41] (step=0029450) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2590
[2025-02-27 05:10:39] (step=0029500) Train Loss: 0.0381, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2731
[2025-02-27 05:11:37] (step=0029550) Train Loss: 0.0379, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2579
[2025-02-27 05:12:36] (step=0029600) Train Loss: 0.0386, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2701
[2025-02-27 05:13:34] (step=0029650) Train Loss: 0.0380, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2662
[2025-02-27 05:14:33] (step=0029700) Train Loss: 0.0380, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2809
[2025-02-27 05:15:31] (step=0029750) Train Loss: 0.0378, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2729
[2025-02-27 05:16:30] (step=0029800) Train Loss: 0.0382, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2777
[2025-02-27 05:17:28] (step=0029850) Train Loss: 0.0382, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2752
[2025-02-27 05:18:26] (step=0029900) Train Loss: 0.0379, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2462
[2025-02-27 05:19:25] (step=0029950) Train Loss: 0.0376, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2626
[2025-02-27 05:20:23] (step=0030000) Train Loss: 0.0383, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2658
[2025-02-27 05:20:27] Saved checkpoint to ../logs/flow/flowsdvae_50kx512_lgnm0p5/checkpoints/0030000.pt
[2025-02-27 05:38:49] (step=0030000), Fid=11.875579540996398, PSNR=21.261029030489922, LPIPS=0.412109375, SSIM=0.363445907831192
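Between the step-20000 and step-30000 evaluations above, FID drops from about 36.2 to 11.9 and PSNR rises from roughly 15.6 to 21.3, so the two saved checkpoints bracket a large reconstruction-quality improvement. The sketch below is a hedged way to inspect one of those files before restoring anything; the only detail taken from the log is the checkpoint path, and the assumption that it is an ordinary torch.save dict is mine, not the author's.

# Hypothetical inspection script (not the training code).
import torch

ckpt_path = "../logs/flow/flowsdvae_50kx512_lgnm0p5/checkpoints/0030000.pt"
ckpt = torch.load(ckpt_path, map_location="cpu")  # load onto CPU for inspection only

# Checkpoints saved with torch.save are often dicts of state_dicts and counters;
# listing the top-level keys shows what is available to pass to load_state_dict.
if isinstance(ckpt, dict):
    for key, value in ckpt.items():
        print(key, type(value).__name__)
else:
    print(type(ckpt).__name__)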
[2025-02-27 05:39:50] (step=0030050) Train Loss: 0.0380, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.04, Grad Norm: 0.2539
[2025-02-27 05:40:48] (step=0030100) Train Loss: 0.0381, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.85, Grad Norm: 0.2628
[2025-02-27 05:41:46] (step=0030150) Train Loss: 0.0384, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2699
[2025-02-27 05:42:45] (step=0030200) Train Loss: 0.0375, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2429
[2025-02-27 05:43:43] (step=0030250) Train Loss: 0.0378, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2667
[2025-02-27 05:44:41] (step=0030300) Train Loss: 0.0378, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2559
[2025-02-27 05:45:40] (step=0030350) Train Loss: 0.0380, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2628
[2025-02-27 05:46:38] (step=0030400) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2670
[2025-02-27 05:47:37] (step=0030450) Train Loss: 0.0379, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2651
[2025-02-27 05:48:35] (step=0030500) Train Loss: 0.0379, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2549
[2025-02-27 05:49:33] (step=0030550) Train Loss: 0.0378, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2714
[2025-02-27 05:50:32] (step=0030600) Train Loss: 0.0382, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2512
[2025-02-27 05:51:30] (step=0030650) Train Loss: 0.0379, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2658
[2025-02-27 05:52:29] (step=0030700) Train Loss: 0.0383, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2635
[2025-02-27 05:53:27] (step=0030750) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2597
[2025-02-27 05:54:26] (step=0030800) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2479
[2025-02-27 05:55:24] (step=0030850) Train Loss: 0.0380, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2825
[2025-02-27 05:56:22] (step=0030900) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2532
[2025-02-27 05:57:21] (step=0030950) Train Loss: 0.0378, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2353
[2025-02-27 05:58:19] (step=0031000) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2742
[2025-02-27 05:59:18] (step=0031050) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2425
[2025-02-27 06:00:16] (step=0031100) Train Loss: 0.0376, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2621
[2025-02-27 06:01:15] (step=0031150) Train Loss: 0.0382, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2622
[2025-02-27 06:02:13] (step=0031200) Train Loss: 0.0379, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2462
[2025-02-27 06:03:11] (step=0031250) Train Loss: 0.0379, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2665
[2025-02-27 06:04:10] (step=0031300) Train Loss: 0.0376, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2595
[2025-02-27 06:05:08] (step=0031350) Train Loss: 0.0376, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2619
[2025-02-27 06:06:07] (step=0031400) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2674
[2025-02-27 06:07:05] (step=0031450) Train Loss: 0.0378, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2520
[2025-02-27 06:08:04] (step=0031500) Train Loss: 0.0380, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2631
[2025-02-27 06:09:02] (step=0031550) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2419
[2025-02-27 06:10:00] (step=0031600) Train Loss: 0.0376, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2618
[2025-02-27 06:10:59] (step=0031650) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2557
[2025-02-27 06:11:57] (step=0031700) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2479
[2025-02-27 06:12:56] (step=0031750) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2571
[2025-02-27 06:13:54] (step=0031800) Train Loss: 0.0376, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2608
[2025-02-27 06:14:53] (step=0031850) Train Loss: 0.0382, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2503
[2025-02-27 06:15:51] (step=0031900) Train Loss: 0.0375, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2603
[2025-02-27 06:16:49] (step=0031950) Train Loss: 0.0376, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2569
[2025-02-27 06:17:48] (step=0032000) Train Loss: 0.0376, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2655
[2025-02-27 06:18:46] (step=0032050) Train Loss: 0.0381, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2561
[2025-02-27 06:19:45] (step=0032100) Train Loss: 0.0376, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2293
[2025-02-27 06:20:43] (step=0032150) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2639
[2025-02-27 06:21:42] (step=0032200) Train Loss: 0.0375, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2430
[2025-02-27 06:22:40] (step=0032250) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2522
[2025-02-27 06:23:39] (step=0032300) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2571
[2025-02-27 06:24:37] (step=0032350) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2493
[2025-02-27 06:25:35] (step=0032400) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2471
[2025-02-27 06:26:34] (step=0032450) Train Loss: 0.0379, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2478
[2025-02-27 06:27:32] (step=0032500) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.85, Grad Norm: 0.2430
[2025-02-27 06:28:33] (step=0032550) Train Loss: 0.0376, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.82, Grad Norm: 0.2591
[2025-02-27 06:29:32] (step=0032600) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.85, Grad Norm: 0.2367
[2025-02-27 06:30:30] (step=0032650) Train Loss: 0.0382, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2641
[2025-02-27 06:31:28] (step=0032700) Train Loss: 0.0373, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2641
[2025-02-27 06:32:27] (step=0032750) Train Loss: 0.0376, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.85, Grad Norm: 0.2506
[2025-02-27 06:33:25] (step=0032800) Train Loss: 0.0378, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2450
[2025-02-27 06:34:24] (step=0032850) Train Loss: 0.0376, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2558
[2025-02-27 06:35:22] (step=0032900) Train Loss: 0.0375, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2414
[2025-02-27 06:36:20] (step=0032950) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2446
[2025-02-27 06:37:19] (step=0033000) Train Loss: 0.0375, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2626
[2025-02-27 06:38:17] (step=0033050) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2344
[2025-02-27 06:39:16] (step=0033100) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2366
[2025-02-27 06:40:14] (step=0033150) Train Loss: 0.0373, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2337
[2025-02-27 06:41:12] (step=0033200) Train Loss: 0.0375, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2408
[2025-02-27 06:42:11] (step=0033250) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2578
[2025-02-27 06:43:09] (step=0033300) Train Loss: 0.0375, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2351
[2025-02-27 06:44:08] (step=0033350) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2489
[2025-02-27 06:45:06] (step=0033400) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2425
[2025-02-27 06:46:04] (step=0033450) Train Loss: 0.0378, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2359
[2025-02-27 06:47:03] (step=0033500) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2308
[2025-02-27 06:48:01] (step=0033550) Train Loss: 0.0376, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2625
[2025-02-27 06:48:59] (step=0033600) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2360
[2025-02-27 06:49:58] (step=0033650) Train Loss: 0.0375, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2537
[2025-02-27 06:50:56] (step=0033700) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2370
[2025-02-27 06:51:55] (step=0033750) Train Loss: 0.0376, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2404
[2025-02-27 06:52:53] (step=0033800) Train Loss: 0.0375, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2472
[2025-02-27 06:53:51] (step=0033850) Train Loss: 0.0375, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2523
[2025-02-27 06:54:50] (step=0033900) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2526
[2025-02-27 06:55:48] (step=0033950) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2446
[2025-02-27 06:56:47] (step=0034000) Train Loss: 0.0375, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2570
[2025-02-27 06:57:45] (step=0034050) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2494
[2025-02-27 06:58:43] (step=0034100) Train Loss: 0.0375, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2484
[2025-02-27 06:59:42] (step=0034150) Train Loss: 0.0375, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2318
[2025-02-27 07:00:40] (step=0034200) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2509
[2025-02-27 07:01:39] (step=0034250) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2369
[2025-02-27 07:02:37] (step=0034300) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2295
[2025-02-27 07:03:35] (step=0034350) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2568
[2025-02-27 07:04:34] (step=0034400) Train Loss: 0.0373, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2393
[2025-02-27 07:05:32] (step=0034450) Train Loss: 0.0376, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2527
[2025-02-27 07:06:31] (step=0034500) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2302
[2025-02-27 07:07:29] (step=0034550) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2387
[2025-02-27 07:08:27] (step=0034600) Train Loss: 0.0375, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2521
[2025-02-27 07:09:26] (step=0034650) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2245
[2025-02-27 07:10:24] (step=0034700) Train Loss: 0.0378, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2439
[2025-02-27 07:11:23] (step=0034750) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2350
[2025-02-27 07:12:21] (step=0034800) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2465
[2025-02-27 07:13:19] (step=0034850) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2450
[2025-02-27 07:14:18] (step=0034900) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2466
[2025-02-27 07:15:16] (step=0034950) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2396
[2025-02-27 07:16:15] (step=0035000) Train Loss: 0.0373, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2567
[2025-02-27 07:17:15] (step=0035050) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.83, Grad Norm: 0.2292
[2025-02-27 07:18:13] (step=0035100) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2404
[2025-02-27 07:19:12] (step=0035150) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2395
[2025-02-27 07:20:10] (step=0035200) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2378
[2025-02-27 07:21:08] (step=0035250) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2339
[2025-02-27 07:22:06] (step=0035300) Train Loss: 0.0373, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2340
[2025-02-27 07:23:05] (step=0035350) Train Loss: 0.0375, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2577
[2025-02-27 07:24:03] (step=0035400) Train Loss: 0.0373, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2138
[2025-02-27 07:25:01] (step=0035450) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2387
[2025-02-27 07:25:59] (step=0035500) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2430
[2025-02-27 07:26:57] (step=0035550) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2333
[2025-02-27 07:27:56] (step=0035600) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2324
[2025-02-27 07:28:54] (step=0035650) Train Loss: 0.0373, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2349
[2025-02-27 07:29:52] (step=0035700) Train Loss: 0.0375, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2248
[2025-02-27 07:30:50] (step=0035750) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2428
[2025-02-27 07:31:49] (step=0035800) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2411
[2025-02-27 07:32:47] (step=0035850) Train Loss: 0.0373, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2395
[2025-02-27 07:33:45] (step=0035900) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2405
[2025-02-27 07:34:43] (step=0035950) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2387
[2025-02-27 07:35:41] (step=0036000) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2275
[2025-02-27 07:36:40] (step=0036050) Train Loss: 0.0373, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2458
[2025-02-27 07:37:38] (step=0036100) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2233
[2025-02-27 07:38:36] (step=0036150) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2373
[2025-02-27 07:39:34] (step=0036200) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2290
[2025-02-27 07:40:33] (step=0036250) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2393
[2025-02-27 07:41:31] (step=0036300) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2426
[2025-02-27 07:42:29] (step=0036350) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2267
[2025-02-27 07:43:27] (step=0036400) Train Loss: 0.0376, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2497
[2025-02-27 07:44:25] (step=0036450) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2273
[2025-02-27 07:45:24] (step=0036500) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2412
[2025-02-27 07:46:22] (step=0036550) Train Loss: 0.0375, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2379
[2025-02-27 07:47:20] (step=0036600) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2301
[2025-02-27 07:48:18] (step=0036650) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2329
[2025-02-27 07:49:16] (step=0036700) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2418
[2025-02-27 07:50:15] (step=0036750) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2292
[2025-02-27 07:51:13] (step=0036800) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2400
[2025-02-27 07:52:11] (step=0036850) Train Loss: 0.0378, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2374
[2025-02-27 07:53:09] (step=0036900) Train Loss: 0.0373, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2384
[2025-02-27 07:54:08] (step=0036950) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2319
[2025-02-27 07:55:06] (step=0037000) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2361
[2025-02-27 07:56:04] (step=0037050) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2300
[2025-02-27 07:57:02] (step=0037100) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2298
[2025-02-27 07:58:00] (step=0037150) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2320
[2025-02-27 07:58:59] (step=0037200) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2384
[2025-02-27 07:59:57] (step=0037250) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2184
[2025-02-27 08:00:55] (step=0037300) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2156
[2025-02-27 08:01:53] (step=0037350) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2266
[2025-02-27 08:02:52] (step=0037400) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2286
[2025-02-27 08:03:50] (step=0037450) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2134
[2025-02-27 08:04:48] (step=0037500) Train Loss: 0.0373, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2426
[2025-02-27 08:05:48] (step=0037550) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.83, Grad Norm: 0.2222
[2025-02-27 08:06:47] (step=0037600) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2207
[2025-02-27 08:07:45] (step=0037650) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2257
[2025-02-27 08:08:43] (step=0037700) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2198
[2025-02-27 08:09:41] (step=0037750) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2152
[2025-02-27 08:10:39] (step=0037800) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2187
[2025-02-27 08:11:38] (step=0037850) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2195
[2025-02-27 08:12:36] (step=0037900) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2315
[2025-02-27 08:13:34] (step=0037950) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2134
[2025-02-27 08:14:32] (step=0038000) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2323
[2025-02-27 08:15:31] (step=0038050) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2132
[2025-02-27 08:16:29] (step=0038100) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2310
[2025-02-27 08:17:27] (step=0038150) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2189
[2025-02-27 08:18:25] (step=0038200) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2319
[2025-02-27 08:19:24] (step=0038250) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2210
[2025-02-27 08:20:22] (step=0038300) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2155
[2025-02-27 08:21:20] (step=0038350) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2274
[2025-02-27 08:22:18] (step=0038400) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2227
[2025-02-27 08:23:16] (step=0038450) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2319
[2025-02-27 08:24:15] (step=0038500) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2252
[2025-02-27 08:25:13] (step=0038550) Train Loss: 0.0377, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2260
[2025-02-27 08:26:11] (step=0038600) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2202
[2025-02-27 08:27:09] (step=0038650) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2245
[2025-02-27 08:28:08] (step=0038700) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2372
[2025-02-27 08:29:06] (step=0038750) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2206
[2025-02-27 08:30:04] (step=0038800) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2402
[2025-02-27 08:31:02] (step=0038850) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2333
[2025-02-27 08:32:00] (step=0038900) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2258
[2025-02-27 08:32:59] (step=0038950) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2081
[2025-02-27 08:33:57] (step=0039000) Train Loss: 0.0375, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2319
[2025-02-27 08:34:55] (step=0039050) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2221
[2025-02-27 08:35:53] (step=0039100) Train Loss: 0.0374, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2149
[2025-02-27 08:36:52] (step=0039150) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2380
[2025-02-27 08:37:50] (step=0039200) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2213
[2025-02-27 08:38:48] (step=0039250) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2255
[2025-02-27 08:39:46] (step=0039300) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2269
[2025-02-27 08:40:44] (step=0039350) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2397
[2025-02-27 08:41:43] (step=0039400) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2164
[2025-02-27 08:42:41] (step=0039450) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2398
[2025-02-27 08:43:39] (step=0039500) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2071
[2025-02-27 08:44:37] (step=0039550) Train Loss: 0.0373, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2300
[2025-02-27 08:45:36] (step=0039600) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2169
[2025-02-27 08:46:34] (step=0039650) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2490
[2025-02-27 08:47:32] (step=0039700) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2118
[2025-02-27 08:48:30] (step=0039750) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2264
[2025-02-27 08:49:28] (step=0039800) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1861
[2025-02-27 08:50:27] (step=0039850) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2314
[2025-02-27 08:51:25] (step=0039900) Train Loss: 0.0361, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2020
[2025-02-27 08:52:23] (step=0039950) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2151
[2025-02-27 08:53:21] (step=0040000) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2195
[2025-02-27 08:53:24] Saved checkpoint to ../logs/flow/flowsdvae_50kx512_lgnm0p5/checkpoints/0040000.pt
[2025-02-27 09:11:59] (step=0040000), Fid=5.884028175804133, PSNR=23.729816410851477, LPIPS=0.2890625, SSIM=0.5677300095558167
[2025-02-27 09:13:00] (step=0040050) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.04, Grad Norm: 0.2216
[2025-02-27 09:13:58] (step=0040100) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.85, Grad Norm: 0.2159
[2025-02-27 09:14:57] (step=0040150) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.85, Grad Norm: 0.2444
[2025-02-27 09:15:55] (step=0040200) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2082
[2025-02-27 09:16:53] (step=0040250) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2320
[2025-02-27 09:17:52] (step=0040300) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2075
[2025-02-27 09:18:50] (step=0040350) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2212
[2025-02-27 09:19:49] (step=0040400) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2206
[2025-02-27 09:20:47] (step=0040450) Train Loss: 0.0373, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2160
[2025-02-27 09:21:45] (step=0040500) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2309
[2025-02-27 09:22:44] (step=0040550) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2213
[2025-02-27 09:23:42] (step=0040600) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2076
[2025-02-27 09:24:41] (step=0040650) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2270
[2025-02-27 09:25:39] (step=0040700) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2209
[2025-02-27 09:26:37] (step=0040750) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2185
[2025-02-27 09:27:36] (step=0040800) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2297
[2025-02-27 09:28:34] (step=0040850) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2178
[2025-02-27 09:29:33] (step=0040900) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2224
[2025-02-27 09:30:31] (step=0040950) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2177
[2025-02-27 09:31:29] (step=0041000) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2103
[2025-02-27 09:32:28] (step=0041050) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2069
[2025-02-27 09:33:26] (step=0041100) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2206
[2025-02-27 09:34:25] (step=0041150) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2086
[2025-02-27 09:35:23] (step=0041200) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2139
[2025-02-27 09:36:22] (step=0041250) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2130
[2025-02-27 09:37:20] (step=0041300) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2215
[2025-02-27 09:38:18] (step=0041350) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2144
[2025-02-27 09:39:17] (step=0041400) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2167
[2025-02-27 09:40:15] (step=0041450) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2216
[2025-02-27 09:41:14] (step=0041500) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2164
[2025-02-27 09:42:12] (step=0041550) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2190
[2025-02-27 09:43:10] (step=0041600) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2261
[2025-02-27 09:44:09] (step=0041650) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2142
[2025-02-27 09:45:07] (step=0041700) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2178
[2025-02-27 09:46:06] (step=0041750) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2142
[2025-02-27 09:47:04] (step=0041800) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2176
[2025-02-27 09:48:02] (step=0041850) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2188
[2025-02-27 09:49:01] (step=0041900) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2167
[2025-02-27 09:49:59] (step=0041950) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2073
[2025-02-27 09:50:58] (step=0042000) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2299
[2025-02-27 09:51:56] (step=0042050) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2154
[2025-02-27 09:52:55] (step=0042100) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1964
[2025-02-27 09:53:53] (step=0042150) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2213
[2025-02-27 09:54:51] (step=0042200) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1904
[2025-02-27 09:55:50] (step=0042250) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2222
[2025-02-27 09:56:48] (step=0042300) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2041
[2025-02-27 09:57:47] (step=0042350) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2168
[2025-02-27 09:58:45] (step=0042400) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2166
[2025-02-27 09:59:43] (step=0042450) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2031
[2025-02-27 10:00:42] (step=0042500) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2107
[2025-02-27 10:01:43] (step=0042550) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.82, Grad Norm: 0.2129
[2025-02-27 10:02:41] (step=0042600) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2164
[2025-02-27 10:03:39] (step=0042650) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2136
[2025-02-27 10:04:37] (step=0042700) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2117
[2025-02-27 10:05:36] (step=0042750) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2349
[2025-02-27 10:06:34] (step=0042800) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1939
[2025-02-27 10:07:32] (step=0042850) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1992
[2025-02-27 10:08:30] (step=0042900) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2275
[2025-02-27 10:09:29] (step=0042950) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1913
[2025-02-27 10:10:27] (step=0043000) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2001
[2025-02-27 10:11:25] (step=0043050) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2147
[2025-02-27 10:12:23] (step=0043100) Train Loss: 0.0363, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2138
[2025-02-27 10:13:22] (step=0043150) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1985
[2025-02-27 10:14:20] (step=0043200) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2183
[2025-02-27 10:15:18] (step=0043250) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2069
[2025-02-27 10:16:16] (step=0043300) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2043
[2025-02-27 10:17:15] (step=0043350) Train Loss: 0.0360, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1988
[2025-02-27 10:18:13] (step=0043400) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2058
[2025-02-27 10:19:11] (step=0043450) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2130
[2025-02-27 10:20:09] (step=0043500) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2212
[2025-02-27 10:21:08] (step=0043550) Train Loss: 0.0370, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2224
[2025-02-27 10:22:06] (step=0043600) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1966
[2025-02-27 10:23:04] (step=0043650) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2002
[2025-02-27 10:24:02] (step=0043700) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2059
[2025-02-27 10:25:01] (step=0043750) Train Loss: 0.0361, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2000
[2025-02-27 10:25:59] (step=0043800) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1932
[2025-02-27 10:26:57] (step=0043850) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2067
[2025-02-27 10:27:55] (step=0043900) Train Loss: 0.0360, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2016
[2025-02-27 10:28:54] (step=0043950) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1989
[2025-02-27 10:29:52] (step=0044000) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2031
[2025-02-27 10:30:50] (step=0044050) Train Loss: 0.0362, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2016
[2025-02-27 10:31:48] (step=0044100) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2095
[2025-02-27 10:32:47] (step=0044150) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2106
[2025-02-27 10:33:45] (step=0044200) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2074
[2025-02-27 10:34:43] (step=0044250) Train Loss: 0.0363, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2033
[2025-02-27 10:35:41] (step=0044300) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2230
[2025-02-27 10:36:40] (step=0044350) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2082
[2025-02-27 10:37:38] (step=0044400) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2010
[2025-02-27 10:38:36] (step=0044450) Train Loss: 0.0372, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2141
[2025-02-27 10:39:34] (step=0044500) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1965
[2025-02-27 10:40:33] (step=0044550) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2068
[2025-02-27 10:41:31] (step=0044600) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1983
[2025-02-27 10:42:29] (step=0044650) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2009
[2025-02-27 10:43:27] (step=0044700) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2103
[2025-02-27 10:44:26] (step=0044750) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2101
[2025-02-27 10:45:24] (step=0044800) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1862
[2025-02-27 10:46:22] (step=0044850) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2095
[2025-02-27 10:47:21] (step=0044900) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1970
[2025-02-27 10:48:19] (step=0044950) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2165
[2025-02-27 10:49:17] (step=0045000) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2107
[2025-02-27 10:50:18] (step=0045050) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.82, Grad Norm: 0.1985
[2025-02-27 10:51:16] (step=0045100) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2127
[2025-02-27 10:52:14] (step=0045150) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2048
[2025-02-27 10:53:13] (step=0045200) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2063
[2025-02-27 10:54:11] (step=0045250) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2128
[2025-02-27 10:55:09] (step=0045300) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1939
[2025-02-27 10:56:07] (step=0045350) Train Loss: 0.0362, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2158
[2025-02-27 10:57:06] (step=0045400) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2002
[2025-02-27 10:58:04] (step=0045450) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2049
[2025-02-27 10:59:02] (step=0045500) Train Loss: 0.0363, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1869
[2025-02-27 11:00:00] (step=0045550) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1986
[2025-02-27 11:00:59] (step=0045600) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2097
[2025-02-27 11:01:57] (step=0045650) Train Loss: 0.0363, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1978
[2025-02-27 11:02:55] (step=0045700) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1978
[2025-02-27 11:03:53] (step=0045750) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2000
[2025-02-27 11:04:52] (step=0045800) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2008
[2025-02-27 11:05:50] (step=0045850) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2058
[2025-02-27 11:06:48] (step=0045900) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1978
[2025-02-27 11:07:46] (step=0045950) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2022
[2025-02-27 11:08:45] (step=0046000) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1932
[2025-02-27 11:09:43] (step=0046050) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2049
[2025-02-27 11:10:41] (step=0046100) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2069
[2025-02-27 11:11:39] (step=0046150) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1992
[2025-02-27 11:12:38] (step=0046200) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1871
[2025-02-27 11:13:36] (step=0046250) Train Loss: 0.0363, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2077
[2025-02-27 11:14:34] (step=0046300) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1922
[2025-02-27 11:15:32] (step=0046350) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2130
[2025-02-27 11:16:31] (step=0046400) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1991
[2025-02-27 11:17:29] (step=0046450) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2047
[2025-02-27 11:18:28] (step=0046500) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2043
[2025-02-27 11:19:26] (step=0046550) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2039
[2025-02-27 11:20:24] (step=0046600) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2080
[2025-02-27 11:21:22] (step=0046650) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2094
[2025-02-27 11:22:21] (step=0046700) Train Loss: 0.0363, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1871
[2025-02-27 11:23:19] (step=0046750) Train Loss: 0.0362, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1922
[2025-02-27 11:24:17] (step=0046800) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1817
[2025-02-27 11:25:15] (step=0046850) Train Loss: 0.0362, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2056
[2025-02-27 11:26:14] (step=0046900) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2119
[2025-02-27 11:27:12] (step=0046950) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1926
[2025-02-27 11:28:10] (step=0047000) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1964
[2025-02-27 11:29:08] (step=0047050) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1981
[2025-02-27 11:30:07] (step=0047100) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1883
[2025-02-27 11:31:05] (step=0047150) Train Loss: 0.0371, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1988
[2025-02-27 11:32:03] (step=0047200) Train Loss: 0.0363, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1847
[2025-02-27 11:33:01] (step=0047250) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1955
[2025-02-27 11:34:00] (step=0047300) Train Loss: 0.0361, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1916
[2025-02-27 11:34:58] (step=0047350) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2099
[2025-02-27 11:35:56] (step=0047400) Train Loss: 0.0362, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1824
[2025-02-27 11:36:54] (step=0047450) Train Loss: 0.0362, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1964
[2025-02-27 11:37:53] (step=0047500) Train Loss: 0.0362, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1973
[2025-02-27 11:38:53] (step=0047550) Train Loss: 0.0360, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.83, Grad Norm: 0.2015
[2025-02-27 11:39:51] (step=0047600) Train Loss: 0.0369, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1964
[2025-02-27 11:40:50] (step=0047650) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1918
[2025-02-27 11:41:48] (step=0047700) Train Loss: 0.0357, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2018
[2025-02-27 11:42:46] (step=0047750) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2030
[2025-02-27 11:43:44] (step=0047800) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1940
[2025-02-27 11:44:43] (step=0047850) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1848
[2025-02-27 11:45:41] (step=0047900) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2005
[2025-02-27 11:46:39] (step=0047950) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2072
[2025-02-27 11:47:37] (step=0048000) Train Loss: 0.0362, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1873
[2025-02-27 11:48:36] (step=0048050) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1966
[2025-02-27 11:49:34] (step=0048100) Train Loss: 0.0368, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1883
[2025-02-27 11:50:32] (step=0048150) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2008
[2025-02-27 11:51:30] (step=0048200) Train Loss: 0.0362, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1966
[2025-02-27 11:52:29] (step=0048250) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1895
[2025-02-27 11:53:27] (step=0048300) Train Loss: 0.0362, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1991
[2025-02-27 11:54:25] (step=0048350) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1923
[2025-02-27 11:55:24] (step=0048400) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2062
[2025-02-27 11:56:22] (step=0048450) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1841
[2025-02-27 11:57:20] (step=0048500) Train Loss: 0.0362, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1991
[2025-02-27 11:58:18] (step=0048550) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1935
[2025-02-27 11:59:17] (step=0048600) Train Loss: 0.0362, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1979
[2025-02-27 12:00:15] (step=0048650) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2057
[2025-02-27 12:01:13] (step=0048700) Train Loss: 0.0363, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1933
[2025-02-27 12:02:11] (step=0048750) Train Loss: 0.0360, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1994
[2025-02-27 12:03:10] (step=0048800) Train Loss: 0.0362, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1902
[2025-02-27 12:04:08] (step=0048850) Train Loss: 0.0362, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1921
[2025-02-27 12:05:06] (step=0048900) Train Loss: 0.0363, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1967
[2025-02-27 12:06:04] (step=0048950) Train Loss: 0.0363, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1963
[2025-02-27 12:07:03] (step=0049000) Train Loss: 0.0359, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1985
[2025-02-27 12:08:01] (step=0049050) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2021
[2025-02-27 12:08:59] (step=0049100) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1893
[2025-02-27 12:09:57] (step=0049150) Train Loss: 0.0363, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1962
[2025-02-27 12:10:56] (step=0049200) Train Loss: 0.0367, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1835
[2025-02-27 12:11:54] (step=0049250) Train Loss: 0.0361, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1932
[2025-02-27 12:12:52] (step=0049300) Train Loss: 0.0359, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1878
[2025-02-27 12:13:50] (step=0049350) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1911
[2025-02-27 12:14:49] (step=0049400) Train Loss: 0.0363, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1989
[2025-02-27 12:15:47] (step=0049450) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1902
[2025-02-27 12:16:45] (step=0049500) Train Loss: 0.0356, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2057
[2025-02-27 12:17:43] (step=0049550) Train Loss: 0.0361, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1905
[2025-02-27 12:18:42] (step=0049600) Train Loss: 0.0365, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1980
[2025-02-27 12:19:40] (step=0049650) Train Loss: 0.0362, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1847
[2025-02-27 12:20:38] (step=0049700) Train Loss: 0.0362, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1986
[2025-02-27 12:21:36] (step=0049750) Train Loss: 0.0359, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1875
[2025-02-27 12:22:34] (step=0049800) Train Loss: 0.0361, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1966
[2025-02-27 12:23:33] (step=0049850) Train Loss: 0.0361, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1865
[2025-02-27 12:24:31] (step=0049900) Train Loss: 0.0366, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.2004
[2025-02-27 12:25:29] (step=0049950) Train Loss: 0.0363, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1855
[2025-02-27 12:26:27] (step=0050000) Train Loss: 0.0364, Perceptual Loss: 0.0000, Cos Loss: 0.0000, Train Steps/Sec: 0.86, Grad Norm: 0.1954
[2025-02-27 12:26:31] Saved checkpoint to ../logs/flow/flowsdvae_50kx512_lgnm0p5/checkpoints/0050000.pt
[2025-02-27 12:45:43] (step=0050000), Fid=3.3542575619146078, PSNR=24.5998541012764, LPIPS=0.2314453125, SSIM=0.6293783783912659
[2025-02-27 12:45:44] Done!