import os
import sys
with open(sys.argv[0]) as f:
    code = f.read() # read the code of this file ASAP, for logging
import uuid
import time
import glob
import subprocess
import contextlib
from dataclasses import dataclass
import torch
torch.empty(1, device='cuda', requires_grad=True).backward()
from torch import nn
import torch.nn.functional as F
import torch.distributed as dist
from torch.nn.parallel import DistributedDataParallel as DDP
# use of FlexAttention contributed by @KoszarskyB
from torch.nn.attention.flex_attention import BlockMask, flex_attention
# -----------------------------------------------------------------------------
# Muon optimizer
@torch.compile
def zeropower_via_newtonschulz5(G, steps):
    """
    Newton-Schulz iteration to compute the zeroth power / orthogonalization of G. We opt to use a
    quintic iteration whose coefficients are selected to maximize the slope at zero. For the purpose
    of minimizing steps, it turns out to be empirically effective to keep increasing the slope at
    zero even beyond the point where the iteration no longer converges all the way to one everywhere
    on the interval. This iteration therefore does not produce UV^T but rather something like US'V^T
    where S' is diagonal with S_{ii}' ~ Uniform(0.5, 1.5), which turns out not to hurt model
    performance at all relative to UV^T, where USV^T = G is the SVD.
    """
    assert len(G.shape) == 2
    a, b, c = (3.4445, -4.7750, 2.0315)
    X = G.bfloat16()
    if G.size(0) > G.size(1):
        X = X.T
    # Ensure spectral norm is at most 1
    X = X / (X.norm() + 1e-7)
    # Perform the NS iterations
    for _ in range(steps):
        A = X @ X.T
        B = b * A + c * A @ A # adapted from suggestion by @jxbz, @leloykun, and @YouJiacheng
        X = a * X + B @ X
    if G.size(0) > G.size(1):
        X = X.T
    return X
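# A quick sanity check of the docstring's claim above (a sketch only, not part of the training
# run; _check_ns_orthogonalization is a hypothetical helper that is never called here):
def _check_ns_orthogonalization():
    G = torch.randn(256, 128, device='cuda')
    X = zeropower_via_newtonschulz5(G, steps=5)
    s = torch.linalg.svdvals(X.float())
    print(f'singular values span [{s.min().item():.3f}, {s.max().item():.3f}]') # expect roughly [0.5, 1.5]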
class Muon(torch.optim.Optimizer):
    """
    Muon - MomentUm Orthogonalized by Newton-Schulz
    Muon internally runs standard SGD-momentum, and then performs an orthogonalization post-
    processing step, in which each 2D parameter's update is replaced with the nearest orthogonal
    matrix. To efficiently orthogonalize each update, we use a Newton-Schulz iteration, which has
    the advantage that it can be stably run in bfloat16 on the GPU.
    Some warnings:
    - This optimizer assumes that all parameters passed in are 2D.
    - It should not be used for the embedding layer, the final fully connected layer, or any {0,1}-D
      parameters; those should all be optimized by a standard method (e.g., AdamW).
    - To use it with 4D convolutional filters, it works well to just flatten their last 3 dimensions.
    - We believe it is unlikely to work well for training with small batch size.
    - We believe it may not work well for finetuning pretrained models, but we haven't tested this.
    - We have not yet tried this optimizer for training scenarios larger than NanoGPT (124M).
    Arguments:
        lr: The learning rate used by the internal SGD.
        momentum: The momentum used by the internal SGD.
        nesterov: Whether to use Nesterov-style momentum in the internal SGD. (recommended)
        ns_steps: The number of Newton-Schulz iteration steps to use.
    """
    def __init__(self, params, lr=0.02, momentum=0.95, nesterov=True, ns_steps=5):
        self.world_size = int(os.environ['WORLD_SIZE'])
        self.rank = int(os.environ['RANK'])
        defaults = dict(lr=lr, momentum=momentum, nesterov=nesterov, ns_steps=ns_steps)
        assert all(isinstance(p, torch.Tensor) for p in params)
        sizes = {p.numel() for p in params}
        param_groups = [dict(params=[p for p in params if p.numel() == size],
                             update_buffer=[torch.empty(size, device='cuda', dtype=torch.bfloat16) for _ in range(self.world_size)])
                        for size in sizes]
        super().__init__(param_groups, defaults)
    def step(self):
        for group in self.param_groups:
            lr = group['lr']
            momentum = group['momentum']
            nesterov = group['nesterov']
            ns_steps = group['ns_steps']
            update_buffers = group['update_buffer']
            # generate weight updates in distributed fashion
            params = group['params']
            """
            handle = None
            params_world = None
            def update_prev():
                if params_world is None:
                    return
                assert handle is not None
                handle.wait()
                for p_world, g_world in zip(params_world, update_buffers):
                    p_world.data.add_(
                        g_world.view_as(p_world),
                        alpha=-lr * max(1, p_world.size(0) / p_world.size(1)) ** 0.5,
                    )
            """
            # Single-GPU-only experiments, disabling comms silliness due to segfault stuff :(
            for base_i in range(len(params)): #[::self.world_size]:
                if True:
                #if base_i + rank < len(params):
                    p = params[base_i + self.rank]
                    g = p.grad
                    assert g is not None
                    state = self.state[p]
                    if 'momentum_buffer' not in state:
                        state['momentum_buffer'] = torch.zeros_like(g)
                    buf = state['momentum_buffer']
                    buf.lerp_(g, 1 - momentum) # buf = momentum * buf + (1 - momentum) * g
                    g = g.lerp_(buf, momentum) if nesterov else buf
                    g = zeropower_via_newtonschulz5(g, steps=ns_steps).flatten()
                    p.data.add_(g.view_as(p), alpha=-lr * max(1, p.size(0) / p.size(1)) ** 0.5)
                #else:
                #    g = update_buffers[rank]
                #update_prev() # async all_gather instead of sync all_reduce by @YouJiacheng
                #handle = dist.all_gather(update_buffers, g, async_op=True)
                #params_world = params[base_i : base_i + self.world_size]
            #update_prev()
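# Note on the update scale above: the factor max(1, rows/cols) ** 0.5 (as we understand it)
# keeps the per-element update magnitude roughly consistent across parameter shapes. E.g. a
# wide 768x3072 weight gets a factor of 1, while a tall 3072x768 weight gets max(1, 4) ** 0.5 = 2.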
# -----------------------------------------------------------------------------
# PyTorch nn.Module definitions for the GPT-2 model
def norm(x):
    return F.rms_norm(x, (x.size(-1),))
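# For reference: with no learnable weight, F.rms_norm(x, (x.size(-1),)) is equivalent (up to
# the default eps) to x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + eps), i.e. a
# parameter-free RMSNorm over the last dimension.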
class CastedLinear(nn.Linear):
    def __init__(self, in_features, out_features):
        super().__init__(in_features, out_features, bias=False)
    def forward(self, x):
        return F.linear(x, self.weight.type_as(x))
class Rotary(nn.Module):
    def __init__(self, dim, max_seq_len=65536):
        super().__init__()
        # half-truncate RoPE by @YouJiacheng
        angular_freq = (1 / 1024) ** torch.linspace(0, 1, steps=dim//4, dtype=torch.float32)
        angular_freq = torch.cat([angular_freq, angular_freq.new_zeros(dim//4)])
        t = torch.arange(max_seq_len, dtype=torch.float32)
        theta = torch.einsum('i,j -> ij', t, angular_freq)
        self.cos = nn.Buffer(theta.cos(), persistent=False)
        self.sin = nn.Buffer(theta.sin(), persistent=False)
    def forward(self, x):
        cos, sin = self.cos[None, :x.size(-3), None, :], self.sin[None, :x.size(-3), None, :]
        x1, x2 = x.float().chunk(2, dim=-1)
        y1 = x1 * cos + x2 * sin
        y2 = x1 * (-sin) + x2 * cos
        return torch.cat((y1, y2), 3).type_as(x)
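# Note on the half-truncated RoPE above: the second half of angular_freq is zeroed, so theta = 0
# there, cos = 1, sin = 0, and those dimensions pass through unrotated; only the first half of
# each (x1, x2) pair receives a position-dependent rotation.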
class CausalSelfAttention(nn.Module):
    def __init__(self, dim, num_heads):
        super().__init__()
        assert dim % num_heads == 0
        self.num_heads = num_heads
        self.c_q = CastedLinear(dim, dim)
        self.c_k = CastedLinear(dim, dim)
        self.c_v = CastedLinear(dim, dim)
        self.lambdas = nn.Parameter(torch.tensor([0.5, 0.5]))
        self.rotary = Rotary(dim // num_heads) # dim // num_heads = head_dim
        self.c_proj = CastedLinear(dim, dim)
        self.c_proj.weight.data.zero_() # zero init suggested by @Grad62304977
    def forward(self, x, ve, block_mask):
        B, T = x.size(0), x.size(1) # batch size, sequence length
        assert B == 1, 'Must use batch size = 1 for FlexAttention'
        q = self.c_q(x).view(B, T, self.num_heads, -1)
        k = self.c_k(x).view(B, T, self.num_heads, -1)
        v = self.c_v(x).view(B, T, self.num_heads, -1)
        if ve is not None:
            v = self.lambdas[0] * v + self.lambdas[1] * ve.view_as(v) # @KoszarskyB & @Grad62304977
        else: # skip mid-layers token value embeddings by @YouJiacheng
            v = self.lambdas[0] * v
        q, k = norm(q), norm(k) # QK norm @Grad62304977
        q, k = self.rotary(q), self.rotary(k)
        y = flex_attention(q.transpose(1, 2), k.transpose(1, 2), v.transpose(1, 2), block_mask=block_mask)
        y = y.transpose(1, 2).contiguous().view_as(x) # re-assemble all head outputs side by side
        y = self.c_proj(y)
        return y
class MLP(nn.Module):
    def __init__(self, dim):
        super().__init__()
        self.c_fc = CastedLinear(dim, 4 * dim)
        self.c_proj = CastedLinear(4 * dim, dim)
        self.c_proj.weight.data.zero_() # zero init suggested by @Grad62304977
    def forward(self, x):
        x = self.c_fc(x)
        x = F.relu(x).square() # https://arxiv.org/abs/2109.08668v2; ~1-2% better than GELU; suggested by @SKYLINEZ007 and @Grad62304977
        x = self.c_proj(x)
        return x
class Block(nn.Module):
    def __init__(self, model_dim, num_heads, use_attn=True):
        super().__init__()
        self.attn = CausalSelfAttention(model_dim, num_heads) if use_attn else None
        self.mlp = MLP(model_dim)
        self.lambdas = nn.Parameter(torch.tensor([1., 0.]))
    def forward(self, x, ve, x0, block_mask):
        x = self.lambdas[0] * x + self.lambdas[1] * x0
        if self.attn is not None:
            x = x + self.attn(norm(x), ve, block_mask)
        x = x + self.mlp(norm(x))
        return x
class ValueEmbedding(nn.Module):
    def __init__(self, vocab_size, model_dim):
        super().__init__()
        self.embed = nn.ModuleList([nn.Embedding(vocab_size, model_dim) for _ in range(3)])
    def forward(self, inputs):
        ve = [emb(inputs).bfloat16() for emb in self.embed]
        # 012 ... 012 structure on token value embeddings by @YouJiacheng, improved on @leloykun's U-net structure
        ve = [ve[0], ve[1], ve[2], None, None, None, None, None, None, ve[0], ve[1], ve[2]]
        return ve
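# Note: the returned list has one entry per transformer layer (12 here, asserted in GPT.forward
# below); a None entry means that layer receives no token value embedding (see the ve handling
# in CausalSelfAttention.forward above).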
# -----------------------------------------------------------------------------
# The main GPT-2 model
class GPT(nn.Module):
    def __init__(self, vocab_size, num_layers, num_heads, model_dim):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, model_dim)
        # skip attention of blocks.7 (the 8th layer) by @YouJiacheng
        self.blocks = nn.ModuleList([Block(model_dim, num_heads, use_attn=(i != 7))
                                     for i in range(num_layers)])
        # token value embeddings by @KoszarskyB - inspired by @Grad62304977's value residual learning
        # U-net structure on token value embeddings by @leloykun
        self.value_embeds = ValueEmbedding(vocab_size, model_dim)
        self.lm_head = CastedLinear(model_dim, vocab_size)
        self.lm_head.weight.data.zero_() # @Grad62304977
        # U-net design by @brendanh0gan
        self.num_encoder_layers = num_layers // 2 # Half of the layers for the encoder
        self.num_decoder_layers = num_layers - self.num_encoder_layers # Remaining for the decoder
        # Add learnable skip connection weights for decoder layers
        self.skip_weights = nn.Parameter(torch.ones(self.num_decoder_layers))
    def forward(self, inputs, targets, sliding_window_num_blocks):
        BLOCK_SIZE = 128
        seq_len = len(inputs)
        assert seq_len % BLOCK_SIZE == 0
        total_num_blocks = seq_len // BLOCK_SIZE
        assert inputs.ndim == 1
        docs = (inputs == 50256).cumsum(0)
        docs_low = docs.view(-1, BLOCK_SIZE)[:, 0].contiguous()
        docs_high = docs.view(-1, BLOCK_SIZE)[:, -1].contiguous()
        def document_causal(b, h, q_idx, kv_idx):
            causal_mask = q_idx >= kv_idx
            document_mask = docs[q_idx] == docs[kv_idx]
            return causal_mask & document_mask
        def dense_to_ordered(dense_mask):
            num_blocks = dense_mask.sum(dim=-1, dtype=torch.int32)
            indices = dense_mask.argsort(dim=-1, descending=True, stable=True).to(torch.int32)
            return num_blocks[None, None].contiguous(), indices[None, None].contiguous()
        def create_doc_swc_block_mask(sliding_window_num_blocks):
            kv_idx = block_idx = torch.arange(total_num_blocks, dtype=torch.int32, device='cuda')
            q_idx = block_idx[:, None]
            causal_bm = q_idx >= kv_idx
            causal_full_bm = q_idx > kv_idx
            window_bm = q_idx - kv_idx < sliding_window_num_blocks
            window_full_bm = window_bm # block-wise sliding window by @YouJiacheng
            # document_bm = (docs_low[q_idx] <= docs_high[kv_idx]) & (docs_low[kv_idx] <= docs_high[q_idx])
            document_bm = (docs_low[:, None] <= docs_high) & (docs_low <= docs_high[:, None])
            document_full_bm = (docs_low[:, None] == docs_high) & (docs_low == docs_high[:, None])
            nonzero_bm = causal_bm & window_bm & document_bm
            full_bm = causal_full_bm & window_full_bm & document_full_bm
            kv_num_blocks, kv_indices = dense_to_ordered(nonzero_bm & ~full_bm)
            full_kv_num_blocks, full_kv_indices = dense_to_ordered(full_bm)
            return BlockMask.from_kv_blocks(
                kv_num_blocks,
                kv_indices,
                full_kv_num_blocks,
                full_kv_indices,
                BLOCK_SIZE=BLOCK_SIZE,
                mask_mod=document_causal,
            )
        block_mask = create_doc_swc_block_mask(sliding_window_num_blocks)
        x0 = norm(self.embed(inputs[None]).bfloat16()) # use of norm here by @Grad62304977
        x = x0
        ve = self.value_embeds(inputs)
        assert len(ve) == len(self.blocks)
        ve_enc, ve_dec = ve[:self.num_encoder_layers], ve[self.num_encoder_layers:]
        # Store outputs for U-Net skip connections
        skip_connections = []
        # Encoder pass - process only the first half of the blocks
        for i in range(self.num_encoder_layers):
            x = self.blocks[i](x, ve_enc[i], x0, block_mask)
            skip_connections.append(x)
        # Decoder pass - process the remaining blocks with weighted skip connections
        for i in range(self.num_decoder_layers):
            x = x + self.skip_weights[i] * skip_connections.pop()
            # U-net structure on token value embeddings by @leloykun
            x = self.blocks[self.num_encoder_layers + i](x, ve_dec[i], x0, block_mask)
        x = norm(x)
        logits = self.lm_head(x)
        logits = 15 * torch.tanh(logits / 15) # @Grad62304977 added tanh softcapping, @KoszarskyB reduced it from 30 to 15
        logits = logits.float()
        loss = F.cross_entropy(logits.view(-1, logits.size(-1)), targets)
        return loss
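# A shape-level sketch of how the model is invoked (a hypothetical helper under assumed toy
# sizes, never called in this run; it mirrors the compile-warmup call made before training below):
def _smoke_test_gpt():
    model = GPT(vocab_size=50304, num_layers=12, num_heads=6, model_dim=768).cuda()
    tokens = torch.randint(0, 50256, (2049,), device='cuda') # 2048 input tokens = 16 blocks of 128
    loss = model(tokens[:-1], tokens[1:], torch.tensor(4, device='cuda', dtype=torch.int32))
    print(loss.item()) # ~= ln(50304) ~= 10.83 at init, since lm_head is zero-initialized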
# -----------------------------------------------------------------------------
# Our own simple Distributed Data Loader
def _load_data_shard(path):
    # the first 256 int32 values of the file are the header; the rest are uint16 tokens
    header = torch.from_file(path, False, 256, dtype=torch.int32)
    assert header[0] == 20240520, 'magic number mismatch in the data .bin file'
    assert header[1] == 1, 'unsupported version'
    num_tokens = int(header[2]) # number of tokens (claimed)
    with open(path, 'rb', buffering=0) as f:
        tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) # avoid pin_memory copy by @YouJiacheng
        f.seek(256 * 4)
        nbytes = f.readinto(tokens.numpy()) # avoid bytes->array copy by @YouJiacheng
        assert nbytes == 2 * num_tokens, 'number of tokens read does not match header'
    return tokens
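# For reference, a shard matching the header checks above could be written like this
# (a hypothetical helper under the same format assumptions; not used in this run):
def _write_data_shard(path, tokens): # tokens: numpy uint16 array
    import numpy as np
    header = np.zeros(256, dtype=np.int32)
    header[0] = 20240520    # magic number checked by the reader above
    header[1] = 1           # version
    header[2] = len(tokens) # claimed token count
    with open(path, 'wb') as f:
        f.write(header.tobytes())
        f.write(tokens.astype(np.uint16).tobytes())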
class DistributedDataLoader:
    def __init__(self, filename_pattern):
        self.rank = int(os.environ['RANK'])
        self.world_size = int(os.environ['WORLD_SIZE'])
        self.files = sorted(glob.glob(filename_pattern))
        self.reset()
    def reset(self):
        self.current_shard = -1
        self.advance()
    def advance(self):
        self.current_shard = (self.current_shard + 1) % len(self.files)
        self.current_position = 0
        self.tokens = _load_data_shard(self.files[self.current_shard])
    def next_batch(self, batch_size):
        assert batch_size % self.world_size == 0
        device_batch_size = batch_size // self.world_size
        # load next shard if necessary
        if self.current_position + batch_size + 1 >= len(self.tokens):
            self.advance()
        pos = self.current_position + self.rank * device_batch_size
        device_batch_tokens = self.tokens[pos:pos+device_batch_size+1]
        # advance current position
        self.current_position += batch_size
        inputs = device_batch_tokens[:-1].to(device='cuda', dtype=torch.int32, non_blocking=True)
        targets = device_batch_tokens[1:].to(device='cuda', dtype=torch.int64, non_blocking=True)
        return inputs, targets
# -----------------------------------------------------------------------------
# int main
@dataclass
class Hyperparameters:
    # data
    train_bin = 'data/fineweb10B/fineweb_train_*.bin' # input .bin to train on
    val_bin = 'data/fineweb10B/fineweb_val_*.bin' # input .bin to eval validation loss on
    # optimization
    batch_size = 8*64*1024 # batch size in tokens
    max_device_batch_size = 64*1024 # batch size per device in tokens
    num_iterations = 1390 # number of iterations to run
    cooldown_frac = 0.4 # fraction of training spent cooling down the learning rate
    bf16_embeds = True
    # evaluation and logging
    val_loss_every = 25 #125 # every how many steps to evaluate val loss? 0 for only at the end
    val_tokens = 10485760 # how many tokens of validation data? it's important to keep this fixed for consistent comparisons
    # implementation
    save_checkpoint = False
args = Hyperparameters()
micro_bs = args.max_device_batch_size
# set up DDP (distributed data parallel). torchrun sets this env variable
rank = int(os.environ['RANK'])
local_rank = int(os.environ['LOCAL_RANK'])
world_size = int(os.environ['WORLD_SIZE'])
assert torch.cuda.is_available()
torch.cuda.set_device(local_rank)
dist.init_process_group(backend='nccl', device_id=torch.device(local_rank))
dist.barrier()
master_process = (rank == 0) # this process will do logging, checkpointing etc.
# begin logging
logfile = None
if master_process:
    run_id = uuid.uuid4()
    os.makedirs('logs', exist_ok=True)
    logfile = f'logs/{run_id}.txt'
    print(logfile)
def print0(s, console=False):
    if master_process:
        with open(logfile, 'a') as f:
            if console:
                print(s)
            print(s, file=f)
# begin by printing this file (the Python code)
print0(code)
print0('='*100)
# log information about the hardware/software environment this is running on
print0(f'Running Python {sys.version}')
print0(f'Running PyTorch {torch.version.__version__} compiled for CUDA {torch.version.cuda}')
print0(subprocess.run(['nvidia-smi'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True).stdout)
print0('='*100)
# load data
train_loader = DistributedDataLoader(args.train_bin)
val_loader = DistributedDataLoader(args.val_bin)
print0(f'Training dataloader files: {train_loader.files}')
print0(f'Validation dataloader files: {val_loader.files}')
print0('='*100)
# init the models/opts/schedulers list; this will hold all of the separate model replicas that we use here
# (both the lrs and the momentums are defined as ranges further below)
#outer_opt_lr = 1.0 #0.7
#outer_opt_momentum = .9
models_opts_schedulers = []
num_models_to_simulate = 8
#diloco_update_steps = 1
diloco_update_steps = 25 #10
# Steps to compile before copying out the model to its replicas
compile_steps = 15 #2 #20
###############################################
# Make Logarithmic DiLoCo Update Schedule #
###############################################
#total_steps = 1390
#spacing_factor = 150 # the original schedule is scaled by this, this impacts how quickly the initial steps grow
# Calculate the update steps; append one extra update if the total doesn't divide evenly,
# since we still need a final update at the end of training
is_final_update_step = (args.num_iterations % diloco_update_steps > 0)
orig_schedule = torch.range(diloco_update_steps, args.num_iterations - (args.num_iterations % diloco_update_steps), step=diloco_update_steps, dtype=torch.int, device=torch.device('cuda'))
if is_final_update_step:
    orig_schedule = torch.cat([orig_schedule, torch.tensor([args.num_iterations], dtype=torch.int, device=torch.device('cuda'))], dim=-1)
# Warp the schedule through a logarithm, then reflect it, so the update density works
# properly (dense early in training, sparse later)
#log_schedule = torch.log1p(orig_schedule.float() / spacing_factor)
log_schedule = torch.log1p(orig_schedule.float()) #### / spacing_factor)
# reflect the log-warped values (dense early, sparse later)
log_schedule = log_schedule.max() - log_schedule
# linearly scale to match the original range
log_schedule = log_schedule * (args.num_iterations / log_schedule.max())
# cast to int and flip for proper ordering
log_schedule = log_schedule.int().flip(dims=(-1,))
# semi-hacky for now, but makes the code logic easier
log_schedule = log_schedule.tolist()
# Rebinding for hacking convenience (if we need to hack a different schedule in)
#diloco_outer_schedule = log_schedule
diloco_outer_schedule = orig_schedule.tolist()
print("diloco outer schedule!", diloco_outer_schedule)
print("num outer diloco update steps!", len(diloco_outer_schedule))
# there are only 50257 unique GPT-2 tokens; we extend to nearest multiple of 128 for efficiency. suggested to me by @Grad62304977.
# this originates from Karpathy's experiments.
core_model = GPT(vocab_size=50304, num_layers=12, num_heads=6, model_dim=768)
core_model = core_model.cuda()
if args.bf16_embeds:
    for m in core_model.modules():
        if isinstance(m, nn.Embedding):
            m.bfloat16()
core_model = torch.compile(core_model)
#####ddp_model = DDP(model, device_ids=[local_rank], broadcast_buffers=False, gradient_as_bucket_view=True)
# Add outer Nesterov optimizer to the core model
# The hyperparameters for these are updated every step, so we set them to reasonable defaults instead
outer_opt = torch.optim.SGD(core_model.parameters(), lr=1.0, momentum=0.9, nesterov=True)
######################################################################################
# Set Initial Momentum To 0 in outer_opt (PyTorch bug w/ first step dampening) #
######################################################################################
for parameter in core_model.parameters():
    parameter.grad = torch.zeros_like(parameter)
# Set outer opt momentum buffers (best to do this internally to avoid spaghetti code)
outer_opt.step()
core_model.zero_grad(set_to_none=True)
print("Compiling model!")
# call model so it is properly built, before cloning
for _ in range(compile_steps):
core_model.forward(torch.randint(0, 128, (1024*64,)).to(device='cuda', dtype=torch.long), torch.randint(0, 128, (1024*64,)).to(device='cuda', dtype=torch.long), torch.tensor([128], device='cuda', dtype=torch.long)).mean().backward()
# Set gradients to none
core_model.zero_grad(set_to_none=True)
print("Model compiled.")
# tmp dev import
import copy
for _ in range(num_models_to_simulate):
    # make model copy
    model_copy = copy.deepcopy(core_model)
    # collect the parameters to optimize
    hidden_matrix_params = [p for p in model_copy.blocks.parameters() if p.ndim == 2]
    embed_params = [model_copy.embed.weight, *model_copy.value_embeds.parameters()]
    scalar_params = [p for p in model_copy.parameters() if p.ndim < 2]
    head_params = [model_copy.lm_head.weight]
    # init the optimizer(s)
    optimizer1 = torch.optim.Adam([dict(params=embed_params, lr=0.6),
                                   dict(params=head_params, lr=0.008),
                                   dict(params=scalar_params, lr=0.04)],
                                  betas=(0.8, 0.95), fused=True)
    optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95)
    optimizers = [optimizer1, optimizer2]
    # learning rate schedule: stable then decay
    def get_lr(it):
        t = 1 - it / args.num_iterations # time remaining in training
        assert 1 >= t > 0
        # 1) constant lr for first part of training
        if t >= args.cooldown_frac:
            return 1.0
        # 2) then linear cooldown
        else:
            return t / args.cooldown_frac
    schedulers = [torch.optim.lr_scheduler.LambdaLR(opt, get_lr) for opt in optimizers]
    models_opts_schedulers.append((model_copy, optimizers, schedulers))
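# With cooldown_frac=0.4 and num_iterations=1390, get_lr holds the multiplier at 1.0 for
# roughly the first 834 steps (t >= 0.4), then decays it linearly to 0 over the final ~556 steps.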
outer_opt_scheduler = torch.optim.lr_scheduler.LambdaLR(outer_opt, get_lr)
# sliding window size schedule: linear increase over training in chunks of 128 from 128 -> 1792. By @fernbear.bsky.social
def get_sliding_window_blocks(it):
    x = it / args.num_iterations # training progress
    assert 0 <= x <= 1
    return int(((1 - x) * 128 + x * 1856) // 128)
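# Concretely: at step 0 this yields 128 // 128 = 1 block (128 tokens); at the final step it
# yields 1856 // 128 = 14 blocks (1792 tokens), matching the 128 -> 1792 schedule noted above.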
sliding_window_num_blocks = torch.tensor(1, dtype=torch.int32, device='cuda')
# Start training loop
training_time_ms = 0
# start the clock
torch.cuda.synchronize()
t0 = time.perf_counter()
# begin training
train_steps = args.num_iterations
for step in range(train_steps + 1):
    last_step = (step == train_steps)
    # This effectively ignores timing the first 10 steps, which are slower for weird reasons.
    # Alternately, and slightly more correctly in terms of benchmarking, we could do 10
    # steps with dummy data first, and then re-initialize the model and reset the loader.
    if step == 10:
        training_time_ms = 0
        t0 = time.perf_counter()
    timed_steps = float('nan') if step <= 11 else (step - 10) + 1 # <= 11 to avoid bug in val
    sliding_window_num_blocks.copy_(get_sliding_window_blocks(step))
    # --------------- VALIDATION SECTION -----------------
    #if last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0):
    # Only eval after every DiLoCo update step
    if last_step or (args.val_loss_every > 0 and step in diloco_outer_schedule): #step % args.val_loss_every == 0):
        # stop the clock
        torch.cuda.synchronize()
        training_time_ms += 1000 * (time.perf_counter() - t0)
        # run validation batches
        core_model.eval()
        val_loader.reset()
        val_loss = 0.0
        # calculate the number of steps to take in the val loop
        val_batch_size = world_size * micro_bs
        assert args.val_tokens % val_batch_size == 0
        val_steps = args.val_tokens // val_batch_size
        for _ in range(val_steps):
            with torch.no_grad():
                inputs_val, targets_val = val_loader.next_batch(val_batch_size)
                val_loss += core_model(inputs_val, targets_val, sliding_window_num_blocks)
        dist.all_reduce(val_loss, op=dist.ReduceOp.AVG)
        val_loss /= val_steps
        # logging
        print0(f'step:{step}/{train_steps} val_loss:{val_loss:.4f} train_time:{training_time_ms:.0f}ms step_avg:{training_time_ms/(timed_steps-1):.2f}ms', console=True)
        # start the clock again
        torch.cuda.synchronize()
        t0 = time.perf_counter()
    if last_step:
        if master_process and args.save_checkpoint:
            log = dict(step=step, code=code, model=core_model.state_dict(), optimizers=[opt.state_dict() for opt in optimizers])
            os.makedirs(f'logs/{run_id}', exist_ok=True)
            torch.save(log, f'logs/{run_id}/state_step{step:06d}.pt')
        # the last step only has the validation loop, so break to avoid training
        break
    # --------------- TRAINING SECTION -----------------
    #model.train()
    # set each model replica to train mode
    for model, _, _ in models_opts_schedulers:
        model.train()
    batch_size = args.batch_size
    assert batch_size % world_size == 0
    inputs_train, targets_train = train_loader.next_batch(batch_size)
    assert len(inputs_train) <= micro_bs or len(inputs_train) % micro_bs == 0
    assert batch_size//micro_bs == len(models_opts_schedulers), "The number of microbatches and the number of model/opt pairs need to be equal in this experiment (functions would need to be rewritten to iterate over model pairs instead of indexing by microbatch idx)."
    for i, (micro_inputs_train, micro_targets_train) in enumerate(zip(inputs_train.split(micro_bs), targets_train.split(micro_bs))):
        # forward/backward each microbatch on its own distinct model replica
        models_opts_schedulers[i][0](micro_inputs_train, micro_targets_train, sliding_window_num_blocks).backward()
        #model(micro_inputs_train, micro_targets_train, sliding_window_num_blocks).backward()
    # momentum warmup for Muon
    frac = min(step/300, 1)
    for model, opts, schedulers in models_opts_schedulers:
        # update momentum for muon in each group
        for group in opts[1].param_groups: #optimizer2.param_groups:
            group['momentum'] = (1 - frac) * 0.85 + frac * 0.95
        # step the optimizers and schedulers
        for opt, sched in zip(opts, schedulers):
            opt.step()
            if step != train_steps-1:
                sched.step()
        # null the gradients
        model.zero_grad(set_to_none=True)
    #############################################
    # DiLoCo Outer Loop (Distributed) Updates   #
    #############################################
    # Update the core model w/ the updates from the other models (optionally on different
    # timescales for different parts; just 1 outer step per update for now).
    # Zip all parameters together so we can stack them, average them, then merge them into the core model.
    #if last_step or (step != 0 and step % diloco_update_steps == 0):
    # Update DiLoCo on a logarithmically-based schedule now
    if last_step or (step != 0 and step in diloco_outer_schedule): #% diloco_update_steps == 0):
        models_group_params = [mos[0].parameters() for mos in models_opts_schedulers]
        models_grouped_params = zip(*models_group_params)
        ##################
        #    Momentum    #
        ##################
        outer_opt_momentum_warmup_steps = 1000 #300 #250 #500 #100 #300 #600 #300
        outer_opt_max_base_lr = 1.
        outer_opt_min_base_lr = .7
        outer_opt_min_momentum = 0. #.6 #.9 #.6 #.5 #.6
        outer_opt_max_momentum = .9 #.85 #.95 #.9 #.9
        frac = min(step/outer_opt_momentum_warmup_steps, 1)
        curr_outer_momentum = (1 - frac) * outer_opt_min_momentum + frac * outer_opt_max_momentum
        # Hacky, this should be consolidated into one single function for the LR scheduler
        curr_outer_base_lr = (frac * outer_opt_min_base_lr + (1. - frac) * outer_opt_max_base_lr)
        #################
        #   Dampening   #
        #################
        curr_dampening = 0.0 # tmp for now
        """
        # damping hparams
        dampening_steps = 300 #100 #300
        dampening_max = 0.6
        dampening_min = 0.0
        frac = min(step/dampening_steps, 1)
        curr_dampening = (1 - frac) * dampening_max + frac * dampening_min
        """
        # update the lr, momentum, and dampening for each param group in the outer opt
        for group in outer_opt.param_groups: #optimizer2.param_groups:
            #group['lr'] = outer_opt_lr * get_lr(step)
            group['lr'] = curr_outer_base_lr * get_lr(step)
            group['momentum'] = curr_outer_momentum
            #group['dampening'] = curr_dampening
            group['dampening'] = 1. - curr_outer_momentum # makes the momentum buffer an EMA of the outer grads #curr_dampening
        for core_parameters, dist_parameters_list in zip(core_model.parameters(), models_grouped_params):
            # TODO: individual parameter schedules?
            # TMP hack
            #params_list = list(dist_parameters_list)
            #dist_parameters_list = params_list
            # Simulate grad creation: the outer 'gradient' is the mean displacement of the replicas from the core weights
            grads_all = (core_parameters.data.unsqueeze(0) - torch.stack(dist_parameters_list, dim=0))
            core_parameters.grad = grads_all.mean(dim=0)
            # Simulate update # reduce_mean
            #parameters.data.add_(grads, alpha=-diloco_lr) # = torch.stack(dist_parameters_list, dim=0).mean(dim=0)
            # If this is the first outer step, PyTorch defaults to filling the momentum buffer with
            # the grad, which is a horribly-biased estimator of the state of the network over training.
            # Here, to account for the momentum warmup process removing this zero-debiasing operation,
            # we simply act as if the momentum buffer were zero for the first step (i.e. simply averaging
            # the network weights), and then let momentum and warmup do their things from there.
            # W/ the nesterov step, this means halving the initial first grad.
            """
            if step == 0: #diloco_update_steps:
                #outer_opt.state[core_parameters]['momentum_buffer'].data.zero_()
                core_parameters.grad.data.div_(2./(2.-dampening_max))
            """
            # outer_opt step update (only the current parameter holds a non-None grad here)
            outer_opt.step()
            outer_opt.zero_grad(set_to_none=True)
            # reset the model copies to the updated core values (would be done locally by each distributed worker)
            for dist_params in dist_parameters_list:
                dist_params.data.copy_(core_parameters.data)
            # Update core model for evals
            ####parameters.data = torch.stack(dist_parameters_list, dim=0).mean(dim=0)
            # Simulate broadcast back out (use this if not using distributed grads, but using the core model to sync instead)
            #[d_param.data.copy_(parameters.data) for d_param in dist_parameters_list]
    # logging
    approx_time = training_time_ms + 1000 * (time.perf_counter() - t0)
    print0(f'step:{step+1}/{train_steps} train_time:{approx_time:.0f}ms step_avg:{approx_time/timed_steps:.2f}ms', console=True)
print0(f'peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB')
dist.destroy_process_group()
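# Note: this script reads RANK / LOCAL_RANK / WORLD_SIZE from the environment, so it is meant
# to be launched under torchrun, e.g. (assumed invocation for this single-GPU run):
#   torchrun --standalone --nproc_per_node=1 <this_file>.py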
====================================================================================================
Running Python 3.12.7 (main, May 14 2025, 19:28:05) [GCC 13.2.0]
Running PyTorch 2.8.0.dev20250510+cu126 compiled for CUDA 12.6
Wed May 14 21:45:31 2025
+-----------------------------------------------------------------------------------------+
| NVIDIA-SMI 550.144.03 Driver Version: 550.144.03 CUDA Version: 12.4 |
|-----------------------------------------+------------------------+----------------------+
| GPU Name Persistence-M | Bus-Id Disp.A | Volatile Uncorr. ECC |
| Fan Temp Perf Pwr:Usage/Cap | Memory-Usage | GPU-Util Compute M. |
| | | MIG M. |
|=========================================+========================+======================|
| 0 NVIDIA H100 80GB HBM3 On | 00000000:0F:00.0 Off | 0 |
| N/A 37C P0 130W / 700W | 1180MiB / 81559MiB | 2% Default |
| | | Disabled |
+-----------------------------------------+------------------------+----------------------+
+-----------------------------------------------------------------------------------------+
| Processes: |
| GPU GI CI PID Type Process name GPU Memory |
| ID ID Usage |
|=========================================================================================|
+-----------------------------------------------------------------------------------------+
====================================================================================================
Training dataloader files: ['data/fineweb10B/fineweb_train_000001.bin', 'data/fineweb10B/fineweb_train_000002.bin', 'data/fineweb10B/fineweb_train_000003.bin', 'data/fineweb10B/fineweb_train_000004.bin', 'data/fineweb10B/fineweb_train_000005.bin', 'data/fineweb10B/fineweb_train_000006.bin', 'data/fineweb10B/fineweb_train_000007.bin', 'data/fineweb10B/fineweb_train_000008.bin']
Validation dataloader files: ['data/fineweb10B/fineweb_val_000000.bin']
====================================================================================================
step:1/1390 train_time:53593ms step_avg:nanms
step:2/1390 train_time:54691ms step_avg:nanms
step:3/1390 train_time:55846ms step_avg:nanms
step:4/1390 train_time:57014ms step_avg:nanms
step:5/1390 train_time:58186ms step_avg:nanms
step:6/1390 train_time:59357ms step_avg:nanms
step:7/1390 train_time:60529ms step_avg:nanms
step:8/1390 train_time:61695ms step_avg:nanms
step:9/1390 train_time:62877ms step_avg:nanms
step:10/1390 train_time:64057ms step_avg:nanms
step:11/1390 train_time:1185ms step_avg:nanms
step:12/1390 train_time:2360ms step_avg:nanms
step:13/1390 train_time:3539ms step_avg:1179.61ms
step:14/1390 train_time:4721ms step_avg:1180.32ms
step:15/1390 train_time:5897ms step_avg:1179.32ms
step:16/1390 train_time:7088ms step_avg:1181.36ms
step:17/1390 train_time:8260ms step_avg:1180.00ms
step:18/1390 train_time:9445ms step_avg:1180.58ms
step:19/1390 train_time:10619ms step_avg:1179.94ms
step:20/1390 train_time:11794ms step_avg:1179.42ms
step:21/1390 train_time:12978ms step_avg:1179.81ms
step:22/1390 train_time:14155ms step_avg:1179.61ms
step:23/1390 train_time:15337ms step_avg:1179.73ms
step:24/1390 train_time:16522ms step_avg:1180.18ms
step:25/1390 train_time:17709ms step_avg:1180.62ms
step:25/1390 val_loss:10.8258 train_time:17710ms step_avg:1180.64ms
step:26/1390 train_time:18909ms step_avg:1181.84ms
step:27/1390 train_time:20104ms step_avg:1182.60ms
step:28/1390 train_time:21282ms step_avg:1182.32ms
step:29/1390 train_time:22457ms step_avg:1181.94ms
step:30/1390 train_time:23631ms step_avg:1181.57ms
step:31/1390 train_time:24810ms step_avg:1181.41ms
step:32/1390 train_time:25987ms step_avg:1181.21ms
step:33/1390 train_time:27165ms step_avg:1181.08ms
step:34/1390 train_time:28345ms step_avg:1181.05ms
step:35/1390 train_time:29534ms step_avg:1181.34ms
step:36/1390 train_time:30707ms step_avg:1181.05ms
step:37/1390 train_time:31888ms step_avg:1181.03ms
step:38/1390 train_time:33067ms step_avg:1180.97ms
step:39/1390 train_time:34247ms step_avg:1180.94ms
step:40/1390 train_time:35434ms step_avg:1181.12ms
step:41/1390 train_time:36612ms step_avg:1181.04ms
step:42/1390 train_time:37795ms step_avg:1181.10ms
step:43/1390 train_time:38976ms step_avg:1181.10ms
step:44/1390 train_time:40156ms step_avg:1181.06ms
step:45/1390 train_time:41338ms step_avg:1181.09ms
step:46/1390 train_time:42528ms step_avg:1181.34ms
step:47/1390 train_time:43719ms step_avg:1181.59ms
step:48/1390 train_time:44911ms step_avg:1181.87ms
step:49/1390 train_time:46095ms step_avg:1181.92ms
step:50/1390 train_time:47284ms step_avg:1182.10ms
step:50/1390 val_loss:6.3451 train_time:47284ms step_avg:1182.11ms
step:51/1390 train_time:48506ms step_avg:1183.06ms
step:52/1390 train_time:49707ms step_avg:1183.51ms
step:53/1390 train_time:50887ms step_avg:1183.43ms
step:54/1390 train_time:52074ms step_avg:1183.51ms
step:55/1390 train_time:53259ms step_avg:1183.54ms
step:56/1390 train_time:54455ms step_avg:1183.80ms
step:57/1390 train_time:55653ms step_avg:1184.11ms
step:58/1390 train_time:56834ms step_avg:1184.05ms
step:59/1390 train_time:58031ms step_avg:1184.30ms
step:60/1390 train_time:59221ms step_avg:1184.42ms
step:61/1390 train_time:60423ms step_avg:1184.76ms
step:62/1390 train_time:61614ms step_avg:1184.89ms
step:63/1390 train_time:62794ms step_avg:1184.79ms
step:64/1390 train_time:63976ms step_avg:1184.74ms
step:65/1390 train_time:65171ms step_avg:1184.92ms
step:66/1390 train_time:66350ms step_avg:1184.83ms
step:67/1390 train_time:67534ms step_avg:1184.81ms
step:68/1390 train_time:68721ms step_avg:1184.84ms
step:69/1390 train_time:69910ms step_avg:1184.92ms
step:70/1390 train_time:71094ms step_avg:1184.90ms
step:71/1390 train_time:72289ms step_avg:1185.06ms
step:72/1390 train_time:73472ms step_avg:1185.04ms
step:73/1390 train_time:74650ms step_avg:1184.92ms
step:74/1390 train_time:75828ms step_avg:1184.81ms
step:75/1390 train_time:77009ms step_avg:1184.75ms
step:75/1390 val_loss:5.6156 train_time:77009ms step_avg:1184.75ms
step:76/1390 train_time:78218ms step_avg:1185.12ms
step:77/1390 train_time:79415ms step_avg:1185.30ms
step:78/1390 train_time:80603ms step_avg:1185.34ms
step:79/1390 train_time:81777ms step_avg:1185.18ms
step:80/1390 train_time:82955ms step_avg:1185.07ms
step:81/1390 train_time:84139ms step_avg:1185.05ms
step:82/1390 train_time:85317ms step_avg:1184.95ms
step:83/1390 train_time:86509ms step_avg:1185.06ms
step:84/1390 train_time:87701ms step_avg:1185.15ms
step:85/1390 train_time:88880ms step_avg:1185.07ms
step:86/1390 train_time:90062ms step_avg:1185.03ms
step:87/1390 train_time:91238ms step_avg:1184.90ms
step:88/1390 train_time:92420ms step_avg:1184.87ms
step:89/1390 train_time:93604ms step_avg:1184.86ms
step:90/1390 train_time:94782ms step_avg:1184.78ms
step:91/1390 train_time:95962ms step_avg:1184.71ms
step:92/1390 train_time:97146ms step_avg:1184.70ms
step:93/1390 train_time:98327ms step_avg:1184.66ms
step:94/1390 train_time:99516ms step_avg:1184.72ms
step:95/1390 train_time:100706ms step_avg:1184.77ms
step:96/1390 train_time:101890ms step_avg:1184.77ms
step:97/1390 train_time:103077ms step_avg:1184.80ms
step:98/1390 train_time:104257ms step_avg:1184.74ms
step:99/1390 train_time:105450ms step_avg:1184.83ms
step:100/1390 train_time:106642ms step_avg:1184.91ms
step:100/1390 val_loss:5.2806 train_time:106642ms step_avg:1184.91ms
step:101/1390 train_time:107865ms step_avg:1185.33ms
step:102/1390 train_time:109074ms step_avg:1185.59ms
step:103/1390 train_time:110259ms step_avg:1185.58ms
step:104/1390 train_time:111461ms step_avg:1185.76ms
step:105/1390 train_time:112667ms step_avg:1185.97ms
step:106/1390 train_time:113879ms step_avg:1186.24ms
step:107/1390 train_time:115087ms step_avg:1186.46ms
step:108/1390 train_time:116285ms step_avg:1186.58ms
step:109/1390 train_time:117488ms step_avg:1186.75ms
step:110/1390 train_time:118695ms step_avg:1186.95ms
step:111/1390 train_time:119901ms step_avg:1187.14ms
step:112/1390 train_time:121110ms step_avg:1187.35ms
step:113/1390 train_time:122314ms step_avg:1187.51ms
step:114/1390 train_time:123517ms step_avg:1187.66ms
step:115/1390 train_time:124731ms step_avg:1187.92ms
step:116/1390 train_time:125938ms step_avg:1188.09ms
step:117/1390 train_time:127143ms step_avg:1188.26ms
step:118/1390 train_time:128354ms step_avg:1188.46ms
step:119/1390 train_time:129559ms step_avg:1188.61ms
step:120/1390 train_time:130769ms step_avg:1188.81ms
step:121/1390 train_time:131989ms step_avg:1189.09ms
step:122/1390 train_time:133198ms step_avg:1189.27ms
step:123/1390 train_time:134409ms step_avg:1189.46ms
step:124/1390 train_time:135621ms step_avg:1189.66ms
step:125/1390 train_time:136835ms step_avg:1189.87ms
step:125/1390 val_loss:5.0292 train_time:136835ms step_avg:1189.87ms
step:126/1390 train_time:138071ms step_avg:1190.27ms
step:127/1390 train_time:139289ms step_avg:1190.51ms
step:128/1390 train_time:140496ms step_avg:1190.64ms
step:129/1390 train_time:141700ms step_avg:1190.76ms
step:130/1390 train_time:142905ms step_avg:1190.87ms
step:131/1390 train_time:144111ms step_avg:1191.00ms
step:132/1390 train_time:145310ms step_avg:1191.06ms
step:133/1390 train_time:146514ms step_avg:1191.17ms
step:134/1390 train_time:147715ms step_avg:1191.25ms
step:135/1390 train_time:148921ms step_avg:1191.37ms
step:136/1390 train_time:150128ms step_avg:1191.49ms
step:137/1390 train_time:151341ms step_avg:1191.66ms
step:138/1390 train_time:152551ms step_avg:1191.81ms
step:139/1390 train_time:153761ms step_avg:1191.94ms
step:140/1390 train_time:154974ms step_avg:1192.11ms
step:141/1390 train_time:156181ms step_avg:1192.22ms
step:142/1390 train_time:157388ms step_avg:1192.34ms
step:143/1390 train_time:158598ms step_avg:1192.47ms
step:144/1390 train_time:159810ms step_avg:1192.61ms
step:145/1390 train_time:161015ms step_avg:1192.71ms
step:146/1390 train_time:162221ms step_avg:1192.80ms
step:147/1390 train_time:163433ms step_avg:1192.94ms
step:148/1390 train_time:164643ms step_avg:1193.07ms
step:149/1390 train_time:165853ms step_avg:1193.19ms
step:150/1390 train_time:167062ms step_avg:1193.30ms
step:150/1390 val_loss:4.8101 train_time:167063ms step_avg:1193.30ms
step:151/1390 train_time:168293ms step_avg:1193.57ms
step:152/1390 train_time:169510ms step_avg:1193.73ms
step:153/1390 train_time:170719ms step_avg:1193.84ms
step:154/1390 train_time:171926ms step_avg:1193.93ms
step:155/1390 train_time:173134ms step_avg:1194.03ms
step:156/1390 train_time:174336ms step_avg:1194.08ms
step:157/1390 train_time:175546ms step_avg:1194.19ms
step:158/1390 train_time:176751ms step_avg:1194.27ms
step:159/1390 train_time:177967ms step_avg:1194.41ms
step:160/1390 train_time:179166ms step_avg:1194.44ms
step:161/1390 train_time:180372ms step_avg:1194.52ms
step:162/1390 train_time:181583ms step_avg:1194.63ms
step:163/1390 train_time:182792ms step_avg:1194.72ms
step:164/1390 train_time:184000ms step_avg:1194.81ms
step:165/1390 train_time:185209ms step_avg:1194.90ms
step:166/1390 train_time:186417ms step_avg:1194.98ms
step:167/1390 train_time:187625ms step_avg:1195.06ms
step:168/1390 train_time:188835ms step_avg:1195.16ms
step:169/1390 train_time:190044ms step_avg:1195.25ms
step:170/1390 train_time:191251ms step_avg:1195.32ms
step:171/1390 train_time:192451ms step_avg:1195.35ms
step:172/1390 train_time:193658ms step_avg:1195.42ms
step:173/1390 train_time:194875ms step_avg:1195.55ms
step:174/1390 train_time:196083ms step_avg:1195.63ms
step:175/1390 train_time:197289ms step_avg:1195.69ms
step:175/1390 val_loss:4.6269 train_time:197289ms step_avg:1195.69ms
step:176/1390 train_time:198521ms step_avg:1195.91ms
step:177/1390 train_time:199738ms step_avg:1196.04ms
step:178/1390 train_time:200951ms step_avg:1196.13ms
step:179/1390 train_time:202156ms step_avg:1196.19ms
step:180/1390 train_time:203362ms step_avg:1196.25ms
step:181/1390 train_time:204566ms step_avg:1196.29ms
step:182/1390 train_time:205777ms step_avg:1196.38ms
step:183/1390 train_time:206979ms step_avg:1196.41ms
step:184/1390 train_time:208184ms step_avg:1196.46ms
step:185/1390 train_time:209389ms step_avg:1196.51ms
step:186/1390 train_time:210595ms step_avg:1196.56ms
step:187/1390 train_time:211803ms step_avg:1196.63ms
step:188/1390 train_time:213005ms step_avg:1196.66ms
step:189/1390 train_time:214212ms step_avg:1196.72ms
step:190/1390 train_time:215418ms step_avg:1196.77ms
step:191/1390 train_time:216668ms step_avg:1197.06ms
step:192/1390 train_time:217879ms step_avg:1197.14ms
step:193/1390 train_time:219089ms step_avg:1197.21ms
step:194/1390 train_time:220290ms step_avg:1197.23ms
step:195/1390 train_time:221499ms step_avg:1197.29ms
step:196/1390 train_time:222708ms step_avg:1197.36ms
step:197/1390 train_time:223917ms step_avg:1197.42ms
step:198/1390 train_time:225122ms step_avg:1197.46ms
step:199/1390 train_time:226331ms step_avg:1197.52ms
step:200/1390 train_time:227538ms step_avg:1197.57ms
step:200/1390 val_loss:4.4900 train_time:227538ms step_avg:1197.57ms
step:201/1390 train_time:228778ms step_avg:1197.79ms
step:202/1390 train_time:229998ms step_avg:1197.91ms
step:203/1390 train_time:231203ms step_avg:1197.94ms
step:204/1390 train_time:232410ms step_avg:1197.99ms
step:205/1390 train_time:233624ms step_avg:1198.07ms
step:206/1390 train_time:234834ms step_avg:1198.13ms
step:207/1390 train_time:236058ms step_avg:1198.26ms
step:208/1390 train_time:237283ms step_avg:1198.40ms
step:209/1390 train_time:238516ms step_avg:1198.57ms
step:210/1390 train_time:239746ms step_avg:1198.73ms
step:211/1390 train_time:240984ms step_avg:1198.93ms
step:212/1390 train_time:242209ms step_avg:1199.05ms
step:213/1390 train_time:243445ms step_avg:1199.24ms
step:214/1390 train_time:244688ms step_avg:1199.45ms
step:215/1390 train_time:245922ms step_avg:1199.62ms
step:216/1390 train_time:247154ms step_avg:1199.78ms
step:217/1390 train_time:248383ms step_avg:1199.92ms
step:218/1390 train_time:249613ms step_avg:1200.06ms
step:219/1390 train_time:250858ms step_avg:1200.28ms
step:220/1390 train_time:252093ms step_avg:1200.44ms
step:221/1390 train_time:253329ms step_avg:1200.61ms
step:222/1390 train_time:254566ms step_avg:1200.78ms
step:223/1390 train_time:255802ms step_avg:1200.95ms
step:224/1390 train_time:257031ms step_avg:1201.08ms
step:225/1390 train_time:258266ms step_avg:1201.24ms
step:225/1390 val_loss:4.3660 train_time:258266ms step_avg:1201.24ms
step:226/1390 train_time:259532ms step_avg:1201.54ms
step:227/1390 train_time:260770ms step_avg:1201.70ms
step:228/1390 train_time:262003ms step_avg:1201.85ms
step:229/1390 train_time:263237ms step_avg:1202.00ms
step:230/1390 train_time:264470ms step_avg:1202.14ms
step:231/1390 train_time:265706ms step_avg:1202.29ms
step:232/1390 train_time:266928ms step_avg:1202.38ms
step:233/1390 train_time:268163ms step_avg:1202.52ms
step:234/1390 train_time:269392ms step_avg:1202.64ms
step:235/1390 train_time:270629ms step_avg:1202.80ms
step:236/1390 train_time:271852ms step_avg:1202.88ms
step:237/1390 train_time:273086ms step_avg:1203.02ms
step:238/1390 train_time:274319ms step_avg:1203.15ms
step:239/1390 train_time:275548ms step_avg:1203.27ms
step:240/1390 train_time:276780ms step_avg:1203.39ms
step:241/1390 train_time:278009ms step_avg:1203.50ms
step:242/1390 train_time:279242ms step_avg:1203.63ms
step:243/1390 train_time:280471ms step_avg:1203.74ms
step:244/1390 train_time:281699ms step_avg:1203.84ms
step:245/1390 train_time:282930ms step_avg:1203.96ms
step:246/1390 train_time:284167ms step_avg:1204.10ms
step:247/1390 train_time:285400ms step_avg:1204.22ms
step:248/1390 train_time:286632ms step_avg:1204.34ms
step:249/1390 train_time:287864ms step_avg:1204.45ms
step:250/1390 train_time:289095ms step_avg:1204.56ms
step:250/1390 val_loss:4.2791 train_time:289095ms step_avg:1204.56ms
step:251/1390 train_time:290353ms step_avg:1204.78ms
step:252/1390 train_time:291600ms step_avg:1204.96ms
step:253/1390 train_time:292829ms step_avg:1205.06ms
step:254/1390 train_time:294062ms step_avg:1205.17ms
step:255/1390 train_time:295292ms step_avg:1205.27ms
step:256/1390 train_time:296529ms step_avg:1205.40ms
step:257/1390 train_time:297769ms step_avg:1205.54ms
step:258/1390 train_time:299003ms step_avg:1205.66ms
step:259/1390 train_time:300239ms step_avg:1205.78ms
step:260/1390 train_time:301473ms step_avg:1205.89ms
step:261/1390 train_time:302708ms step_avg:1206.01ms
step:262/1390 train_time:303951ms step_avg:1206.15ms
step:263/1390 train_time:305182ms step_avg:1206.25ms
step:264/1390 train_time:306424ms step_avg:1206.39ms
step:265/1390 train_time:307651ms step_avg:1206.47ms
step:266/1390 train_time:308883ms step_avg:1206.57ms
step:267/1390 train_time:310118ms step_avg:1206.69ms
step:268/1390 train_time:311360ms step_avg:1206.82ms
step:269/1390 train_time:312592ms step_avg:1206.92ms
step:270/1390 train_time:313826ms step_avg:1207.02ms
step:271/1390 train_time:315060ms step_avg:1207.13ms
step:272/1390 train_time:316296ms step_avg:1207.24ms
step:273/1390 train_time:317532ms step_avg:1207.35ms
step:274/1390 train_time:318761ms step_avg:1207.43ms
step:275/1390 train_time:319995ms step_avg:1207.53ms
step:275/1390 val_loss:4.2128 train_time:319995ms step_avg:1207.53ms
step:276/1390 train_time:321254ms step_avg:1207.72ms
step:277/1390 train_time:322494ms step_avg:1207.84ms
step:278/1390 train_time:323721ms step_avg:1207.91ms
step:279/1390 train_time:324954ms step_avg:1208.01ms
step:280/1390 train_time:326183ms step_avg:1208.09ms
step:281/1390 train_time:327418ms step_avg:1208.18ms
step:282/1390 train_time:328649ms step_avg:1208.27ms
step:283/1390 train_time:329889ms step_avg:1208.38ms
step:284/1390 train_time:331124ms step_avg:1208.48ms
step:285/1390 train_time:332359ms step_avg:1208.58ms
step:286/1390 train_time:333593ms step_avg:1208.67ms
step:287/1390 train_time:334832ms step_avg:1208.78ms
step:288/1390 train_time:336066ms step_avg:1208.87ms
step:289/1390 train_time:337307ms step_avg:1208.98ms
step:290/1390 train_time:338550ms step_avg:1209.11ms
step:291/1390 train_time:339779ms step_avg:1209.18ms
step:292/1390 train_time:341013ms step_avg:1209.27ms
step:293/1390 train_time:342246ms step_avg:1209.35ms
step:294/1390 train_time:343478ms step_avg:1209.43ms
step:295/1390 train_time:344714ms step_avg:1209.52ms
step:296/1390 train_time:345947ms step_avg:1209.61ms
step:297/1390 train_time:347177ms step_avg:1209.68ms
step:298/1390 train_time:348408ms step_avg:1209.75ms
step:299/1390 train_time:349646ms step_avg:1209.85ms
step:300/1390 train_time:350881ms step_avg:1209.94ms
step:300/1390 val_loss:4.1584 train_time:350882ms step_avg:1209.94ms
step:301/1390 train_time:352150ms step_avg:1210.14ms
step:302/1390 train_time:353389ms step_avg:1210.24ms
step:303/1390 train_time:354620ms step_avg:1210.31ms
step:304/1390 train_time:355856ms step_avg:1210.39ms
step:305/1390 train_time:357091ms step_avg:1210.48ms
step:306/1390 train_time:358324ms step_avg:1210.56ms
step:307/1390 train_time:359554ms step_avg:1210.62ms
step:308/1390 train_time:360786ms step_avg:1210.69ms
step:309/1390 train_time:362020ms step_avg:1210.77ms
step:310/1390 train_time:363276ms step_avg:1210.92ms
step:311/1390 train_time:364517ms step_avg:1211.02ms
step:312/1390 train_time:365777ms step_avg:1211.18ms
step:313/1390 train_time:367037ms step_avg:1211.34ms
step:314/1390 train_time:368293ms step_avg:1211.49ms
step:315/1390 train_time:369539ms step_avg:1211.60ms
step:316/1390 train_time:370789ms step_avg:1211.73ms
step:317/1390 train_time:372046ms step_avg:1211.88ms
step:318/1390 train_time:373300ms step_avg:1212.01ms
step:319/1390 train_time:374553ms step_avg:1212.15ms
step:320/1390 train_time:375799ms step_avg:1212.25ms
step:321/1390 train_time:377053ms step_avg:1212.39ms
step:322/1390 train_time:378306ms step_avg:1212.52ms
step:323/1390 train_time:379553ms step_avg:1212.63ms
step:324/1390 train_time:380798ms step_avg:1212.73ms
step:325/1390 train_time:382057ms step_avg:1212.88ms
step:325/1390 val_loss:4.0941 train_time:382057ms step_avg:1212.88ms
step:326/1390 train_time:383334ms step_avg:1213.08ms
step:327/1390 train_time:384603ms step_avg:1213.26ms
step:328/1390 train_time:385855ms step_avg:1213.38ms
step:329/1390 train_time:387099ms step_avg:1213.48ms
step:330/1390 train_time:388346ms step_avg:1213.58ms
step:331/1390 train_time:389597ms step_avg:1213.70ms
step:332/1390 train_time:390843ms step_avg:1213.80ms
step:333/1390 train_time:392095ms step_avg:1213.92ms
step:334/1390 train_time:393337ms step_avg:1214.00ms
step:335/1390 train_time:394582ms step_avg:1214.10ms
step:336/1390 train_time:395832ms step_avg:1214.21ms
step:337/1390 train_time:397079ms step_avg:1214.31ms
step:338/1390 train_time:398325ms step_avg:1214.41ms
step:339/1390 train_time:399573ms step_avg:1214.51ms
step:340/1390 train_time:400820ms step_avg:1214.61ms
step:341/1390 train_time:402068ms step_avg:1214.71ms
step:342/1390 train_time:403314ms step_avg:1214.80ms
step:343/1390 train_time:404564ms step_avg:1214.91ms
step:344/1390 train_time:405819ms step_avg:1215.03ms
step:345/1390 train_time:407066ms step_avg:1215.12ms
step:346/1390 train_time:408314ms step_avg:1215.22ms
step:347/1390 train_time:409564ms step_avg:1215.32ms
step:348/1390 train_time:410813ms step_avg:1215.42ms
step:349/1390 train_time:412061ms step_avg:1215.52ms
step:350/1390 train_time:413312ms step_avg:1215.62ms
step:350/1390 val_loss:4.0490 train_time:413312ms step_avg:1215.62ms
step:351/1390 train_time:414593ms step_avg:1215.82ms
step:352/1390 train_time:415846ms step_avg:1215.92ms
step:353/1390 train_time:417090ms step_avg:1216.01ms
step:354/1390 train_time:418341ms step_avg:1216.11ms
step:355/1390 train_time:419588ms step_avg:1216.20ms
step:356/1390 train_time:420828ms step_avg:1216.27ms
step:357/1390 train_time:422075ms step_avg:1216.35ms
step:358/1390 train_time:423325ms step_avg:1216.45ms
step:359/1390 train_time:424572ms step_avg:1216.54ms
step:360/1390 train_time:425828ms step_avg:1216.65ms
step:361/1390 train_time:427075ms step_avg:1216.74ms
step:362/1390 train_time:428326ms step_avg:1216.83ms
step:363/1390 train_time:429574ms step_avg:1216.92ms
step:364/1390 train_time:430824ms step_avg:1217.02ms
step:365/1390 train_time:432068ms step_avg:1217.09ms
step:366/1390 train_time:433311ms step_avg:1217.17ms
step:367/1390 train_time:434555ms step_avg:1217.24ms
step:368/1390 train_time:435799ms step_avg:1217.32ms
step:369/1390 train_time:437050ms step_avg:1217.41ms
step:370/1390 train_time:438303ms step_avg:1217.51ms
step:371/1390 train_time:439551ms step_avg:1217.59ms
step:372/1390 train_time:440798ms step_avg:1217.67ms
step:373/1390 train_time:442044ms step_avg:1217.75ms
step:374/1390 train_time:443286ms step_avg:1217.82ms
step:375/1390 train_time:444537ms step_avg:1217.91ms
step:375/1390 val_loss:4.0093 train_time:444537ms step_avg:1217.91ms
step:376/1390 train_time:445806ms step_avg:1218.05ms
step:377/1390 train_time:447067ms step_avg:1218.17ms
step:378/1390 train_time:448316ms step_avg:1218.25ms
step:379/1390 train_time:449560ms step_avg:1218.32ms
step:380/1390 train_time:450809ms step_avg:1218.40ms
step:381/1390 train_time:452094ms step_avg:1218.58ms
step:382/1390 train_time:453341ms step_avg:1218.66ms
step:383/1390 train_time:454582ms step_avg:1218.72ms
step:384/1390 train_time:455825ms step_avg:1218.78ms
step:385/1390 train_time:457072ms step_avg:1218.86ms
step:386/1390 train_time:458328ms step_avg:1218.96ms
step:387/1390 train_time:459578ms step_avg:1219.04ms
step:388/1390 train_time:460826ms step_avg:1219.12ms
step:389/1390 train_time:462073ms step_avg:1219.19ms
step:390/1390 train_time:463323ms step_avg:1219.27ms
step:391/1390 train_time:464575ms step_avg:1219.36ms
step:392/1390 train_time:465836ms step_avg:1219.47ms
step:393/1390 train_time:467082ms step_avg:1219.54ms
step:394/1390 train_time:468332ms step_avg:1219.61ms
step:395/1390 train_time:469585ms step_avg:1219.70ms
step:396/1390 train_time:470830ms step_avg:1219.77ms
step:397/1390 train_time:472082ms step_avg:1219.85ms
step:398/1390 train_time:473335ms step_avg:1219.94ms
step:399/1390 train_time:474591ms step_avg:1220.03ms
step:400/1390 train_time:475840ms step_avg:1220.10ms
step:400/1390 val_loss:3.9720 train_time:475840ms step_avg:1220.10ms
step:401/1390 train_time:477115ms step_avg:1220.24ms
step:402/1390 train_time:478371ms step_avg:1220.33ms
step:403/1390 train_time:479625ms step_avg:1220.42ms
step:404/1390 train_time:480877ms step_avg:1220.50ms
step:405/1390 train_time:482118ms step_avg:1220.55ms
step:406/1390 train_time:483369ms step_avg:1220.63ms
step:407/1390 train_time:484622ms step_avg:1220.71ms
step:408/1390 train_time:485875ms step_avg:1220.79ms
step:409/1390 train_time:487123ms step_avg:1220.86ms
step:410/1390 train_time:488365ms step_avg:1220.91ms
step:411/1390 train_time:489610ms step_avg:1220.97ms
step:412/1390 train_time:490855ms step_avg:1221.03ms
step:413/1390 train_time:492126ms step_avg:1221.16ms
step:414/1390 train_time:493386ms step_avg:1221.25ms
step:415/1390 train_time:494648ms step_avg:1221.35ms
step:416/1390 train_time:495918ms step_avg:1221.47ms
step:417/1390 train_time:497179ms step_avg:1221.57ms
step:418/1390 train_time:498435ms step_avg:1221.65ms
step:419/1390 train_time:499696ms step_avg:1221.75ms
step:420/1390 train_time:500961ms step_avg:1221.86ms
step:421/1390 train_time:502220ms step_avg:1221.95ms
step:422/1390 train_time:503489ms step_avg:1222.06ms
step:423/1390 train_time:504761ms step_avg:1222.18ms
step:424/1390 train_time:506032ms step_avg:1222.30ms
step:425/1390 train_time:507296ms step_avg:1222.40ms
step:425/1390 val_loss:3.9308 train_time:507297ms step_avg:1222.40ms
step:426/1390 train_time:508591ms step_avg:1222.57ms
step:427/1390 train_time:509871ms step_avg:1222.71ms
step:428/1390 train_time:511142ms step_avg:1222.83ms
step:429/1390 train_time:512410ms step_avg:1222.94ms
step:430/1390 train_time:513680ms step_avg:1223.05ms
step:431/1390 train_time:514949ms step_avg:1223.16ms
step:432/1390 train_time:516204ms step_avg:1223.23ms
step:433/1390 train_time:517472ms step_avg:1223.34ms
step:434/1390 train_time:518741ms step_avg:1223.45ms
step:435/1390 train_time:520010ms step_avg:1223.55ms
step:436/1390 train_time:521276ms step_avg:1223.65ms
step:437/1390 train_time:522549ms step_avg:1223.77ms
step:438/1390 train_time:523813ms step_avg:1223.86ms
step:439/1390 train_time:525077ms step_avg:1223.96ms
step:440/1390 train_time:526344ms step_avg:1224.06ms
step:441/1390 train_time:527605ms step_avg:1224.14ms
step:442/1390 train_time:528867ms step_avg:1224.23ms
step:443/1390 train_time:530128ms step_avg:1224.31ms
step:444/1390 train_time:531392ms step_avg:1224.41ms
step:445/1390 train_time:532651ms step_avg:1224.49ms
step:446/1390 train_time:533918ms step_avg:1224.58ms
step:447/1390 train_time:535180ms step_avg:1224.67ms
step:448/1390 train_time:536452ms step_avg:1224.78ms
step:449/1390 train_time:537721ms step_avg:1224.88ms
step:450/1390 train_time:538985ms step_avg:1224.97ms
step:450/1390 val_loss:3.9043 train_time:538985ms step_avg:1224.97ms
step:451/1390 train_time:540280ms step_avg:1225.12ms
step:452/1390 train_time:541550ms step_avg:1225.23ms
step:453/1390 train_time:542805ms step_avg:1225.29ms
step:454/1390 train_time:544071ms step_avg:1225.38ms
step:455/1390 train_time:545331ms step_avg:1225.46ms
step:456/1390 train_time:546587ms step_avg:1225.53ms
step:457/1390 train_time:547844ms step_avg:1225.60ms
step:458/1390 train_time:549110ms step_avg:1225.69ms
step:459/1390 train_time:550368ms step_avg:1225.76ms
step:460/1390 train_time:551632ms step_avg:1225.85ms
step:461/1390 train_time:552895ms step_avg:1225.93ms
step:462/1390 train_time:554168ms step_avg:1226.04ms
step:463/1390 train_time:555420ms step_avg:1226.09ms
step:464/1390 train_time:556676ms step_avg:1226.16ms
step:465/1390 train_time:557943ms step_avg:1226.25ms
step:466/1390 train_time:559204ms step_avg:1226.32ms
step:467/1390 train_time:560462ms step_avg:1226.39ms
step:468/1390 train_time:561713ms step_avg:1226.45ms
step:469/1390 train_time:562980ms step_avg:1226.54ms
step:470/1390 train_time:564250ms step_avg:1226.63ms
step:471/1390 train_time:565511ms step_avg:1226.70ms
step:472/1390 train_time:566767ms step_avg:1226.77ms
step:473/1390 train_time:568029ms step_avg:1226.84ms
step:474/1390 train_time:569292ms step_avg:1226.92ms
step:475/1390 train_time:570548ms step_avg:1226.99ms
step:475/1390 val_loss:3.8731 train_time:570548ms step_avg:1226.99ms
step:476/1390 train_time:571834ms step_avg:1227.11ms
step:477/1390 train_time:573109ms step_avg:1227.21ms
step:478/1390 train_time:574371ms step_avg:1227.29ms
step:479/1390 train_time:575626ms step_avg:1227.35ms
step:480/1390 train_time:576895ms step_avg:1227.44ms
step:481/1390 train_time:578153ms step_avg:1227.50ms
step:482/1390 train_time:579417ms step_avg:1227.58ms
step:483/1390 train_time:580680ms step_avg:1227.65ms
step:484/1390 train_time:581941ms step_avg:1227.72ms
step:485/1390 train_time:583201ms step_avg:1227.79ms
step:486/1390 train_time:584454ms step_avg:1227.85ms
step:487/1390 train_time:585720ms step_avg:1227.92ms
step:488/1390 train_time:586975ms step_avg:1227.98ms
step:489/1390 train_time:588230ms step_avg:1228.04ms
step:490/1390 train_time:589492ms step_avg:1228.11ms
step:491/1390 train_time:590757ms step_avg:1228.18ms
step:492/1390 train_time:592017ms step_avg:1228.25ms
step:493/1390 train_time:593281ms step_avg:1228.32ms
step:494/1390 train_time:594553ms step_avg:1228.41ms
step:495/1390 train_time:595816ms step_avg:1228.49ms
step:496/1390 train_time:597072ms step_avg:1228.54ms
step:497/1390 train_time:598331ms step_avg:1228.61ms
step:498/1390 train_time:599590ms step_avg:1228.67ms
step:499/1390 train_time:600853ms step_avg:1228.74ms
step:500/1390 train_time:602111ms step_avg:1228.80ms
step:500/1390 val_loss:3.8539 train_time:602112ms step_avg:1228.80ms
step:501/1390 train_time:603402ms step_avg:1228.92ms
step:502/1390 train_time:604678ms step_avg:1229.02ms
step:503/1390 train_time:605945ms step_avg:1229.10ms
step:504/1390 train_time:607204ms step_avg:1229.16ms
step:505/1390 train_time:608466ms step_avg:1229.22ms
step:506/1390 train_time:609743ms step_avg:1229.32ms
step:507/1390 train_time:611007ms step_avg:1229.39ms
step:508/1390 train_time:612266ms step_avg:1229.45ms
step:509/1390 train_time:613530ms step_avg:1229.52ms
step:510/1390 train_time:614790ms step_avg:1229.58ms
step:511/1390 train_time:616056ms step_avg:1229.65ms
step:512/1390 train_time:617315ms step_avg:1229.71ms
step:513/1390 train_time:618576ms step_avg:1229.77ms
step:514/1390 train_time:619848ms step_avg:1229.86ms
step:515/1390 train_time:621112ms step_avg:1229.92ms
step:516/1390 train_time:622384ms step_avg:1230.01ms
step:517/1390 train_time:623663ms step_avg:1230.10ms
step:518/1390 train_time:624925ms step_avg:1230.17ms
step:519/1390 train_time:626193ms step_avg:1230.24ms
step:520/1390 train_time:627459ms step_avg:1230.31ms
step:521/1390 train_time:628733ms step_avg:1230.40ms
step:522/1390 train_time:629999ms step_avg:1230.47ms
step:523/1390 train_time:631270ms step_avg:1230.55ms
step:524/1390 train_time:632547ms step_avg:1230.64ms
step:525/1390 train_time:633819ms step_avg:1230.72ms
step:525/1390 val_loss:3.8230 train_time:633819ms step_avg:1230.72ms
step:526/1390 train_time:635116ms step_avg:1230.85ms
step:527/1390 train_time:636393ms step_avg:1230.93ms
step:528/1390 train_time:637655ms step_avg:1230.99ms
step:529/1390 train_time:638924ms step_avg:1231.07ms
step:530/1390 train_time:640197ms step_avg:1231.15ms
step:531/1390 train_time:641463ms step_avg:1231.22ms
step:532/1390 train_time:642739ms step_avg:1231.30ms
step:533/1390 train_time:644013ms step_avg:1231.38ms
step:534/1390 train_time:645276ms step_avg:1231.44ms
step:535/1390 train_time:646547ms step_avg:1231.52ms
step:536/1390 train_time:647811ms step_avg:1231.58ms
step:537/1390 train_time:649082ms step_avg:1231.65ms
step:538/1390 train_time:650359ms step_avg:1231.74ms
step:539/1390 train_time:651626ms step_avg:1231.81ms
step:540/1390 train_time:652892ms step_avg:1231.87ms
step:541/1390 train_time:654162ms step_avg:1231.94ms
step:542/1390 train_time:655438ms step_avg:1232.03ms
step:543/1390 train_time:656695ms step_avg:1232.07ms
step:544/1390 train_time:657960ms step_avg:1232.13ms
step:545/1390 train_time:659227ms step_avg:1232.20ms
step:546/1390 train_time:660501ms step_avg:1232.28ms
step:547/1390 train_time:661774ms step_avg:1232.35ms
step:548/1390 train_time:663043ms step_avg:1232.42ms
step:549/1390 train_time:664312ms step_avg:1232.49ms
step:550/1390 train_time:665578ms step_avg:1232.55ms
step:550/1390 val_loss:3.8030 train_time:665578ms step_avg:1232.55ms
step:551/1390 train_time:666874ms step_avg:1232.67ms
step:552/1390 train_time:668154ms step_avg:1232.76ms
step:553/1390 train_time:669431ms step_avg:1232.84ms
step:554/1390 train_time:670696ms step_avg:1232.90ms
step:555/1390 train_time:671962ms step_avg:1232.96ms
step:556/1390 train_time:673226ms step_avg:1233.01ms
step:557/1390 train_time:674497ms step_avg:1233.08ms
step:558/1390 train_time:675755ms step_avg:1233.13ms
step:559/1390 train_time:677020ms step_avg:1233.19ms
step:560/1390 train_time:678282ms step_avg:1233.24ms
step:561/1390 train_time:679553ms step_avg:1233.31ms
step:562/1390 train_time:680823ms step_avg:1233.37ms
step:563/1390 train_time:682087ms step_avg:1233.43ms
step:564/1390 train_time:683352ms step_avg:1233.49ms
step:565/1390 train_time:684624ms step_avg:1233.56ms
step:566/1390 train_time:685889ms step_avg:1233.61ms
step:567/1390 train_time:687155ms step_avg:1233.67ms
step:568/1390 train_time:688422ms step_avg:1233.73ms
step:569/1390 train_time:689693ms step_avg:1233.80ms
step:570/1390 train_time:690964ms step_avg:1233.86ms
step:571/1390 train_time:692268ms step_avg:1233.99ms
step:572/1390 train_time:693533ms step_avg:1234.04ms
step:573/1390 train_time:694803ms step_avg:1234.11ms
step:574/1390 train_time:696069ms step_avg:1234.16ms
step:575/1390 train_time:697334ms step_avg:1234.22ms
step:575/1390 val_loss:3.7876 train_time:697334ms step_avg:1234.22ms
step:576/1390 train_time:698624ms step_avg:1234.32ms
step:577/1390 train_time:699899ms step_avg:1234.39ms
step:578/1390 train_time:701171ms step_avg:1234.46ms
step:579/1390 train_time:702438ms step_avg:1234.51ms
step:580/1390 train_time:703702ms step_avg:1234.56ms
step:581/1390 train_time:704968ms step_avg:1234.62ms
step:582/1390 train_time:706228ms step_avg:1234.66ms
step:583/1390 train_time:707491ms step_avg:1234.71ms
step:584/1390 train_time:708757ms step_avg:1234.77ms
step:585/1390 train_time:710019ms step_avg:1234.82ms
step:586/1390 train_time:711284ms step_avg:1234.87ms
step:587/1390 train_time:712552ms step_avg:1234.93ms
step:588/1390 train_time:713815ms step_avg:1234.97ms
step:589/1390 train_time:715084ms step_avg:1235.03ms
step:590/1390 train_time:716352ms step_avg:1235.09ms
step:591/1390 train_time:717622ms step_avg:1235.15ms
step:592/1390 train_time:718888ms step_avg:1235.20ms
step:593/1390 train_time:720149ms step_avg:1235.25ms
step:594/1390 train_time:721424ms step_avg:1235.32ms
step:595/1390 train_time:722690ms step_avg:1235.37ms
step:596/1390 train_time:723952ms step_avg:1235.41ms
step:597/1390 train_time:725212ms step_avg:1235.46ms
step:598/1390 train_time:726479ms step_avg:1235.51ms
step:599/1390 train_time:727745ms step_avg:1235.56ms
step:600/1390 train_time:729012ms step_avg:1235.61ms
step:600/1390 val_loss:3.7644 train_time:729012ms step_avg:1235.61ms
step:601/1390 train_time:730294ms step_avg:1235.69ms
step:602/1390 train_time:731571ms step_avg:1235.76ms
step:603/1390 train_time:732834ms step_avg:1235.81ms
step:604/1390 train_time:734100ms step_avg:1235.86ms
step:605/1390 train_time:735365ms step_avg:1235.91ms
step:606/1390 train_time:736629ms step_avg:1235.96ms
step:607/1390 train_time:737893ms step_avg:1236.00ms
step:608/1390 train_time:739160ms step_avg:1236.05ms
step:609/1390 train_time:740427ms step_avg:1236.11ms
step:610/1390 train_time:741693ms step_avg:1236.16ms
step:611/1390 train_time:742962ms step_avg:1236.21ms
step:612/1390 train_time:744230ms step_avg:1236.26ms
step:613/1390 train_time:745495ms step_avg:1236.31ms
step:614/1390 train_time:746767ms step_avg:1236.37ms
step:615/1390 train_time:748031ms step_avg:1236.41ms
step:616/1390 train_time:749292ms step_avg:1236.45ms
step:617/1390 train_time:750554ms step_avg:1236.50ms
step:618/1390 train_time:751821ms step_avg:1236.55ms
step:619/1390 train_time:753103ms step_avg:1236.62ms
step:620/1390 train_time:754388ms step_avg:1236.70ms
step:621/1390 train_time:755668ms step_avg:1236.77ms
step:622/1390 train_time:756946ms step_avg:1236.84ms
step:623/1390 train_time:758233ms step_avg:1236.92ms
step:624/1390 train_time:759514ms step_avg:1236.99ms
step:625/1390 train_time:760797ms step_avg:1237.07ms
step:625/1390 val_loss:3.7451 train_time:760798ms step_avg:1237.07ms
step:626/1390 train_time:762095ms step_avg:1237.17ms
step:627/1390 train_time:763381ms step_avg:1237.25ms
step:628/1390 train_time:764657ms step_avg:1237.31ms
step:629/1390 train_time:765933ms step_avg:1237.37ms
step:630/1390 train_time:767216ms step_avg:1237.45ms
step:631/1390 train_time:768495ms step_avg:1237.51ms
step:632/1390 train_time:769768ms step_avg:1237.57ms
step:633/1390 train_time:771038ms step_avg:1237.62ms
step:634/1390 train_time:772315ms step_avg:1237.68ms
step:635/1390 train_time:773596ms step_avg:1237.75ms
step:636/1390 train_time:774875ms step_avg:1237.82ms
step:637/1390 train_time:776156ms step_avg:1237.89ms
step:638/1390 train_time:777440ms step_avg:1237.96ms
step:639/1390 train_time:778716ms step_avg:1238.02ms
step:640/1390 train_time:779997ms step_avg:1238.09ms
step:641/1390 train_time:781269ms step_avg:1238.14ms
step:642/1390 train_time:782552ms step_avg:1238.21ms
step:643/1390 train_time:783836ms step_avg:1238.29ms
step:644/1390 train_time:785118ms step_avg:1238.36ms
step:645/1390 train_time:786398ms step_avg:1238.42ms
step:646/1390 train_time:787686ms step_avg:1238.50ms
step:647/1390 train_time:788969ms step_avg:1238.57ms
step:648/1390 train_time:790255ms step_avg:1238.64ms
step:649/1390 train_time:791540ms step_avg:1238.72ms
step:650/1390 train_time:792823ms step_avg:1238.79ms
step:650/1390 val_loss:3.7290 train_time:792823ms step_avg:1238.79ms
step:651/1390 train_time:794136ms step_avg:1238.90ms
step:652/1390 train_time:795432ms step_avg:1238.99ms
step:653/1390 train_time:796710ms step_avg:1239.05ms
step:654/1390 train_time:797989ms step_avg:1239.11ms
step:655/1390 train_time:799268ms step_avg:1239.18ms
step:656/1390 train_time:800557ms step_avg:1239.25ms
step:657/1390 train_time:801835ms step_avg:1239.31ms
step:658/1390 train_time:803111ms step_avg:1239.37ms
step:659/1390 train_time:804384ms step_avg:1239.42ms
step:660/1390 train_time:805667ms step_avg:1239.49ms
step:661/1390 train_time:806954ms step_avg:1239.56ms
step:662/1390 train_time:808230ms step_avg:1239.62ms
step:663/1390 train_time:809506ms step_avg:1239.67ms
step:664/1390 train_time:810791ms step_avg:1239.74ms
step:665/1390 train_time:812080ms step_avg:1239.82ms
step:666/1390 train_time:813365ms step_avg:1239.89ms
step:667/1390 train_time:814642ms step_avg:1239.94ms
step:668/1390 train_time:815923ms step_avg:1240.00ms
step:669/1390 train_time:817201ms step_avg:1240.06ms
step:670/1390 train_time:818480ms step_avg:1240.12ms
step:671/1390 train_time:819769ms step_avg:1240.20ms
step:672/1390 train_time:821044ms step_avg:1240.25ms
step:673/1390 train_time:822328ms step_avg:1240.31ms
step:674/1390 train_time:823610ms step_avg:1240.38ms
step:675/1390 train_time:824888ms step_avg:1240.43ms
step:675/1390 val_loss:3.7149 train_time:824889ms step_avg:1240.43ms
step:676/1390 train_time:826188ms step_avg:1240.52ms
step:677/1390 train_time:827474ms step_avg:1240.59ms
step:678/1390 train_time:828748ms step_avg:1240.64ms
step:679/1390 train_time:830032ms step_avg:1240.71ms
step:680/1390 train_time:831311ms step_avg:1240.76ms
step:681/1390 train_time:832592ms step_avg:1240.82ms
step:682/1390 train_time:833870ms step_avg:1240.88ms
step:683/1390 train_time:835153ms step_avg:1240.94ms
step:684/1390 train_time:836443ms step_avg:1241.01ms
step:685/1390 train_time:837716ms step_avg:1241.06ms
step:686/1390 train_time:838991ms step_avg:1241.11ms
step:687/1390 train_time:840269ms step_avg:1241.17ms
step:688/1390 train_time:841555ms step_avg:1241.23ms
step:689/1390 train_time:842839ms step_avg:1241.29ms
step:690/1390 train_time:844110ms step_avg:1241.34ms
step:691/1390 train_time:845389ms step_avg:1241.39ms
step:692/1390 train_time:846669ms step_avg:1241.45ms
step:693/1390 train_time:847955ms step_avg:1241.52ms
step:694/1390 train_time:849229ms step_avg:1241.56ms
step:695/1390 train_time:850515ms step_avg:1241.63ms
step:696/1390 train_time:851791ms step_avg:1241.68ms
step:697/1390 train_time:853068ms step_avg:1241.73ms
step:698/1390 train_time:854352ms step_avg:1241.79ms
step:699/1390 train_time:855639ms step_avg:1241.86ms
step:700/1390 train_time:856916ms step_avg:1241.91ms
step:700/1390 val_loss:3.7008 train_time:856916ms step_avg:1241.91ms
step:701/1390 train_time:858218ms step_avg:1241.99ms
step:702/1390 train_time:859506ms step_avg:1242.06ms
step:703/1390 train_time:860786ms step_avg:1242.12ms
step:704/1390 train_time:862062ms step_avg:1242.16ms
step:705/1390 train_time:863347ms step_avg:1242.23ms
step:706/1390 train_time:864623ms step_avg:1242.27ms
step:707/1390 train_time:865897ms step_avg:1242.32ms
step:708/1390 train_time:867173ms step_avg:1242.37ms
step:709/1390 train_time:868449ms step_avg:1242.42ms
step:710/1390 train_time:869731ms step_avg:1242.47ms
step:711/1390 train_time:871013ms step_avg:1242.53ms
step:712/1390 train_time:872296ms step_avg:1242.59ms
step:713/1390 train_time:873572ms step_avg:1242.63ms
step:714/1390 train_time:874850ms step_avg:1242.69ms
step:715/1390 train_time:876123ms step_avg:1242.73ms
step:716/1390 train_time:877393ms step_avg:1242.77ms
step:717/1390 train_time:878671ms step_avg:1242.82ms
step:718/1390 train_time:879949ms step_avg:1242.87ms
step:719/1390 train_time:881224ms step_avg:1242.91ms
step:720/1390 train_time:882506ms step_avg:1242.97ms
step:721/1390 train_time:883784ms step_avg:1243.02ms
step:722/1390 train_time:885073ms step_avg:1243.08ms
step:723/1390 train_time:886355ms step_avg:1243.14ms
step:724/1390 train_time:887643ms step_avg:1243.20ms
step:725/1390 train_time:888935ms step_avg:1243.27ms
step:725/1390 val_loss:3.6858 train_time:888935ms step_avg:1243.27ms
step:726/1390 train_time:890253ms step_avg:1243.37ms
step:727/1390 train_time:891546ms step_avg:1243.44ms
step:728/1390 train_time:892826ms step_avg:1243.49ms
step:729/1390 train_time:894116ms step_avg:1243.56ms
step:730/1390 train_time:895408ms step_avg:1243.62ms
step:731/1390 train_time:896698ms step_avg:1243.69ms
step:732/1390 train_time:897981ms step_avg:1243.74ms
step:733/1390 train_time:899261ms step_avg:1243.79ms
step:734/1390 train_time:900550ms step_avg:1243.85ms
step:735/1390 train_time:901841ms step_avg:1243.92ms
step:736/1390 train_time:903143ms step_avg:1244.00ms
step:737/1390 train_time:904442ms step_avg:1244.07ms
step:738/1390 train_time:905727ms step_avg:1244.13ms
step:739/1390 train_time:907003ms step_avg:1244.17ms
step:740/1390 train_time:908297ms step_avg:1244.24ms
step:741/1390 train_time:909592ms step_avg:1244.31ms
step:742/1390 train_time:910878ms step_avg:1244.37ms
step:743/1390 train_time:912159ms step_avg:1244.42ms
step:744/1390 train_time:913453ms step_avg:1244.49ms
step:745/1390 train_time:914742ms step_avg:1244.55ms
step:746/1390 train_time:916029ms step_avg:1244.60ms
step:747/1390 train_time:917319ms step_avg:1244.67ms
step:748/1390 train_time:918614ms step_avg:1244.74ms
step:749/1390 train_time:919902ms step_avg:1244.79ms
step:750/1390 train_time:921189ms step_avg:1244.85ms
step:750/1390 val_loss:3.6755 train_time:921190ms step_avg:1244.85ms
step:751/1390 train_time:922514ms step_avg:1244.96ms
step:752/1390 train_time:923811ms step_avg:1245.03ms
step:753/1390 train_time:925096ms step_avg:1245.08ms
step:754/1390 train_time:926382ms step_avg:1245.14ms
step:755/1390 train_time:927667ms step_avg:1245.19ms
step:756/1390 train_time:928961ms step_avg:1245.26ms
step:757/1390 train_time:930251ms step_avg:1245.32ms
step:758/1390 train_time:931535ms step_avg:1245.37ms
step:759/1390 train_time:932818ms step_avg:1245.42ms
step:760/1390 train_time:934105ms step_avg:1245.47ms
step:761/1390 train_time:935424ms step_avg:1245.57ms
step:762/1390 train_time:936711ms step_avg:1245.63ms
step:763/1390 train_time:938005ms step_avg:1245.69ms
step:764/1390 train_time:939293ms step_avg:1245.75ms
step:765/1390 train_time:940587ms step_avg:1245.81ms
step:766/1390 train_time:941883ms step_avg:1245.88ms
step:767/1390 train_time:943172ms step_avg:1245.93ms
step:768/1390 train_time:944469ms step_avg:1246.00ms
step:769/1390 train_time:945754ms step_avg:1246.05ms
step:770/1390 train_time:947045ms step_avg:1246.11ms
step:771/1390 train_time:948328ms step_avg:1246.16ms
step:772/1390 train_time:949630ms step_avg:1246.23ms
step:773/1390 train_time:950917ms step_avg:1246.29ms
step:774/1390 train_time:952191ms step_avg:1246.32ms
step:775/1390 train_time:953479ms step_avg:1246.38ms
step:775/1390 val_loss:3.6659 train_time:953479ms step_avg:1246.38ms
step:776/1390 train_time:954797ms step_avg:1246.47ms
step:777/1390 train_time:956087ms step_avg:1246.53ms
step:778/1390 train_time:957363ms step_avg:1246.57ms
step:779/1390 train_time:958656ms step_avg:1246.63ms
step:780/1390 train_time:959946ms step_avg:1246.68ms
step:781/1390 train_time:961237ms step_avg:1246.74ms
step:782/1390 train_time:962525ms step_avg:1246.79ms
step:783/1390 train_time:963805ms step_avg:1246.84ms
step:784/1390 train_time:965093ms step_avg:1246.89ms
step:785/1390 train_time:966386ms step_avg:1246.95ms
step:786/1390 train_time:967666ms step_avg:1246.99ms
step:787/1390 train_time:968957ms step_avg:1247.05ms
step:788/1390 train_time:970247ms step_avg:1247.10ms
step:789/1390 train_time:971532ms step_avg:1247.15ms
step:790/1390 train_time:972817ms step_avg:1247.20ms
step:791/1390 train_time:974113ms step_avg:1247.26ms
step:792/1390 train_time:975401ms step_avg:1247.32ms
step:793/1390 train_time:976697ms step_avg:1247.38ms
step:794/1390 train_time:978001ms step_avg:1247.45ms
step:795/1390 train_time:979289ms step_avg:1247.50ms
step:796/1390 train_time:980576ms step_avg:1247.55ms
step:797/1390 train_time:981862ms step_avg:1247.60ms
step:798/1390 train_time:983164ms step_avg:1247.67ms
step:799/1390 train_time:984458ms step_avg:1247.73ms
step:800/1390 train_time:985748ms step_avg:1247.78ms
step:800/1390 val_loss:3.6560 train_time:985748ms step_avg:1247.78ms
step:801/1390 train_time:987065ms step_avg:1247.87ms
step:802/1390 train_time:988372ms step_avg:1247.94ms
step:803/1390 train_time:989683ms step_avg:1248.02ms
step:804/1390 train_time:991004ms step_avg:1248.12ms
step:805/1390 train_time:992312ms step_avg:1248.19ms
step:806/1390 train_time:993617ms step_avg:1248.26ms
step:807/1390 train_time:994909ms step_avg:1248.32ms
step:808/1390 train_time:996196ms step_avg:1248.37ms
step:809/1390 train_time:997495ms step_avg:1248.43ms
step:810/1390 train_time:998784ms step_avg:1248.48ms
step:811/1390 train_time:1000077ms step_avg:1248.54ms
step:812/1390 train_time:1001367ms step_avg:1248.59ms
step:813/1390 train_time:1002663ms step_avg:1248.65ms
step:814/1390 train_time:1003962ms step_avg:1248.71ms
step:815/1390 train_time:1005250ms step_avg:1248.76ms
step:816/1390 train_time:1006533ms step_avg:1248.80ms
step:817/1390 train_time:1007839ms step_avg:1248.87ms
step:818/1390 train_time:1009139ms step_avg:1248.93ms
step:819/1390 train_time:1010433ms step_avg:1248.99ms
step:820/1390 train_time:1011729ms step_avg:1249.05ms
step:821/1390 train_time:1013029ms step_avg:1249.11ms
step:822/1390 train_time:1014317ms step_avg:1249.16ms
step:823/1390 train_time:1015616ms step_avg:1249.22ms
step:824/1390 train_time:1016918ms step_avg:1249.28ms
step:825/1390 train_time:1018239ms step_avg:1249.37ms
step:825/1390 val_loss:3.6461 train_time:1018239ms step_avg:1249.37ms
step:826/1390 train_time:1019576ms step_avg:1249.48ms
step:827/1390 train_time:1020891ms step_avg:1249.56ms
step:828/1390 train_time:1022200ms step_avg:1249.63ms
step:829/1390 train_time:1023497ms step_avg:1249.69ms
step:830/1390 train_time:1024804ms step_avg:1249.76ms
step:831/1390 train_time:1026115ms step_avg:1249.84ms
step:832/1390 train_time:1027417ms step_avg:1249.90ms
step:833/1390 train_time:1028723ms step_avg:1249.97ms
step:834/1390 train_time:1030018ms step_avg:1250.02ms
step:835/1390 train_time:1031322ms step_avg:1250.09ms
step:836/1390 train_time:1032620ms step_avg:1250.14ms
step:837/1390 train_time:1033915ms step_avg:1250.20ms
step:838/1390 train_time:1035217ms step_avg:1250.26ms
step:839/1390 train_time:1036523ms step_avg:1250.33ms
step:840/1390 train_time:1037814ms step_avg:1250.38ms
step:841/1390 train_time:1039101ms step_avg:1250.42ms
step:842/1390 train_time:1040410ms step_avg:1250.49ms
step:843/1390 train_time:1041711ms step_avg:1250.55ms
step:844/1390 train_time:1043005ms step_avg:1250.61ms
step:845/1390 train_time:1044306ms step_avg:1250.67ms
step:846/1390 train_time:1045608ms step_avg:1250.73ms
step:847/1390 train_time:1046904ms step_avg:1250.78ms
step:848/1390 train_time:1048211ms step_avg:1250.85ms
step:849/1390 train_time:1049513ms step_avg:1250.91ms
step:850/1390 train_time:1050822ms step_avg:1250.98ms
step:850/1390 val_loss:3.6383 train_time:1050822ms step_avg:1250.98ms
step:851/1390 train_time:1052158ms step_avg:1251.08ms
step:852/1390 train_time:1053466ms step_avg:1251.15ms
step:853/1390 train_time:1054757ms step_avg:1251.19ms
step:854/1390 train_time:1056051ms step_avg:1251.25ms
step:855/1390 train_time:1057339ms step_avg:1251.29ms
step:856/1390 train_time:1058632ms step_avg:1251.34ms
step:857/1390 train_time:1059949ms step_avg:1251.42ms
step:858/1390 train_time:1061251ms step_avg:1251.48ms
step:859/1390 train_time:1062551ms step_avg:1251.53ms
step:860/1390 train_time:1063852ms step_avg:1251.59ms
step:861/1390 train_time:1065149ms step_avg:1251.64ms
step:862/1390 train_time:1066451ms step_avg:1251.70ms
step:863/1390 train_time:1067755ms step_avg:1251.76ms
step:864/1390 train_time:1069057ms step_avg:1251.82ms
step:865/1390 train_time:1070362ms step_avg:1251.89ms
step:866/1390 train_time:1071662ms step_avg:1251.94ms
step:867/1390 train_time:1072954ms step_avg:1251.99ms
step:868/1390 train_time:1074250ms step_avg:1252.04ms
step:869/1390 train_time:1075552ms step_avg:1252.10ms
step:870/1390 train_time:1076850ms step_avg:1252.15ms
step:871/1390 train_time:1078147ms step_avg:1252.20ms
step:872/1390 train_time:1079439ms step_avg:1252.25ms
step:873/1390 train_time:1080734ms step_avg:1252.30ms
step:874/1390 train_time:1082044ms step_avg:1252.37ms
step:875/1390 train_time:1083351ms step_avg:1252.43ms
step:875/1390 val_loss:3.6289 train_time:1083351ms step_avg:1252.43ms
step:876/1390 train_time:1084672ms step_avg:1252.51ms
step:877/1390 train_time:1085984ms step_avg:1252.58ms
step:878/1390 train_time:1087284ms step_avg:1252.63ms
step:879/1390 train_time:1088583ms step_avg:1252.69ms
step:880/1390 train_time:1089880ms step_avg:1252.74ms
step:881/1390 train_time:1091179ms step_avg:1252.79ms
step:882/1390 train_time:1092481ms step_avg:1252.85ms
step:883/1390 train_time:1093775ms step_avg:1252.89ms
step:884/1390 train_time:1095080ms step_avg:1252.95ms
step:885/1390 train_time:1096381ms step_avg:1253.01ms
step:886/1390 train_time:1097676ms step_avg:1253.06ms
step:887/1390 train_time:1098979ms step_avg:1253.11ms
step:888/1390 train_time:1100283ms step_avg:1253.17ms
step:889/1390 train_time:1101585ms step_avg:1253.23ms
step:890/1390 train_time:1102877ms step_avg:1253.27ms
step:891/1390 train_time:1104177ms step_avg:1253.32ms
step:892/1390 train_time:1105487ms step_avg:1253.39ms
step:893/1390 train_time:1106778ms step_avg:1253.43ms
step:894/1390 train_time:1108077ms step_avg:1253.48ms
step:895/1390 train_time:1109375ms step_avg:1253.53ms
step:896/1390 train_time:1110671ms step_avg:1253.58ms
step:897/1390 train_time:1111981ms step_avg:1253.64ms
step:898/1390 train_time:1113285ms step_avg:1253.70ms
step:899/1390 train_time:1114578ms step_avg:1253.74ms
step:900/1390 train_time:1115883ms step_avg:1253.80ms
step:900/1390 val_loss:3.6155 train_time:1115884ms step_avg:1253.80ms
step:901/1390 train_time:1117208ms step_avg:1253.88ms
step:902/1390 train_time:1118522ms step_avg:1253.95ms
step:903/1390 train_time:1119822ms step_avg:1254.00ms
step:904/1390 train_time:1121126ms step_avg:1254.06ms
step:905/1390 train_time:1122427ms step_avg:1254.11ms
step:906/1390 train_time:1123731ms step_avg:1254.16ms
step:907/1390 train_time:1125034ms step_avg:1254.22ms
step:908/1390 train_time:1126333ms step_avg:1254.27ms
step:909/1390 train_time:1127643ms step_avg:1254.33ms
step:910/1390 train_time:1128951ms step_avg:1254.39ms
step:911/1390 train_time:1130251ms step_avg:1254.44ms
step:912/1390 train_time:1131543ms step_avg:1254.48ms
step:913/1390 train_time:1132844ms step_avg:1254.53ms
step:914/1390 train_time:1134150ms step_avg:1254.59ms
step:915/1390 train_time:1135453ms step_avg:1254.64ms
step:916/1390 train_time:1136755ms step_avg:1254.70ms
step:917/1390 train_time:1138052ms step_avg:1254.74ms
step:918/1390 train_time:1139367ms step_avg:1254.81ms
step:919/1390 train_time:1140668ms step_avg:1254.86ms
step:920/1390 train_time:1141976ms step_avg:1254.92ms
step:921/1390 train_time:1143290ms step_avg:1254.98ms
step:922/1390 train_time:1144591ms step_avg:1255.03ms
step:923/1390 train_time:1145884ms step_avg:1255.08ms
step:924/1390 train_time:1147184ms step_avg:1255.13ms
step:925/1390 train_time:1148490ms step_avg:1255.18ms
step:925/1390 val_loss:3.6013 train_time:1148490ms step_avg:1255.18ms
step:926/1390 train_time:1149820ms step_avg:1255.26ms
step:927/1390 train_time:1151141ms step_avg:1255.33ms
step:928/1390 train_time:1152454ms step_avg:1255.40ms
step:929/1390 train_time:1153761ms step_avg:1255.45ms
step:930/1390 train_time:1155076ms step_avg:1255.52ms
step:931/1390 train_time:1156390ms step_avg:1255.58ms
step:932/1390 train_time:1157704ms step_avg:1255.64ms
step:933/1390 train_time:1159016ms step_avg:1255.70ms
step:934/1390 train_time:1160323ms step_avg:1255.76ms
step:935/1390 train_time:1161633ms step_avg:1255.82ms
step:936/1390 train_time:1162951ms step_avg:1255.89ms
step:937/1390 train_time:1164263ms step_avg:1255.95ms
step:938/1390 train_time:1165576ms step_avg:1256.01ms
step:939/1390 train_time:1166895ms step_avg:1256.08ms
step:940/1390 train_time:1168198ms step_avg:1256.13ms
step:941/1390 train_time:1169511ms step_avg:1256.19ms
step:942/1390 train_time:1170820ms step_avg:1256.24ms
step:943/1390 train_time:1172136ms step_avg:1256.31ms
step:944/1390 train_time:1173462ms step_avg:1256.38ms
step:945/1390 train_time:1174773ms step_avg:1256.44ms
step:946/1390 train_time:1176079ms step_avg:1256.50ms
step:947/1390 train_time:1177398ms step_avg:1256.56ms
step:948/1390 train_time:1178721ms step_avg:1256.63ms
step:949/1390 train_time:1180027ms step_avg:1256.69ms
step:950/1390 train_time:1181341ms step_avg:1256.75ms
step:950/1390 val_loss:3.5877 train_time:1181341ms step_avg:1256.75ms
step:951/1390 train_time:1182712ms step_avg:1256.87ms
step:952/1390 train_time:1184025ms step_avg:1256.93ms
step:953/1390 train_time:1185333ms step_avg:1256.98ms
step:954/1390 train_time:1186646ms step_avg:1257.04ms
step:955/1390 train_time:1187958ms step_avg:1257.10ms
step:956/1390 train_time:1189267ms step_avg:1257.15ms
step:957/1390 train_time:1190586ms step_avg:1257.22ms
step:958/1390 train_time:1191891ms step_avg:1257.27ms
step:959/1390 train_time:1193204ms step_avg:1257.33ms
step:960/1390 train_time:1194519ms step_avg:1257.39ms
step:961/1390 train_time:1195831ms step_avg:1257.45ms
step:962/1390 train_time:1197152ms step_avg:1257.51ms
step:963/1390 train_time:1198456ms step_avg:1257.56ms
step:964/1390 train_time:1199778ms step_avg:1257.63ms
step:965/1390 train_time:1201083ms step_avg:1257.68ms
step:966/1390 train_time:1202401ms step_avg:1257.74ms
step:967/1390 train_time:1203709ms step_avg:1257.79ms
step:968/1390 train_time:1205023ms step_avg:1257.85ms
step:969/1390 train_time:1206332ms step_avg:1257.91ms
step:970/1390 train_time:1207645ms step_avg:1257.96ms
step:971/1390 train_time:1208953ms step_avg:1258.02ms
step:972/1390 train_time:1210261ms step_avg:1258.07ms
step:973/1390 train_time:1211571ms step_avg:1258.12ms
step:974/1390 train_time:1212885ms step_avg:1258.18ms
step:975/1390 train_time:1214199ms step_avg:1258.24ms
step:975/1390 val_loss:3.5755 train_time:1214199ms step_avg:1258.24ms
step:976/1390 train_time:1215527ms step_avg:1258.31ms
step:977/1390 train_time:1216850ms step_avg:1258.38ms
step:978/1390 train_time:1218160ms step_avg:1258.43ms
step:979/1390 train_time:1219473ms step_avg:1258.49ms
step:980/1390 train_time:1220768ms step_avg:1258.52ms
step:981/1390 train_time:1222078ms step_avg:1258.58ms
step:982/1390 train_time:1223366ms step_avg:1258.61ms
step:983/1390 train_time:1224672ms step_avg:1258.66ms
step:984/1390 train_time:1225991ms step_avg:1258.72ms
step:985/1390 train_time:1227305ms step_avg:1258.77ms
step:986/1390 train_time:1228610ms step_avg:1258.82ms
step:987/1390 train_time:1229926ms step_avg:1258.88ms
step:988/1390 train_time:1231231ms step_avg:1258.93ms
step:989/1390 train_time:1232546ms step_avg:1258.98ms
step:990/1390 train_time:1233861ms step_avg:1259.04ms
step:991/1390 train_time:1235179ms step_avg:1259.10ms
step:992/1390 train_time:1236513ms step_avg:1259.18ms
step:993/1390 train_time:1237824ms step_avg:1259.23ms
step:994/1390 train_time:1239133ms step_avg:1259.28ms
step:995/1390 train_time:1240440ms step_avg:1259.33ms
step:996/1390 train_time:1241739ms step_avg:1259.37ms
step:997/1390 train_time:1243048ms step_avg:1259.42ms
step:998/1390 train_time:1244353ms step_avg:1259.47ms
step:999/1390 train_time:1245658ms step_avg:1259.51ms
step:1000/1390 train_time:1246965ms step_avg:1259.56ms
step:1000/1390 val_loss:3.5637 train_time:1246965ms step_avg:1259.56ms
step:1001/1390 train_time:1248306ms step_avg:1259.64ms
step:1002/1390 train_time:1249626ms step_avg:1259.70ms
step:1003/1390 train_time:1250945ms step_avg:1259.76ms
step:1004/1390 train_time:1252259ms step_avg:1259.82ms
step:1005/1390 train_time:1253573ms step_avg:1259.87ms
step:1006/1390 train_time:1254882ms step_avg:1259.92ms
step:1007/1390 train_time:1256191ms step_avg:1259.97ms
step:1008/1390 train_time:1257507ms step_avg:1260.03ms
step:1009/1390 train_time:1258821ms step_avg:1260.08ms
step:1010/1390 train_time:1260125ms step_avg:1260.12ms
step:1011/1390 train_time:1261437ms step_avg:1260.18ms
step:1012/1390 train_time:1262748ms step_avg:1260.23ms
step:1013/1390 train_time:1264050ms step_avg:1260.27ms
step:1014/1390 train_time:1265356ms step_avg:1260.31ms
step:1015/1390 train_time:1266664ms step_avg:1260.36ms
step:1016/1390 train_time:1267991ms step_avg:1260.43ms
step:1017/1390 train_time:1269312ms step_avg:1260.49ms
step:1018/1390 train_time:1270621ms step_avg:1260.54ms
step:1019/1390 train_time:1271929ms step_avg:1260.58ms
step:1020/1390 train_time:1273228ms step_avg:1260.62ms
step:1021/1390 train_time:1274527ms step_avg:1260.66ms
step:1022/1390 train_time:1275837ms step_avg:1260.71ms
step:1023/1390 train_time:1277137ms step_avg:1260.75ms
step:1024/1390 train_time:1278443ms step_avg:1260.79ms
step:1025/1390 train_time:1279741ms step_avg:1260.83ms
step:1025/1390 val_loss:3.5526 train_time:1279742ms step_avg:1260.83ms
step:1026/1390 train_time:1281073ms step_avg:1260.90ms
step:1027/1390 train_time:1282401ms step_avg:1260.96ms
step:1028/1390 train_time:1283717ms step_avg:1261.02ms
step:1029/1390 train_time:1285019ms step_avg:1261.06ms
step:1030/1390 train_time:1286318ms step_avg:1261.10ms
step:1031/1390 train_time:1287626ms step_avg:1261.14ms
step:1032/1390 train_time:1288940ms step_avg:1261.19ms
step:1033/1390 train_time:1290255ms step_avg:1261.25ms
step:1034/1390 train_time:1291591ms step_avg:1261.32ms
step:1035/1390 train_time:1292897ms step_avg:1261.36ms
step:1036/1390 train_time:1294215ms step_avg:1261.42ms
step:1037/1390 train_time:1295536ms step_avg:1261.48ms
step:1038/1390 train_time:1296847ms step_avg:1261.52ms
step:1039/1390 train_time:1298153ms step_avg:1261.57ms
step:1040/1390 train_time:1299464ms step_avg:1261.62ms
step:1041/1390 train_time:1300788ms step_avg:1261.68ms
step:1042/1390 train_time:1302098ms step_avg:1261.72ms
step:1043/1390 train_time:1303408ms step_avg:1261.77ms
step:1044/1390 train_time:1304740ms step_avg:1261.84ms
step:1045/1390 train_time:1306062ms step_avg:1261.90ms
step:1046/1390 train_time:1307364ms step_avg:1261.93ms
step:1047/1390 train_time:1308677ms step_avg:1261.98ms
step:1048/1390 train_time:1309987ms step_avg:1262.03ms
step:1049/1390 train_time:1311302ms step_avg:1262.08ms
step:1050/1390 train_time:1312620ms step_avg:1262.13ms
step:1050/1390 val_loss:3.5420 train_time:1312621ms step_avg:1262.14ms
step:1051/1390 train_time:1313967ms step_avg:1262.22ms
step:1052/1390 train_time:1315286ms step_avg:1262.27ms
step:1053/1390 train_time:1316599ms step_avg:1262.32ms
step:1054/1390 train_time:1317909ms step_avg:1262.37ms
step:1055/1390 train_time:1319215ms step_avg:1262.41ms
step:1056/1390 train_time:1320527ms step_avg:1262.45ms
step:1057/1390 train_time:1321843ms step_avg:1262.51ms
step:1058/1390 train_time:1323162ms step_avg:1262.56ms
step:1059/1390 train_time:1324476ms step_avg:1262.61ms
step:1060/1390 train_time:1325787ms step_avg:1262.65ms
step:1061/1390 train_time:1327104ms step_avg:1262.71ms
step:1062/1390 train_time:1328411ms step_avg:1262.75ms
step:1063/1390 train_time:1329722ms step_avg:1262.79ms
step:1064/1390 train_time:1331029ms step_avg:1262.84ms
step:1065/1390 train_time:1332348ms step_avg:1262.89ms
step:1066/1390 train_time:1333665ms step_avg:1262.94ms
step:1067/1390 train_time:1334981ms step_avg:1262.99ms
step:1068/1390 train_time:1336309ms step_avg:1263.05ms
step:1069/1390 train_time:1337622ms step_avg:1263.10ms
step:1070/1390 train_time:1338937ms step_avg:1263.15ms
step:1071/1390 train_time:1340251ms step_avg:1263.20ms
step:1072/1390 train_time:1341561ms step_avg:1263.24ms
step:1073/1390 train_time:1342869ms step_avg:1263.28ms
step:1074/1390 train_time:1344178ms step_avg:1263.33ms
step:1075/1390 train_time:1345485ms step_avg:1263.37ms
step:1075/1390 val_loss:3.5326 train_time:1345486ms step_avg:1263.37ms
step:1076/1390 train_time:1346825ms step_avg:1263.44ms
step:1077/1390 train_time:1348144ms step_avg:1263.49ms
step:1078/1390 train_time:1349460ms step_avg:1263.54ms
step:1079/1390 train_time:1350781ms step_avg:1263.59ms
step:1080/1390 train_time:1352104ms step_avg:1263.65ms
step:1081/1390 train_time:1353417ms step_avg:1263.69ms
step:1082/1390 train_time:1354735ms step_avg:1263.75ms
step:1083/1390 train_time:1356054ms step_avg:1263.80ms
step:1084/1390 train_time:1357374ms step_avg:1263.85ms
step:1085/1390 train_time:1358705ms step_avg:1263.91ms
step:1086/1390 train_time:1360024ms step_avg:1263.96ms
step:1087/1390 train_time:1361345ms step_avg:1264.02ms
step:1088/1390 train_time:1362662ms step_avg:1264.06ms
step:1089/1390 train_time:1363980ms step_avg:1264.11ms
step:1090/1390 train_time:1365300ms step_avg:1264.17ms
step:1091/1390 train_time:1366618ms step_avg:1264.22ms
step:1092/1390 train_time:1367919ms step_avg:1264.25ms
step:1093/1390 train_time:1369231ms step_avg:1264.29ms
step:1094/1390 train_time:1370537ms step_avg:1264.33ms
step:1095/1390 train_time:1371854ms step_avg:1264.38ms
step:1096/1390 train_time:1373166ms step_avg:1264.43ms
step:1097/1390 train_time:1374480ms step_avg:1264.47ms
step:1098/1390 train_time:1375792ms step_avg:1264.51ms
step:1099/1390 train_time:1377103ms step_avg:1264.56ms
step:1100/1390 train_time:1378406ms step_avg:1264.59ms
step:1100/1390 val_loss:3.5231 train_time:1378406ms step_avg:1264.59ms
step:1101/1390 train_time:1379756ms step_avg:1264.67ms
step:1102/1390 train_time:1381095ms step_avg:1264.74ms
step:1103/1390 train_time:1382405ms step_avg:1264.78ms
step:1104/1390 train_time:1383718ms step_avg:1264.82ms
step:1105/1390 train_time:1385035ms step_avg:1264.87ms
step:1106/1390 train_time:1386349ms step_avg:1264.92ms
step:1107/1390 train_time:1387663ms step_avg:1264.96ms
step:1108/1390 train_time:1388967ms step_avg:1265.00ms
step:1109/1390 train_time:1390281ms step_avg:1265.04ms
step:1110/1390 train_time:1391589ms step_avg:1265.08ms
step:1111/1390 train_time:1392898ms step_avg:1265.12ms
step:1112/1390 train_time:1394199ms step_avg:1265.15ms
step:1113/1390 train_time:1395513ms step_avg:1265.20ms
step:1114/1390 train_time:1396821ms step_avg:1265.24ms
step:1115/1390 train_time:1398135ms step_avg:1265.28ms
step:1116/1390 train_time:1399444ms step_avg:1265.32ms
step:1117/1390 train_time:1400781ms step_avg:1265.38ms
step:1118/1390 train_time:1402078ms step_avg:1265.41ms
step:1119/1390 train_time:1403386ms step_avg:1265.45ms
step:1120/1390 train_time:1404711ms step_avg:1265.51ms
step:1121/1390 train_time:1406018ms step_avg:1265.54ms
step:1122/1390 train_time:1407322ms step_avg:1265.58ms
step:1123/1390 train_time:1408634ms step_avg:1265.62ms
step:1124/1390 train_time:1409933ms step_avg:1265.65ms
step:1125/1390 train_time:1411259ms step_avg:1265.70ms
step:1125/1390 val_loss:3.5138 train_time:1411259ms step_avg:1265.70ms
step:1126/1390 train_time:1412602ms step_avg:1265.77ms
step:1127/1390 train_time:1413930ms step_avg:1265.83ms
step:1128/1390 train_time:1415252ms step_avg:1265.88ms
step:1129/1390 train_time:1416558ms step_avg:1265.91ms
step:1130/1390 train_time:1417870ms step_avg:1265.96ms
step:1131/1390 train_time:1419183ms step_avg:1266.00ms
step:1132/1390 train_time:1420487ms step_avg:1266.03ms
step:1133/1390 train_time:1421816ms step_avg:1266.09ms
step:1134/1390 train_time:1423136ms step_avg:1266.14ms
step:1135/1390 train_time:1424467ms step_avg:1266.19ms
step:1136/1390 train_time:1425791ms step_avg:1266.24ms
step:1137/1390 train_time:1427108ms step_avg:1266.29ms
step:1138/1390 train_time:1428425ms step_avg:1266.33ms
step:1139/1390 train_time:1429761ms step_avg:1266.40ms
step:1140/1390 train_time:1431070ms step_avg:1266.43ms
step:1141/1390 train_time:1432427ms step_avg:1266.51ms
step:1142/1390 train_time:1433756ms step_avg:1266.57ms
step:1143/1390 train_time:1435071ms step_avg:1266.61ms
step:1144/1390 train_time:1436381ms step_avg:1266.65ms
step:1145/1390 train_time:1437696ms step_avg:1266.69ms
step:1146/1390 train_time:1439018ms step_avg:1266.74ms
step:1147/1390 train_time:1440341ms step_avg:1266.79ms
step:1148/1390 train_time:1441650ms step_avg:1266.83ms
step:1149/1390 train_time:1442972ms step_avg:1266.88ms
step:1150/1390 train_time:1444300ms step_avg:1266.93ms
step:1150/1390 val_loss:3.5049 train_time:1444300ms step_avg:1266.93ms
step:1151/1390 train_time:1445653ms step_avg:1267.01ms
step:1152/1390 train_time:1446994ms step_avg:1267.07ms
step:1153/1390 train_time:1448326ms step_avg:1267.13ms
step:1154/1390 train_time:1449645ms step_avg:1267.17ms
step:1155/1390 train_time:1450968ms step_avg:1267.22ms
step:1156/1390 train_time:1452291ms step_avg:1267.27ms
step:1157/1390 train_time:1453617ms step_avg:1267.32ms
step:1158/1390 train_time:1454942ms step_avg:1267.37ms
step:1159/1390 train_time:1456265ms step_avg:1267.42ms
step:1160/1390 train_time:1457581ms step_avg:1267.46ms
step:1161/1390 train_time:1458911ms step_avg:1267.52ms
step:1162/1390 train_time:1460213ms step_avg:1267.55ms
step:1163/1390 train_time:1461534ms step_avg:1267.59ms
step:1164/1390 train_time:1462852ms step_avg:1267.64ms
step:1165/1390 train_time:1464173ms step_avg:1267.68ms
step:1166/1390 train_time:1465492ms step_avg:1267.73ms
step:1167/1390 train_time:1466824ms step_avg:1267.78ms
step:1168/1390 train_time:1468141ms step_avg:1267.82ms
step:1169/1390 train_time:1469454ms step_avg:1267.86ms
step:1170/1390 train_time:1470776ms step_avg:1267.91ms
step:1171/1390 train_time:1472103ms step_avg:1267.96ms
step:1172/1390 train_time:1473422ms step_avg:1268.01ms
step:1173/1390 train_time:1474757ms step_avg:1268.06ms
step:1174/1390 train_time:1476085ms step_avg:1268.11ms
step:1175/1390 train_time:1477414ms step_avg:1268.17ms
step:1175/1390 val_loss:3.4965 train_time:1477414ms step_avg:1268.17ms
step:1176/1390 train_time:1478769ms step_avg:1268.24ms
step:1177/1390 train_time:1480105ms step_avg:1268.30ms
step:1178/1390 train_time:1481419ms step_avg:1268.34ms
step:1179/1390 train_time:1482759ms step_avg:1268.40ms
step:1180/1390 train_time:1484085ms step_avg:1268.45ms
step:1181/1390 train_time:1485391ms step_avg:1268.48ms
step:1182/1390 train_time:1486715ms step_avg:1268.53ms
step:1183/1390 train_time:1488038ms step_avg:1268.57ms
step:1184/1390 train_time:1489367ms step_avg:1268.63ms
step:1185/1390 train_time:1490686ms step_avg:1268.67ms
step:1186/1390 train_time:1492031ms step_avg:1268.73ms
step:1187/1390 train_time:1493355ms step_avg:1268.78ms
step:1188/1390 train_time:1494673ms step_avg:1268.82ms
step:1189/1390 train_time:1495996ms step_avg:1268.87ms
step:1190/1390 train_time:1497320ms step_avg:1268.92ms
step:1191/1390 train_time:1498634ms step_avg:1268.95ms
step:1192/1390 train_time:1499958ms step_avg:1269.00ms
step:1193/1390 train_time:1501260ms step_avg:1269.03ms
step:1194/1390 train_time:1502588ms step_avg:1269.08ms
step:1195/1390 train_time:1503921ms step_avg:1269.13ms
step:1196/1390 train_time:1505247ms step_avg:1269.18ms
step:1197/1390 train_time:1506578ms step_avg:1269.23ms
step:1198/1390 train_time:1507907ms step_avg:1269.28ms
step:1199/1390 train_time:1509223ms step_avg:1269.32ms
step:1200/1390 train_time:1510524ms step_avg:1269.35ms
step:1200/1390 val_loss:3.4887 train_time:1510524ms step_avg:1269.35ms
step:1201/1390 train_time:1511886ms step_avg:1269.43ms
step:1202/1390 train_time:1513239ms step_avg:1269.50ms
step:1203/1390 train_time:1514555ms step_avg:1269.53ms
step:1204/1390 train_time:1515871ms step_avg:1269.57ms
step:1205/1390 train_time:1517203ms step_avg:1269.63ms
step:1206/1390 train_time:1518528ms step_avg:1269.67ms
step:1207/1390 train_time:1519856ms step_avg:1269.72ms
step:1208/1390 train_time:1521174ms step_avg:1269.76ms
step:1209/1390 train_time:1522497ms step_avg:1269.81ms
step:1210/1390 train_time:1523824ms step_avg:1269.85ms
step:1211/1390 train_time:1525141ms step_avg:1269.89ms
step:1212/1390 train_time:1526458ms step_avg:1269.93ms
step:1213/1390 train_time:1527780ms step_avg:1269.97ms
step:1214/1390 train_time:1529107ms step_avg:1270.02ms
step:1215/1390 train_time:1530417ms step_avg:1270.06ms
step:1216/1390 train_time:1531729ms step_avg:1270.09ms
step:1217/1390 train_time:1533038ms step_avg:1270.12ms
step:1218/1390 train_time:1534344ms step_avg:1270.15ms
step:1219/1390 train_time:1535675ms step_avg:1270.20ms
step:1220/1390 train_time:1536991ms step_avg:1270.24ms
step:1221/1390 train_time:1538308ms step_avg:1270.28ms
step:1222/1390 train_time:1539622ms step_avg:1270.31ms
step:1223/1390 train_time:1540945ms step_avg:1270.36ms
step:1224/1390 train_time:1542284ms step_avg:1270.41ms
step:1225/1390 train_time:1543599ms step_avg:1270.45ms
step:1225/1390 val_loss:3.4817 train_time:1543599ms step_avg:1270.45ms
step:1226/1390 train_time:1544950ms step_avg:1270.52ms
step:1227/1390 train_time:1546289ms step_avg:1270.57ms
step:1228/1390 train_time:1547599ms step_avg:1270.61ms
step:1229/1390 train_time:1548929ms step_avg:1270.66ms
step:1230/1390 train_time:1550246ms step_avg:1270.69ms
step:1231/1390 train_time:1551583ms step_avg:1270.75ms
step:1232/1390 train_time:1552890ms step_avg:1270.78ms
step:1233/1390 train_time:1554204ms step_avg:1270.81ms
step:1234/1390 train_time:1555520ms step_avg:1270.85ms
step:1235/1390 train_time:1556833ms step_avg:1270.88ms
step:1236/1390 train_time:1558145ms step_avg:1270.92ms
step:1237/1390 train_time:1559493ms step_avg:1270.98ms
step:1238/1390 train_time:1560816ms step_avg:1271.02ms
step:1239/1390 train_time:1562136ms step_avg:1271.06ms
step:1240/1390 train_time:1563472ms step_avg:1271.12ms
step:1241/1390 train_time:1564798ms step_avg:1271.16ms
step:1242/1390 train_time:1566129ms step_avg:1271.21ms
step:1243/1390 train_time:1567451ms step_avg:1271.25ms
step:1244/1390 train_time:1568771ms step_avg:1271.29ms
step:1245/1390 train_time:1570092ms step_avg:1271.33ms
step:1246/1390 train_time:1571418ms step_avg:1271.37ms
step:1247/1390 train_time:1572735ms step_avg:1271.41ms
step:1248/1390 train_time:1574044ms step_avg:1271.44ms
step:1249/1390 train_time:1575365ms step_avg:1271.48ms
step:1250/1390 train_time:1576698ms step_avg:1271.53ms
step:1250/1390 val_loss:3.4748 train_time:1576699ms step_avg:1271.53ms
step:1251/1390 train_time:1578062ms step_avg:1271.60ms
step:1252/1390 train_time:1579378ms step_avg:1271.64ms
step:1253/1390 train_time:1580688ms step_avg:1271.67ms
step:1254/1390 train_time:1582041ms step_avg:1271.74ms
step:1255/1390 train_time:1583366ms step_avg:1271.78ms
step:1256/1390 train_time:1584689ms step_avg:1271.82ms
step:1257/1390 train_time:1586020ms step_avg:1271.87ms
step:1258/1390 train_time:1587363ms step_avg:1271.93ms
step:1259/1390 train_time:1588675ms step_avg:1271.96ms
step:1260/1390 train_time:1590000ms step_avg:1272.00ms
step:1261/1390 train_time:1591348ms step_avg:1272.06ms
step:1262/1390 train_time:1592676ms step_avg:1272.11ms
step:1263/1390 train_time:1593998ms step_avg:1272.15ms
step:1264/1390 train_time:1595313ms step_avg:1272.18ms
step:1265/1390 train_time:1596636ms step_avg:1272.22ms
step:1266/1390 train_time:1597953ms step_avg:1272.26ms
step:1267/1390 train_time:1599285ms step_avg:1272.30ms
step:1268/1390 train_time:1600603ms step_avg:1272.34ms
step:1269/1390 train_time:1601932ms step_avg:1272.38ms
step:1270/1390 train_time:1603241ms step_avg:1272.41ms
step:1271/1390 train_time:1604560ms step_avg:1272.45ms
step:1272/1390 train_time:1605875ms step_avg:1272.48ms
step:1273/1390 train_time:1607198ms step_avg:1272.52ms
step:1274/1390 train_time:1608521ms step_avg:1272.56ms
step:1275/1390 train_time:1609841ms step_avg:1272.60ms
step:1275/1390 val_loss:3.4695 train_time:1609841ms step_avg:1272.60ms
step:1276/1390 train_time:1611189ms step_avg:1272.66ms
step:1277/1390 train_time:1612529ms step_avg:1272.71ms
step:1278/1390 train_time:1613856ms step_avg:1272.76ms
step:1279/1390 train_time:1615202ms step_avg:1272.81ms
step:1280/1390 train_time:1616535ms step_avg:1272.86ms
step:1281/1390 train_time:1617848ms step_avg:1272.89ms
step:1282/1390 train_time:1619169ms step_avg:1272.93ms
step:1283/1390 train_time:1620493ms step_avg:1272.97ms
step:1284/1390 train_time:1621812ms step_avg:1273.01ms
step:1285/1390 train_time:1623132ms step_avg:1273.04ms
step:1286/1390 train_time:1624455ms step_avg:1273.08ms
step:1287/1390 train_time:1625777ms step_avg:1273.12ms
step:1288/1390 train_time:1627104ms step_avg:1273.16ms
step:1289/1390 train_time:1628454ms step_avg:1273.22ms
step:1290/1390 train_time:1629797ms step_avg:1273.28ms
step:1291/1390 train_time:1631114ms step_avg:1273.31ms
step:1292/1390 train_time:1632428ms step_avg:1273.34ms
step:1293/1390 train_time:1633742ms step_avg:1273.38ms
step:1294/1390 train_time:1635067ms step_avg:1273.42ms
step:1295/1390 train_time:1636395ms step_avg:1273.46ms
step:1296/1390 train_time:1637731ms step_avg:1273.51ms
step:1297/1390 train_time:1639050ms step_avg:1273.54ms
step:1298/1390 train_time:1640369ms step_avg:1273.58ms
step:1299/1390 train_time:1641691ms step_avg:1273.62ms
step:1300/1390 train_time:1643018ms step_avg:1273.66ms
step:1300/1390 val_loss:3.4652 train_time:1643019ms step_avg:1273.66ms
step:1301/1390 train_time:1644356ms step_avg:1273.71ms
step:1302/1390 train_time:1645681ms step_avg:1273.75ms
step:1303/1390 train_time:1647017ms step_avg:1273.79ms
step:1304/1390 train_time:1648339ms step_avg:1273.83ms
step:1305/1390 train_time:1649661ms step_avg:1273.87ms
step:1306/1390 train_time:1650987ms step_avg:1273.91ms
step:1307/1390 train_time:1652313ms step_avg:1273.95ms
step:1308/1390 train_time:1653629ms step_avg:1273.98ms
step:1309/1390 train_time:1654951ms step_avg:1274.02ms
step:1310/1390 train_time:1656283ms step_avg:1274.06ms
step:1311/1390 train_time:1657602ms step_avg:1274.10ms
step:1312/1390 train_time:1658932ms step_avg:1274.14ms
step:1313/1390 train_time:1660255ms step_avg:1274.18ms
step:1314/1390 train_time:1661593ms step_avg:1274.23ms
step:1315/1390 train_time:1662946ms step_avg:1274.29ms
step:1316/1390 train_time:1664282ms step_avg:1274.34ms
step:1317/1390 train_time:1665610ms step_avg:1274.38ms
step:1318/1390 train_time:1666938ms step_avg:1274.42ms
step:1319/1390 train_time:1668264ms step_avg:1274.46ms
step:1320/1390 train_time:1669587ms step_avg:1274.49ms
step:1321/1390 train_time:1670923ms step_avg:1274.54ms
step:1322/1390 train_time:1672244ms step_avg:1274.58ms
step:1323/1390 train_time:1673568ms step_avg:1274.61ms
step:1324/1390 train_time:1674904ms step_avg:1274.66ms
step:1325/1390 train_time:1676241ms step_avg:1274.71ms
step:1325/1390 val_loss:3.4619 train_time:1676241ms step_avg:1274.71ms
step:1326/1390 train_time:1677605ms step_avg:1274.78ms
step:1327/1390 train_time:1678939ms step_avg:1274.82ms
step:1328/1390 train_time:1680290ms step_avg:1274.88ms
step:1329/1390 train_time:1681638ms step_avg:1274.93ms
step:1330/1390 train_time:1682992ms step_avg:1274.99ms
step:1331/1390 train_time:1684374ms step_avg:1275.07ms
step:1332/1390 train_time:1685715ms step_avg:1275.12ms
step:1333/1390 train_time:1687056ms step_avg:1275.17ms
step:1334/1390 train_time:1688397ms step_avg:1275.22ms
step:1335/1390 train_time:1689734ms step_avg:1275.27ms
step:1336/1390 train_time:1691059ms step_avg:1275.31ms
step:1337/1390 train_time:1692397ms step_avg:1275.36ms
step:1338/1390 train_time:1693747ms step_avg:1275.41ms
step:1339/1390 train_time:1695100ms step_avg:1275.47ms
step:1340/1390 train_time:1696428ms step_avg:1275.51ms
step:1341/1390 train_time:1697765ms step_avg:1275.56ms
step:1342/1390 train_time:1699089ms step_avg:1275.59ms
step:1343/1390 train_time:1700434ms step_avg:1275.64ms
step:1344/1390 train_time:1701791ms step_avg:1275.71ms
step:1345/1390 train_time:1703129ms step_avg:1275.75ms
step:1346/1390 train_time:1704478ms step_avg:1275.81ms
step:1347/1390 train_time:1705813ms step_avg:1275.85ms
step:1348/1390 train_time:1707152ms step_avg:1275.90ms
step:1349/1390 train_time:1708462ms step_avg:1275.92ms
step:1350/1390 train_time:1709810ms step_avg:1275.98ms
step:1350/1390 val_loss:3.4594 train_time:1709810ms step_avg:1275.98ms
step:1351/1390 train_time:1711176ms step_avg:1276.04ms
step:1352/1390 train_time:1712526ms step_avg:1276.10ms
step:1353/1390 train_time:1713870ms step_avg:1276.15ms
step:1354/1390 train_time:1715202ms step_avg:1276.19ms
step:1355/1390 train_time:1716523ms step_avg:1276.23ms
step:1356/1390 train_time:1717872ms step_avg:1276.28ms
step:1357/1390 train_time:1719216ms step_avg:1276.33ms
step:1358/1390 train_time:1720538ms step_avg:1276.36ms
step:1359/1390 train_time:1721871ms step_avg:1276.41ms
step:1360/1390 train_time:1723214ms step_avg:1276.46ms
step:1361/1390 train_time:1724552ms step_avg:1276.50ms
step:1362/1390 train_time:1725895ms step_avg:1276.55ms
step:1363/1390 train_time:1727239ms step_avg:1276.60ms
step:1364/1390 train_time:1728549ms step_avg:1276.62ms
step:1365/1390 train_time:1729877ms step_avg:1276.66ms
step:1366/1390 train_time:1731213ms step_avg:1276.71ms
step:1367/1390 train_time:1732541ms step_avg:1276.74ms
step:1368/1390 train_time:1733891ms step_avg:1276.80ms
step:1369/1390 train_time:1735244ms step_avg:1276.85ms
step:1370/1390 train_time:1736576ms step_avg:1276.89ms
step:1371/1390 train_time:1737917ms step_avg:1276.94ms
step:1372/1390 train_time:1739245ms step_avg:1276.98ms
step:1373/1390 train_time:1740595ms step_avg:1277.03ms
step:1374/1390 train_time:1741919ms step_avg:1277.07ms
step:1375/1390 train_time:1743242ms step_avg:1277.10ms
step:1375/1390 val_loss:3.4581 train_time:1743242ms step_avg:1277.10ms
step:1376/1390 train_time:1744611ms step_avg:1277.17ms
step:1377/1390 train_time:1745953ms step_avg:1277.22ms
step:1378/1390 train_time:1747292ms step_avg:1277.26ms
step:1379/1390 train_time:1748625ms step_avg:1277.30ms
step:1380/1390 train_time:1749971ms step_avg:1277.35ms
step:1381/1390 train_time:1751327ms step_avg:1277.41ms
step:1382/1390 train_time:1752654ms step_avg:1277.44ms
step:1383/1390 train_time:1754020ms step_avg:1277.51ms
step:1384/1390 train_time:1755348ms step_avg:1277.55ms
step:1385/1390 train_time:1756681ms step_avg:1277.59ms
step:1386/1390 train_time:1758018ms step_avg:1277.63ms
step:1387/1390 train_time:1759350ms step_avg:1277.67ms
step:1388/1390 train_time:1760669ms step_avg:1277.70ms
step:1389/1390 train_time:1761994ms step_avg:1277.73ms
step:1390/1390 train_time:1763330ms step_avg:1277.78ms
step:1390/1390 val_loss:3.4577 train_time:1763330ms step_avg:1277.78ms
peak memory consumption: 56230 MiB
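
A minimal sketch (not part of the original script) for pulling the validation-loss curve back out of a log like the one above. The regex matches the exact `val_loss` line format printed during this run; the file name `run_log.txt` is a hypothetical path to a saved copy of this log.

# Parse "step:N/M val_loss:X train_time:Tms ..." lines from a saved run log.
import re

val_re = re.compile(r"step:(\d+)/\d+ val_loss:([\d.]+) train_time:(\d+)ms")

def parse_val_losses(log_text):
    """Return (step, val_loss, train_time_ms) tuples; train-only lines are skipped
    automatically because they contain no val_loss field."""
    return [(int(s), float(v), int(t)) for s, v, t in val_re.findall(log_text)]

if __name__ == "__main__":
    with open("run_log.txt") as f:  # hypothetical path to a saved copy of this log
        for step, loss, ms in parse_val_losses(f.read()):
            print(f"{step:>5}  {loss:.4f}  {ms / 1000:.0f}s")

Run against this log, the sketch would print the checkpoints from step 400 (3.9720) down to the final step 1390 (3.4577), which is handy for plotting or comparing runs.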