import os
import sys
with open(sys.argv[0]) as f:
    code = f.read() # read the code of this file ASAP, for logging
import uuid
import time
import glob
import subprocess
import contextlib
from dataclasses import dataclass

import torch
torch.empty(1, device='cuda', requires_grad=True).backward() # presumably warms up CUDA + autograd before anything else touches the GPU
from torch import nn
import torch.nn.functional as F
import torch.distributed as dist
from torch.nn.parallel import DistributedDataParallel as DDP
# use of FlexAttention contributed by @KoszarskyB
from torch.nn.attention.flex_attention import BlockMask, flex_attention
# -----------------------------------------------------------------------------
# Muon optimizer

@torch.compile
def zeropower_via_newtonschulz5(G, steps):
    """
    Newton-Schulz iteration to compute the zeroth power / orthogonalization of G. We opt to use a
    quintic iteration whose coefficients are selected to maximize the slope at zero. For the purpose
    of minimizing steps, it turns out to be empirically effective to keep increasing the slope at
    zero even beyond the point where the iteration no longer converges all the way to one everywhere
    on the interval. This iteration therefore does not produce UV^T but rather something like US'V^T
    where S' is diagonal with S_{ii}' ~ Uniform(0.5, 1.5), which turns out not to hurt model
    performance at all relative to UV^T, where USV^T = G is the SVD.
    """
    assert len(G.shape) == 2
    a, b, c = (3.4445, -4.7750, 2.0315)
    X = G.bfloat16()
    if G.size(0) > G.size(1):
        X = X.T
    # Ensure spectral norm is at most 1
    X = X / (X.norm() + 1e-7)
    # Perform the NS iterations
    for _ in range(steps):
        A = X @ X.T
        B = b * A + c * A @ A # adapted from suggestion by @jxbz, @leloykun, and @YouJiacheng
        X = a * X + B @ X
    if G.size(0) > G.size(1):
        X = X.T
    return X
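
# Quick sanity sketch (not run as part of training): per the docstring above, the output of the
# iteration has singular values roughly in [0.5, 1.5] rather than exactly 1, e.g.:
#   G = torch.randn(768, 3072, device='cuda')
#   X = zeropower_via_newtonschulz5(G, steps=5)
#   print(torch.linalg.svdvals(X.float())) # spectrum ~Uniform(0.5, 1.5)-ish, i.e. near-orthogonal rows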
class Muon(torch.optim.Optimizer):
    """
    Muon - MomentUm Orthogonalized by Newton-schulz

    Muon internally runs standard SGD-momentum, and then performs an orthogonalization post-
    processing step, in which each 2D parameter's update is replaced with the nearest orthogonal
    matrix. To efficiently orthogonalize each update, we use a Newton-Schulz iteration, which has
    the advantage that it can be stably run in bfloat16 on the GPU.

    Some warnings:
    - This optimizer assumes that all parameters passed in are 2D.
    - It should not be used for the embedding layer, the final fully connected layer, or any {0,1}-D
      parameters; those should all be optimized by a standard method (e.g., AdamW).
    - To use it with 4D convolutional filters, it works well to just flatten their last 3 dimensions.
    - We believe it is unlikely to work well for training with small batch size.
    - We believe it may not work well for finetuning pretrained models, but we haven't tested this.
    - We have not yet tried this optimizer for training scenarios larger than NanoGPT (124M).

    Arguments:
        lr: The learning rate used by the internal SGD.
        momentum: The momentum used by the internal SGD.
        nesterov: Whether to use Nesterov-style momentum in the internal SGD. (recommended)
        ns_steps: The number of Newton-Schulz iteration steps to use.
    """
    def __init__(self, params, lr=0.02, momentum=0.95, nesterov=True, ns_steps=5):
        self.world_size = int(os.environ['WORLD_SIZE'])
        self.rank = int(os.environ['RANK'])
        defaults = dict(lr=lr, momentum=momentum, nesterov=nesterov, ns_steps=ns_steps)
        assert all(isinstance(p, torch.Tensor) for p in params)
        sizes = {p.numel() for p in params}
        param_groups = [dict(params=[p for p in params if p.numel() == size],
                             update_buffer=[torch.empty(size, device='cuda', dtype=torch.bfloat16) for _ in range(self.world_size)])
                        for size in sizes]
        super().__init__(param_groups, defaults)

    def step(self):
        for group in self.param_groups:
            lr = group['lr']
            momentum = group['momentum']
            nesterov = group['nesterov']
            ns_steps = group['ns_steps']
            update_buffers = group['update_buffer']
            # generate weight updates in distributed fashion
            params = group['params']
            """
            handle = None
            params_world = None
            def update_prev():
                if params_world is None:
                    return
                assert handle is not None
                handle.wait()
                for p_world, g_world in zip(params_world, update_buffers):
                    p_world.data.add_(
                        g_world.view_as(p_world),
                        alpha=-lr * max(1, p_world.size(0) / p_world.size(1)) ** 0.5,
                    )
            """
            # Single-GPU-only experiments, disabling comms silliness due to segfault stuff :(
            for base_i in range(len(params)): #[::self.world_size]:
                if True:
                #if base_i + rank < len(params):
                    p = params[base_i + self.rank]
                    g = p.grad
                    assert g is not None
                    state = self.state[p]
                    if 'momentum_buffer' not in state:
                        state['momentum_buffer'] = torch.zeros_like(g)
                    buf = state['momentum_buffer']
                    buf.lerp_(g, 1 - momentum)
                    g = g.lerp_(buf, momentum) if nesterov else buf
                    g = zeropower_via_newtonschulz5(g, steps=ns_steps).flatten()
                    p.data.add_(g.view_as(p), alpha=-lr * max(1, p.size(0) / p.size(1)) ** 0.5)
                #else:
                #    g = update_buffers[rank]
                #update_prev() # async all_gather instead of sync all_reduce by @YouJiacheng
                #handle = dist.all_gather(update_buffers, g, async_op=True)
                #params_world = params[base_i : base_i + self.world_size]
            #update_prev()
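
# Usage sketch (mirrors the construction further below; `model` here stands for any GPT instance,
# and RANK/WORLD_SIZE env vars must be set, as done by torchrun):
#   hidden_matrix_params = [p for p in model.blocks.parameters() if p.ndim == 2]
#   opt = Muon(hidden_matrix_params, lr=0.05, momentum=0.95)
#   loss.backward(); opt.step()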
# -----------------------------------------------------------------------------
# PyTorch nn.Module definitions for the GPT-2 model

def norm(x):
    return F.rms_norm(x, (x.size(-1),))

class CastedLinear(nn.Linear):
    def __init__(self, in_features, out_features):
        super().__init__(in_features, out_features, bias=False)
    def forward(self, x):
        return F.linear(x, self.weight.type_as(x))

class Rotary(nn.Module):
    def __init__(self, dim, max_seq_len=65536):
        super().__init__()
        # half-truncate RoPE by @YouJiacheng
        angular_freq = (1 / 1024) ** torch.linspace(0, 1, steps=dim//4, dtype=torch.float32)
        angular_freq = torch.cat([angular_freq, angular_freq.new_zeros(dim//4)])
        t = torch.arange(max_seq_len, dtype=torch.float32)
        theta = torch.einsum('i,j -> ij', t, angular_freq)
        self.cos = nn.Buffer(theta.cos(), persistent=False)
        self.sin = nn.Buffer(theta.sin(), persistent=False)
    def forward(self, x):
        cos, sin = self.cos[None, :x.size(-3), None, :], self.sin[None, :x.size(-3), None, :]
        x1, x2 = x.float().chunk(2, dim=-1)
        y1 = x1 * cos + x2 * sin
        y2 = x1 * (-sin) + x2 * cos
        return torch.cat((y1, y2), 3).type_as(x)
class CausalSelfAttention(nn.Module):
    def __init__(self, dim, num_heads):
        super().__init__()
        assert dim % num_heads == 0
        self.num_heads = num_heads
        self.c_q = CastedLinear(dim, dim)
        self.c_k = CastedLinear(dim, dim)
        self.c_v = CastedLinear(dim, dim)
        self.lambdas = nn.Parameter(torch.tensor([0.5, 0.5]))
        self.rotary = Rotary(dim // num_heads) # dim // num_heads = head_dim
        self.c_proj = CastedLinear(dim, dim)
        self.c_proj.weight.data.zero_() # zero init suggested by @Grad62304977
    def forward(self, x, ve, block_mask):
        B, T = x.size(0), x.size(1) # batch size, sequence length
        assert B == 1, 'Must use batch size = 1 for FlexAttention'
        q = self.c_q(x).view(B, T, self.num_heads, -1)
        k = self.c_k(x).view(B, T, self.num_heads, -1)
        v = self.c_v(x).view(B, T, self.num_heads, -1)
        if ve is not None:
            v = self.lambdas[0] * v + self.lambdas[1] * ve.view_as(v) # @KoszarskyB & @Grad62304977
        else: # skip mid-layers token value embeddings by @YouJiacheng
            v = self.lambdas[0] * v
        q, k = norm(q), norm(k) # QK norm @Grad62304977
        q, k = self.rotary(q), self.rotary(k)
        y = flex_attention(q.transpose(1, 2), k.transpose(1, 2), v.transpose(1, 2), block_mask=block_mask)
        y = y.transpose(1, 2).contiguous().view_as(x) # re-assemble all head outputs side by side
        y = self.c_proj(y)
        return y
class MLP(nn.Module):
    def __init__(self, dim):
        super().__init__()
        self.c_fc = CastedLinear(dim, 4 * dim)
        self.c_proj = CastedLinear(4 * dim, dim)
        self.c_proj.weight.data.zero_() # zero init suggested by @Grad62304977
    def forward(self, x):
        x = self.c_fc(x)
        x = F.relu(x).square() # https://arxiv.org/abs/2109.08668v2; ~1-2% better than GELU; suggested by @SKYLINEZ007 and @Grad62304977
        x = self.c_proj(x)
        return x

class Block(nn.Module):
    def __init__(self, model_dim, num_heads, use_attn=True):
        super().__init__()
        self.attn = CausalSelfAttention(model_dim, num_heads) if use_attn else None
        self.mlp = MLP(model_dim)
        self.lambdas = nn.Parameter(torch.tensor([1., 0.]))
    def forward(self, x, ve, x0, block_mask):
        x = self.lambdas[0] * x + self.lambdas[1] * x0
        if self.attn is not None:
            x = x + self.attn(norm(x), ve, block_mask)
        x = x + self.mlp(norm(x))
        return x

class ValueEmbedding(nn.Module):
    def __init__(self, vocab_size, model_dim):
        super().__init__()
        self.embed = nn.ModuleList([nn.Embedding(vocab_size, model_dim) for _ in range(3)])
    def forward(self, inputs):
        ve = [emb(inputs).bfloat16() for emb in self.embed]
        # 012 ... 012 structure on token value embeddings by @YouJiacheng, improved on @leloykun's U-net structure
        ve = [ve[0], ve[1], ve[2], None, None, None, None, None, None, ve[0], ve[1], ve[2]]
        return ve
# -----------------------------------------------------------------------------
# The main GPT-2 model

class GPT(nn.Module):
    def __init__(self, vocab_size, num_layers, num_heads, model_dim):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, model_dim)
        # skip attention of blocks.7 (the 8th layer) by @YouJiacheng
        self.blocks = nn.ModuleList([Block(model_dim, num_heads, use_attn=(i != 7))
                                     for i in range(num_layers)])
        # token value embeddings by @KoszarskyB - inspired by @Grad62304977's value residual learning
        # U-net structure on token value embeddings by @leloykun
        self.value_embeds = ValueEmbedding(vocab_size, model_dim)
        self.lm_head = CastedLinear(model_dim, vocab_size)
        self.lm_head.weight.data.zero_() # @Grad62304977
        # U-net design by @brendanh0gan
        self.num_encoder_layers = num_layers // 2 # Half of the layers for encoder
        self.num_decoder_layers = num_layers - self.num_encoder_layers # Remaining for decoder
        # Add learnable skip connection weights for decoder layers
        self.skip_weights = nn.Parameter(torch.ones(self.num_decoder_layers))

    def forward(self, inputs, targets, sliding_window_num_blocks):
        BLOCK_SIZE = 128
        seq_len = len(inputs)
        assert seq_len % BLOCK_SIZE == 0
        total_num_blocks = seq_len // BLOCK_SIZE
        assert inputs.ndim == 1
        docs = (inputs == 50256).cumsum(0)
        docs_low = docs.view(-1, BLOCK_SIZE)[:, 0].contiguous()
        docs_high = docs.view(-1, BLOCK_SIZE)[:, -1].contiguous()

        def document_causal(b, h, q_idx, kv_idx):
            causal_mask = q_idx >= kv_idx
            document_mask = docs[q_idx] == docs[kv_idx]
            return causal_mask & document_mask

        def dense_to_ordered(dense_mask):
            num_blocks = dense_mask.sum(dim=-1, dtype=torch.int32)
            indices = dense_mask.argsort(dim=-1, descending=True, stable=True).to(torch.int32)
            return num_blocks[None, None].contiguous(), indices[None, None].contiguous()

        def create_doc_swc_block_mask(sliding_window_num_blocks):
            kv_idx = block_idx = torch.arange(total_num_blocks, dtype=torch.int32, device='cuda')
            q_idx = block_idx[:, None]
            causal_bm = q_idx >= kv_idx
            causal_full_bm = q_idx > kv_idx
            window_bm = q_idx - kv_idx < sliding_window_num_blocks
            window_full_bm = window_bm # block-wise sliding window by @YouJiacheng
            # document_bm = (docs_low[q_idx] <= docs_high[kv_idx]) & (docs_low[kv_idx] <= docs_high[q_idx])
            document_bm = (docs_low[:, None] <= docs_high) & (docs_low <= docs_high[:, None])
            document_full_bm = (docs_low[:, None] == docs_high) & (docs_low == docs_high[:, None])
            nonzero_bm = causal_bm & window_bm & document_bm
            full_bm = causal_full_bm & window_full_bm & document_full_bm
            kv_num_blocks, kv_indices = dense_to_ordered(nonzero_bm & ~full_bm)
            full_kv_num_blocks, full_kv_indices = dense_to_ordered(full_bm)
            return BlockMask.from_kv_blocks(
                kv_num_blocks,
                kv_indices,
                full_kv_num_blocks,
                full_kv_indices,
                BLOCK_SIZE=BLOCK_SIZE,
                mask_mod=document_causal,
            )

        block_mask = create_doc_swc_block_mask(sliding_window_num_blocks)

        x0 = norm(self.embed(inputs[None]).bfloat16()) # use of norm here by @Grad62304977
        x = x0
        ve = self.value_embeds(inputs)
        assert len(ve) == len(self.blocks)
        ve_enc, ve_dec = ve[:self.num_encoder_layers], ve[self.num_encoder_layers:]

        # Store outputs for U-Net skip connections
        skip_connections = []
        # Encoder pass - process only the first half of the blocks
        for i in range(self.num_encoder_layers):
            x = self.blocks[i](x, ve_enc[i], x0, block_mask)
            skip_connections.append(x)
        # Decoder pass - process the remaining blocks with weighted skip connections
        for i in range(self.num_decoder_layers):
            x = x + self.skip_weights[i] * skip_connections.pop()
            # U-net structure on token value embeddings by @leloykun
            x = self.blocks[self.num_encoder_layers + i](x, ve_dec[i], x0, block_mask)

        x = norm(x)
        logits = self.lm_head(x)
        logits = 15 * torch.tanh(logits / 15) # @Grad62304977 added tanh softcapping, @KoszarskyB reduced it from 30 to 15
        logits = logits.float()
        loss = F.cross_entropy(logits.view(-1, logits.size(-1)), targets)
        return loss
# -----------------------------------------------------------------------------
# Our own simple Distributed Data Loader

def _load_data_shard(path):
    # torch.from_file only reads the 256-int32 header here; the token payload is read below
    header = torch.from_file(path, False, 256, dtype=torch.int32)
    assert header[0] == 20240520, 'magic number mismatch in the data .bin file'
    assert header[1] == 1, 'unsupported version'
    num_tokens = int(header[2]) # number of tokens (claimed)
    with open(path, 'rb', buffering=0) as f:
        tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) # avoid pin_memory copy by @YouJiacheng
        f.seek(256 * 4)
        nbytes = f.readinto(tokens.numpy()) # avoid bytes->array copy by @YouJiacheng
        assert nbytes == 2 * num_tokens, 'number of tokens read does not match header'
    return tokens
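
# Shard layout implied by the reader above (our assumed .bin convention):
#   int32[256] header: header[0] = 20240520 (magic), header[1] = 1 (version), header[2] = num_tokens
#   followed by num_tokens uint16 token ids (hence the 256*4-byte seek and the 2*num_tokens size check)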
class DistributedDataLoader:
    def __init__(self, filename_pattern):
        self.rank = int(os.environ['RANK'])
        self.world_size = int(os.environ['WORLD_SIZE'])
        self.files = sorted(glob.glob(filename_pattern))
        self.reset()
    def reset(self):
        self.current_shard = -1
        self.advance()
    def advance(self):
        self.current_shard = (self.current_shard + 1) % len(self.files)
        self.current_position = 0
        self.tokens = _load_data_shard(self.files[self.current_shard])
    def next_batch(self, batch_size):
        assert batch_size % self.world_size == 0
        device_batch_size = batch_size // self.world_size
        # load next shard if necessary
        if self.current_position + batch_size + 1 >= len(self.tokens):
            self.advance()
        pos = self.current_position + self.rank * device_batch_size
        device_batch_tokens = self.tokens[pos:pos+device_batch_size+1]
        # advance current position
        self.current_position += batch_size
        inputs = device_batch_tokens[:-1].to(device='cuda', dtype=torch.int32, non_blocking=True)
        targets = device_batch_tokens[1:].to(device='cuda', dtype=torch.int64, non_blocking=True)
        return inputs, targets
# -----------------------------------------------------------------------------
# int main

@dataclass
class Hyperparameters:
    # data
    train_bin = 'data/fineweb10B/fineweb_train_*.bin' # input .bin to train on
    val_bin = 'data/fineweb10B/fineweb_val_*.bin' # input .bin to eval validation loss on
    # optimization
    batch_size = 8*64*1024 # batch size in tokens
    max_device_batch_size = 64*1024 # batch size per device in tokens
    num_iterations = 1390 # number of iterations to run
    cooldown_frac = 0.4 # fraction of training spent cooling down the learning rate
    bf16_embeds = True
    # evaluation and logging
    val_loss_every = 25 #125 # every how many steps to evaluate val loss? 0 for only at the end
    val_tokens = 10485760 # how many tokens of validation data? it's important to keep this fixed for consistent comparisons
    # implementation
    save_checkpoint = False
args = Hyperparameters()
micro_bs = args.max_device_batch_size

# set up DDP (distributed data parallel). torchrun sets these env variables
rank = int(os.environ['RANK'])
local_rank = int(os.environ['LOCAL_RANK'])
world_size = int(os.environ['WORLD_SIZE'])
assert torch.cuda.is_available()
torch.cuda.set_device(local_rank)
dist.init_process_group(backend='nccl', device_id=torch.device(local_rank))
dist.barrier()
master_process = (rank == 0) # this process will do logging, checkpointing etc.
# begin logging
logfile = None
if master_process:
    run_id = uuid.uuid4()
    os.makedirs('logs', exist_ok=True)
    logfile = f'logs/{run_id}.txt'
    print(logfile)
def print0(s, console=False):
    if master_process:
        with open(logfile, 'a') as f:
            if console:
                print(s)
            print(s, file=f)

# begin by printing this file (the Python code)
print0(code)
print0('='*100)
# log information about the hardware/software environment this is running on
print0(f'Running Python {sys.version}')
print0(f'Running PyTorch {torch.version.__version__} compiled for CUDA {torch.version.cuda}')
print0(subprocess.run(['nvidia-smi'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True).stdout)
print0('='*100)

# load data
train_loader = DistributedDataLoader(args.train_bin)
val_loader = DistributedDataLoader(args.val_bin)
print0(f'Training dataloader files: {train_loader.files}')
print0(f'Validation dataloader files: {val_loader.files}')
print0('='*100)
# init the list of (model, optimizers, schedulers) tuples; this holds all of the separate models that we use here
# (Both the lrs and the momentums are defined as ranges further below)
#outer_opt_lr = 1.0 #0.7
#outer_opt_momentum = .9
models_opts_schedulers = []
num_models_to_simulate = 8
#diloco_update_steps = 1
diloco_update_steps = 25 #10
# Steps to compile before copying out the model to its replicas
compile_steps = 15 #2 #20
# Test schedule, this should basically be at or near the baseline
diloco_node_warmup_schedule = {
    0: 8
}
# initialize the base number of diloco nodes to use
curr_num_diloco_nodes = list(diloco_node_warmup_schedule.values())[0]

def get_active_nodes(curr_num_nodes, total_num_nodes):
    assert total_num_nodes % curr_num_nodes == 0
    stride = total_num_nodes // curr_num_nodes
    result = list(range(0, total_num_nodes, stride))
    print("active nodes", result)
    return result
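
# e.g. with the schedule above, get_active_nodes(8, 8) -> [0, 1, ..., 7] (all nodes active),
# while get_active_nodes(4, 8) would stride out to [0, 2, 4, 6]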
###############################################
#   Make Logarithmic DiLoCo Update Schedule   #
###############################################
#total_steps = 1390
#spacing_factor = 150 # the original schedule is scaled by this; it impacts how quickly the initial steps grow
# Calculate the evenly-spaced update steps; append one more below if the total doesn't divide evenly, since we need a final update at the last step of training
is_final_update_step = (args.num_iterations % diloco_update_steps > 0)
orig_schedule = torch.range(diloco_update_steps, args.num_iterations - (args.num_iterations % diloco_update_steps), step=diloco_update_steps, dtype=torch.int, device=torch.device('cuda'))
if is_final_update_step:
    orig_schedule = torch.cat([orig_schedule, torch.tensor([args.num_iterations], dtype=torch.int, device=torch.device('cuda'))], dim=-1)
# We will need to reverse this -- note we use orig_schedule (with the final step appended), not a bare arange
#log_schedule = torch.log1p(orig_schedule.float() / spacing_factor)
log_schedule = torch.log1p(orig_schedule.float()) #### / spacing_factor)
# reflect the schedule so the density works out properly (dense early, sparse later)
log_schedule = log_schedule.max() - log_schedule
# linearly scale to match the original range
log_schedule = log_schedule * (args.num_iterations / log_schedule.max())
# cast to int and flip for proper ordering
log_schedule = log_schedule.int().flip(dims=(-1,))
# semi-hacky for now, but makes the code logic easier
log_schedule = log_schedule.tolist()
# Rebinding for hacking convenience (if we need to hack a different schedule in)
#diloco_outer_schedule = log_schedule
diloco_outer_schedule = orig_schedule.tolist()
print("diloco outer schedule!", diloco_outer_schedule)
print("num outer diloco update steps!", len(diloco_outer_schedule))
# there are only 50257 unique GPT-2 tokens; we extend to nearest multiple of 128 for efficiency. suggested to me by @Grad62304977.
# this originates from Karpathy's experiments.
core_model = GPT(vocab_size=50304, num_layers=12, num_heads=6, model_dim=768)
core_model = core_model.cuda()
if args.bf16_embeds:
    for m in core_model.modules():
        if isinstance(m, nn.Embedding):
            m.bfloat16()
core_model = torch.compile(core_model)
#####ddp_model = DDP(model, device_ids=[local_rank], broadcast_buffers=False, gradient_as_bucket_view=True)

# Add outer Nesterov optimizer to the core model
# The hyperparameters for these are updated every step, so we set them to reasonable defaults instead
big_adam_params = [p for p in core_model.parameters() if list(p.shape) == [50304, 768]]
other_params = [p for p in core_model.parameters() if list(p.shape) != [50304, 768]]
#outer_opt = torch.optim.SGD(core_model.parameters(), lr=1.0, momentum=0.9, nesterov=True)
outer_opt = torch.optim.SGD([dict(params=big_adam_params, lr=0.6, nesterov=True),
                             dict(params=other_params, lr=0.6, nesterov=True)])
# Set an LR mult for the nesterov step for the inner Adam parameters
# big adam
outer_opt.param_groups[0]['max_lr'] = 1. #.8 #1.4 #1.0 #1.4
outer_opt.param_groups[0]['min_lr'] = .7 #.5 #.9 #.7 #.9 #.7
outer_opt.param_groups[0]['nesterov_warmup'] = 1800 #1000
# others
outer_opt.param_groups[1]['max_lr'] = 1.
outer_opt.param_groups[1]['min_lr'] = .7
outer_opt.param_groups[1]['nesterov_warmup'] = 1000
######################################################################################
#   Set Initial Momentum To 0 in outer_opt (PyTorch bug w/ first step dampening)     #
######################################################################################
for parameter in core_model.parameters():
    parameter.grad = torch.zeros_like(parameter)
# Set outer opt momentum buffers (best to do this internally to avoid spaghetti code)
outer_opt.step()
core_model.zero_grad(set_to_none=True)

print("Compiling model!")
# call the model so it is properly built (and compiled) before cloning
for _ in range(compile_steps):
    core_model.forward(
        torch.randint(0, 128, (1024*64,)).to(device='cuda', dtype=torch.long),
        torch.randint(0, 128, (1024*64,)).to(device='cuda', dtype=torch.long),
        torch.tensor([128], device='cuda', dtype=torch.long),
    ).mean().backward()
# Set gradients to none
core_model.zero_grad(set_to_none=True)
print("Model compiled.")
# tmp dev import
import copy

for _ in range(num_models_to_simulate):
    # make a model copy
    model_copy = copy.deepcopy(core_model)
    # collect the parameters to optimize
    hidden_matrix_params = [p for p in model_copy.blocks.parameters() if p.ndim == 2]
    embed_params = [model_copy.embed.weight, *model_copy.value_embeds.parameters()]
    scalar_params = [p for p in model_copy.parameters() if p.ndim < 2]
    head_params = [model_copy.lm_head.weight]
    # init the optimizer(s)
    optimizer1 = torch.optim.Adam([dict(params=embed_params, lr=0.6),
                                   dict(params=head_params, lr=0.008),
                                   dict(params=scalar_params, lr=0.04)],
                                  betas=(0.8, 0.95), fused=True)
    optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95)
    # For custom LR scheduling, we set the base lr in the optimizer, so we can calculate schedules based on that
    optimizers = [optimizer1, optimizer2]
    for opt in optimizers:
        for group in opt.param_groups:
            group['base_lr'] = group['lr']
    # learning rate schedule: stable then decay
    def get_lr(it):
        t = 1 - it / args.num_iterations # time remaining in training
        #assert 1 >= t > 0
        t = max(0., min(t, 1.))
        # 1) constant lr for first part of training
        if t >= args.cooldown_frac:
            return 1.0
        # 2) then linear cooldown
        else:
            return t / args.cooldown_frac
    schedulers = [torch.optim.lr_scheduler.LambdaLR(opt, get_lr) for opt in optimizers]
    models_opts_schedulers.append((model_copy, optimizers, schedulers))
outer_opt_scheduler = torch.optim.lr_scheduler.LambdaLR(outer_opt, get_lr)
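
# For reference, the multiplier get_lr produces with cooldown_frac=0.4 and num_iterations=1390:
#   get_lr(0)    == 1.0   # constant phase
#   get_lr(834)  == 1.0   # t = 0.40, boundary of the cooldown
#   get_lr(1112) == 0.5   # halfway down the linear cooldown
#   get_lr(1390) == 0.0   # end of training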
# sliding window size schedule: linear increase over training in chunks of 128 from 128 -> 1792. By @fernbear.bsky.social
def get_sliding_window_blocks(it):
    x = it / args.num_iterations # training progress
    assert 0 <= x <= 1
    return int(((1 - x) * 128 + x * 1856) // 128)
sliding_window_num_blocks = torch.tensor(1, dtype=torch.int32, device='cuda')
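
# e.g. get_sliding_window_blocks(0) -> 1 block (128 tokens); at step 1390 it reaches
# int(1856 // 128) = 14 blocks, i.e. the 1792-token window mentioned above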
# Start training loop
training_time_ms = 0
# start the clock
torch.cuda.synchronize()
t0 = time.perf_counter()
# begin training
train_steps = args.num_iterations
for step in range(train_steps + 1):
    last_step = (step == train_steps)
    # This effectively ignores the timing of the first 10 steps, which are slower for weird reasons.
    # Alternately, and slightly more correctly in terms of benchmarking, we could do 10
    # steps with dummy data first, and then re-initialize the model and reset the loader.
    if step == 10:
        training_time_ms = 0
        t0 = time.perf_counter()
    timed_steps = float('nan') if step <= 11 else (step - 10) + 1 # <= 11 to avoid bug in val
    sliding_window_num_blocks.copy_(get_sliding_window_blocks(step))
    #############################################
    #   DiLoCo Outer Loop (Distributed) Updates #
    #############################################
    # Update the core model w/ updates from the other models (optionally on different timescales
    # for different parts of the model; for now, simply 1 step per part)
    # Zip all parameters together so we can stack them, average them, then merge them into the core model
    #if last_step or (step != 0 and step in diloco_outer_schedule):
    # We need to check back a step since this needs to happen before the main code does. TODO is to move this
    # above the main code; we need to check whether there are any logic breaks or the like before doing that, however.
    # So, this is a quick hack to get things up and going properly
    if last_step or (step != 0 and step in diloco_outer_schedule):
        custom_update_every = not (last_step or (step != 0 and step in diloco_outer_schedule))
        # filter the models so that we only update the ones that are currently active
        active_node_model_params = [amos[0].parameters() for amos in active_model_opts_schedulers] #[models_opts_schedulers[i][0].parameters() for i in active_nodes]
        print("active node model params!", active_node_model_params)
        grouped_active_node_params = zip(*active_node_model_params)
        ##################
        #   Momentum     #
        ##################
        #outer_opt_momentum_warmup_steps = 1000 #1400 #400 #10000 #1000 #1000 #300 #250 #500 #100 #300 #600 #300
        #outer_opt_max_base_lr = 1.
        #outer_opt_min_base_lr = .7
        outer_opt_min_momentum = 0.
        outer_opt_max_momentum = .9
        #################################
        #   Outer Opt Hyp Schedules     #
        #################################
        for group in outer_opt.param_groups:
            frac = min(step/group['nesterov_warmup'], 1)
            curr_outer_momentum = (1 - frac) * outer_opt_min_momentum + frac * outer_opt_max_momentum
            # Hacky; this should be consolidated into one single function for the LR scheduler
            curr_outer_base_lr = (frac * group['min_lr'] + (1. - frac) * group['max_lr'])
            group['lr'] = curr_outer_base_lr * get_lr(step)
            # Schedule dampening from momentum
            group['momentum'] = curr_outer_momentum
            group['dampening'] = 1. - curr_outer_momentum
        print("len active nodes!", len(active_nodes))
        #######################################
        #   Simulate Grad Diff All-Reduce     #
        #######################################
        print("shortly before diloco update!")
        for core_parameters, dist_parameters_list in zip(core_model.parameters(), grouped_active_node_params):
            skip_nesterov = False #True
            if False: #custom_update_every:
                if len(active_nodes) > 1 and list(core_parameters.shape) == [50304, 768]: #len(core_parameters.shape) < 2 and len(active_nodes) > 1:
                    print("updating custom parameter every step")
                    mean = torch.stack(dist_parameters_list, dim=0).mean(dim=0)
                    for dist_params in dist_parameters_list:
                        dist_params.data.copy_(mean.data, non_blocking=True)
            else:
                # Only do a DiLoCo update if there is more than 1 simulated node
                if len(active_nodes) > 1:
                    if skip_nesterov: #list(core_parameters.shape) == [50304, 768]:
                        mean = torch.stack(dist_parameters_list, dim=0).mean(dim=0)
                        core_parameters.data.copy_(mean.data, non_blocking=True)
                        for dist_params in dist_parameters_list:
                            dist_params.data.copy_(mean.data, non_blocking=True)
                    else:
                        grads_all = (core_parameters.data.unsqueeze(0) - torch.stack(dist_parameters_list, dim=0))
                        core_parameters.grad = grads_all.mean(dim=0)
            # Bug warning -- this should be a bug! Does this perform better like this? If so, why?
            ###############################
            #   DiLoCo Outer Opt Step     #
            ###############################
            outer_opt.step()
            outer_opt.zero_grad(set_to_none=True)
            ###################################################
            #   Simulate Update Happening Simultaneously      #
            ###################################################
            for dist_params in dist_parameters_list:
                dist_params.data.copy_(core_parameters.data, non_blocking=True)
    # TODO: Get this to work (loss display bug)
    # --------------- VALIDATION SECTION -----------------
    #if last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0):
    # Only eval after every DiLoCo update step
    if last_step or (args.val_loss_every > 0 and step in diloco_outer_schedule): #step % args.val_loss_every == 0):
        # stop the clock
        torch.cuda.synchronize()
        training_time_ms += 1000 * (time.perf_counter() - t0)
        # run validation batches
        core_model.eval()
        val_loader.reset()
        val_loss = 0.0
        # calculate the number of steps to take in the val loop
        val_batch_size = world_size * micro_bs
        assert args.val_tokens % val_batch_size == 0
        val_steps = args.val_tokens // val_batch_size
        for _ in range(val_steps):
            with torch.no_grad():
                inputs_val, targets_val = val_loader.next_batch(val_batch_size)
                val_loss += core_model(inputs_val, targets_val, sliding_window_num_blocks)
        dist.all_reduce(val_loss, op=dist.ReduceOp.AVG)
        val_loss /= val_steps
        # logging
        print0(f'step:{step}/{train_steps} val_loss:{val_loss:.4f} train_time:{training_time_ms:.0f}ms step_avg:{training_time_ms/(timed_steps-1):.2f}ms', console=True)
        # start the clock again
        torch.cuda.synchronize()
        t0 = time.perf_counter()
    if last_step:
        if master_process and args.save_checkpoint:
            log = dict(step=step, code=code, model=core_model.state_dict(), optimizers=[opt.state_dict() for opt in optimizers])
            os.makedirs(f'logs/{run_id}', exist_ok=True)
            torch.save(log, f'logs/{run_id}/state_step{step:06d}.pt')
        # the last step only has the validation loop, so break to avoid training
        break
    # --------------- TRAINING SECTION -----------------
    #model.train()
    # set each model to train mode
    for model, _, _ in models_opts_schedulers:
        model.train()
    batch_size = args.batch_size
    assert batch_size % world_size == 0
    inputs_train, targets_train = train_loader.next_batch(batch_size)
    assert len(inputs_train) <= micro_bs or len(inputs_train) % micro_bs == 0
    assert batch_size//micro_bs == len(models_opts_schedulers), "The number of microbatches and the number of model_opt pairs need to be equal in this experiment (functions may need to be written to support iteration over model pairs instead of indexing by microbatch idx)."
    active_nodes = get_active_nodes(curr_num_diloco_nodes, num_models_to_simulate)
    models_to_run_idx = active_nodes * (num_models_to_simulate // curr_num_diloco_nodes)
    total_loss = 0
    for i, (micro_inputs_train, micro_targets_train) in enumerate(zip(inputs_train.split(micro_bs), targets_train.split(micro_bs))):
        # forward on a distinct model per microbatch
        model_to_run = models_opts_schedulers[models_to_run_idx[i]][0]
        #model_to_run = models_opts_schedulers[i][0] #[models_to_run_idx[i]][0]
        print(f"running model #{models_to_run_idx[i]}")
        #model_to_run(micro_inputs_train, micro_targets_train, sliding_window_num_blocks).div(num_models_to_simulate/curr_num_diloco_nodes).backward()
        loss_scale = num_models_to_simulate/curr_num_diloco_nodes
        loss = model_to_run(micro_inputs_train, micro_targets_train, sliding_window_num_blocks).div(loss_scale)
        print("loss!", loss.item() * loss_scale)
        loss.backward()
        total_loss += 1./num_models_to_simulate * loss_scale * loss.item()
    # momentum warmup for Muon
    frac = min(step/300, 1)
    active_model_opts_schedulers = [models_opts_schedulers[i] for i in active_nodes]
    #active_model_opts_schedulers = models_opts_schedulers #[models_opts_schedulers[i] for i in active_nodes]
    print("active model opts schedulers indices:", active_nodes)
    for model, opts, _ in active_model_opts_schedulers:
        # update momentum for muon in each group
        for group in opts[1].param_groups: #optimizer2.param_groups:
            group['momentum'] = (1 - frac) * 0.85 + frac * 0.95
        # step the optimizers and schedulers
        for opt in opts:
            # Update LR
            for group in opt.param_groups:
                # LR should be a function of the current step, and maybe of how many accumulations are used
                # (TODO later for the other LR adjustments)
                group['lr'] = group['base_lr'] * get_lr(step)
            if isinstance(opt, torch.optim.Adam):
                # hardcoding base eps here. Tuned around 8 accumulation steps.
                # this grow scale drastically impacts how the eps changes w/ the number of nodes; it seems
                # to be important for stability?
                eps_grow_scale = .8 #.7
                #new_eps = 1e-10 ** (1. / (eps_grow_scale * torch.log1p(torch.tensor(curr_num_diloco_nodes)).item()))
                #group['eps'] = 1e-16 #new_eps #1e-8 ** (1./curr_num_diloco_nodes)
                #group['betas'] = (0.8, 0.92 ** (eps_grow_scale * torch.log1p(torch.tensor(curr_num_diloco_nodes)).item()))
            # Step
            opt.step()
        # null the gradients
        model.zero_grad(set_to_none=True)
    # logging
    approx_time = training_time_ms + 1000 * (time.perf_counter() - t0)
    print0(f'step:{step+1}/{train_steps} loss:{total_loss:.4f} train_time:{approx_time:.0f}ms step_avg:{approx_time/timed_steps:.2f}ms', console=True)

print0(f'peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB')
dist.destroy_process_group()
====================================================================================================
Running Python 3.12.7 (main, May 29 2025, 18:11:21) [GCC 13.2.0]
Running PyTorch 2.8.0.dev20250529+cu126 compiled for CUDA 12.6
Thu May 29 18:53:17 2025
+-----------------------------------------------------------------------------------------+
| NVIDIA-SMI 550.144.03             Driver Version: 550.144.03     CUDA Version: 12.4     |
|-----------------------------------------+------------------------+----------------------+
| GPU  Name                 Persistence-M | Bus-Id          Disp.A | Volatile Uncorr. ECC |
| Fan  Temp   Perf          Pwr:Usage/Cap |           Memory-Usage | GPU-Util  Compute M. |
|                                         |                        |               MIG M. |
|=========================================+========================+======================|
|   0  NVIDIA H200                    On  |   00000000:8D:00.0 Off |                    0 |
| N/A   32C    P0            116W /  700W |    1172MiB / 143771MiB |      1%      Default |
|                                         |                        |             Disabled |
+-----------------------------------------+------------------------+----------------------+

+-----------------------------------------------------------------------------------------+
| Processes:                                                                              |
|  GPU   GI   CI              PID   Type   Process name                        GPU Memory |
|        ID   ID                                                               Usage      |
|=========================================================================================|
+-----------------------------------------------------------------------------------------+
====================================================================================================
Training dataloader files: ['data/fineweb10B/fineweb_train_000001.bin', 'data/fineweb10B/fineweb_train_000002.bin', 'data/fineweb10B/fineweb_train_000003.bin', 'data/fineweb10B/fineweb_train_000004.bin', 'data/fineweb10B/fineweb_train_000005.bin', 'data/fineweb10B/fineweb_train_000006.bin', 'data/fineweb10B/fineweb_train_000007.bin', 'data/fineweb10B/fineweb_train_000008.bin']
Validation dataloader files: ['data/fineweb10B/fineweb_val_000000.bin']
====================================================================================================
step:1/1390 loss:10.8258 train_time:49743ms step_avg:nanms
step:2/1390 loss:10.2452 train_time:50779ms step_avg:nanms
step:3/1390 loss:8.8388 train_time:51870ms step_avg:nanms
step:4/1390 loss:7.9485 train_time:52957ms step_avg:nanms
step:5/1390 loss:7.7019 train_time:54048ms step_avg:nanms
step:6/1390 loss:7.3829 train_time:55141ms step_avg:nanms
step:7/1390 loss:7.3480 train_time:56232ms step_avg:nanms
step:8/1390 loss:7.1582 train_time:57326ms step_avg:nanms
step:9/1390 loss:7.2173 train_time:58428ms step_avg:nanms
step:10/1390 loss:7.0012 train_time:59526ms step_avg:nanms
step:11/1390 loss:6.9733 train_time:1098ms step_avg:nanms
step:12/1390 loss:6.8837 train_time:2193ms step_avg:nanms
step:13/1390 loss:6.8369 train_time:3287ms step_avg:1095.55ms
step:14/1390 loss:6.7214 train_time:4383ms step_avg:1095.86ms
step:15/1390 loss:6.7175 train_time:5482ms step_avg:1096.42ms
step:16/1390 loss:6.6534 train_time:6577ms step_avg:1096.17ms
step:17/1390 loss:6.6479 train_time:7681ms step_avg:1097.28ms
step:18/1390 loss:6.6857 train_time:8787ms step_avg:1098.41ms
step:19/1390 loss:6.6997 train_time:9891ms step_avg:1099.01ms
step:20/1390 loss:6.5082 train_time:10989ms step_avg:1098.89ms
step:21/1390 loss:6.5361 train_time:12091ms step_avg:1099.20ms
step:22/1390 loss:6.6050 train_time:13191ms step_avg:1099.25ms
step:23/1390 loss:6.4230 train_time:14283ms step_avg:1098.68ms
step:24/1390 loss:6.4659 train_time:15381ms step_avg:1098.62ms
step:25/1390 loss:6.3495 train_time:16487ms step_avg:1099.11ms
step:25/1390 val_loss:6.4518 train_time:16517ms step_avg:1101.11ms
step:26/1390 loss:6.4520 train_time:17642ms step_avg:1102.62ms
step:27/1390 loss:6.1280 train_time:18739ms step_avg:1102.30ms
step:28/1390 loss:6.1576 train_time:19837ms step_avg:1102.07ms
step:29/1390 loss:6.1492 train_time:20937ms step_avg:1101.95ms
step:30/1390 loss:6.1138 train_time:22028ms step_avg:1101.38ms
step:31/1390 loss:6.1508 train_time:23128ms step_avg:1101.32ms
step:32/1390 loss:6.0523 train_time:24226ms step_avg:1101.16ms
step:33/1390 loss:6.1129 train_time:25322ms step_avg:1100.98ms
step:34/1390 loss:6.0926 train_time:26425ms step_avg:1101.04ms
step:35/1390 loss:6.1715 train_time:27531ms step_avg:1101.25ms
step:36/1390 loss:6.0990 train_time:28628ms step_avg:1101.06ms
step:37/1390 loss:6.1261 train_time:29736ms step_avg:1101.35ms
step:38/1390 loss:6.0865 train_time:30837ms step_avg:1101.31ms
step:39/1390 loss:6.1251 train_time:31938ms step_avg:1101.30ms
step:40/1390 loss:6.0236 train_time:33036ms step_avg:1101.19ms
step:41/1390 loss:6.0626 train_time:34133ms step_avg:1101.08ms
step:42/1390 loss:6.0090 train_time:35231ms step_avg:1100.96ms
step:43/1390 loss:6.0947 train_time:36330ms step_avg:1100.91ms
step:44/1390 loss:6.0088 train_time:37433ms step_avg:1100.98ms
step:45/1390 loss:5.9916 train_time:38535ms step_avg:1101.00ms
step:46/1390 loss:6.0133 train_time:39635ms step_avg:1100.97ms
step:47/1390 loss:5.9783 train_time:40738ms step_avg:1101.02ms
step:48/1390 loss:6.0124 train_time:41845ms step_avg:1101.18ms
step:49/1390 loss:5.9458 train_time:42957ms step_avg:1101.46ms
step:50/1390 loss:5.9646 train_time:44063ms step_avg:1101.57ms
step:50/1390 val_loss:5.6122 train_time:44095ms step_avg:1102.37ms
step:51/1390 loss:5.5663 train_time:45215ms step_avg:1102.81ms
step:52/1390 loss:5.5753 train_time:46320ms step_avg:1102.85ms
step:53/1390 loss:5.5685 train_time:47423ms step_avg:1102.85ms
step:54/1390 loss:5.5908 train_time:48516ms step_avg:1102.65ms
step:55/1390 loss:5.6310 train_time:49617ms step_avg:1102.61ms
step:56/1390 loss:5.6214 train_time:50714ms step_avg:1102.48ms
step:57/1390 loss:5.6372 train_time:51814ms step_avg:1102.44ms
step:58/1390 loss:5.6802 train_time:52911ms step_avg:1102.32ms
step:59/1390 loss:5.6583 train_time:54001ms step_avg:1102.06ms
step:60/1390 loss:5.6948 train_time:55099ms step_avg:1101.97ms
step:61/1390 loss:5.7023 train_time:56197ms step_avg:1101.89ms
step:62/1390 loss:5.7134 train_time:57301ms step_avg:1101.95ms
step:63/1390 loss:5.6757 train_time:58397ms step_avg:1101.84ms
step:64/1390 loss:5.8093 train_time:59495ms step_avg:1101.76ms
step:65/1390 loss:5.6793 train_time:60588ms step_avg:1101.60ms
step:66/1390 loss:5.7294 train_time:61687ms step_avg:1101.56ms
step:67/1390 loss:5.7175 train_time:62787ms step_avg:1101.53ms
step:68/1390 loss:5.8336 train_time:63886ms step_avg:1101.48ms
step:69/1390 loss:5.7350 train_time:64992ms step_avg:1101.56ms
step:70/1390 loss:5.7259 train_time:66090ms step_avg:1101.50ms
step:71/1390 loss:5.7102 train_time:67192ms step_avg:1101.52ms
step:72/1390 loss:5.7089 train_time:68292ms step_avg:1101.49ms
step:73/1390 loss:5.7198 train_time:69388ms step_avg:1101.40ms
step:74/1390 loss:5.7230 train_time:70491ms step_avg:1101.43ms
step:75/1390 loss:5.7054 train_time:71591ms step_avg:1101.39ms
step:75/1390 val_loss:5.2850 train_time:71620ms step_avg:1101.84ms
step:76/1390 loss:5.2612 train_time:72737ms step_avg:1102.08ms
step:77/1390 loss:5.3209 train_time:73837ms step_avg:1102.05ms
step:78/1390 loss:5.3382 train_time:74936ms step_avg:1102.00ms
step:79/1390 loss:5.1756 train_time:76033ms step_avg:1101.93ms
step:80/1390 loss:5.3560 train_time:77138ms step_avg:1101.97ms
step:81/1390 loss:5.3357 train_time:78238ms step_avg:1101.95ms
step:82/1390 loss:5.5443 train_time:79354ms step_avg:1102.14ms
step:83/1390 loss:5.4461 train_time:80456ms step_avg:1102.13ms
step:84/1390 loss:5.4418 train_time:81556ms step_avg:1102.10ms
step:85/1390 loss:5.4272 train_time:82643ms step_avg:1101.91ms
step:86/1390 loss:5.4724 train_time:83735ms step_avg:1101.78ms
step:87/1390 loss:5.4819 train_time:84840ms step_avg:1101.82ms
step:88/1390 loss:5.4828 train_time:85943ms step_avg:1101.84ms
step:89/1390 loss:5.5059 train_time:87035ms step_avg:1101.72ms
step:90/1390 loss:5.4780 train_time:88136ms step_avg:1101.70ms
step:91/1390 loss:5.5065 train_time:89235ms step_avg:1101.67ms
step:92/1390 loss:5.5161 train_time:90339ms step_avg:1101.69ms
step:93/1390 loss:5.5123 train_time:91443ms step_avg:1101.73ms
step:94/1390 loss:5.5286 train_time:92554ms step_avg:1101.83ms
step:95/1390 loss:5.4991 train_time:93656ms step_avg:1101.84ms
step:96/1390 loss:5.4461 train_time:94759ms step_avg:1101.85ms
step:97/1390 loss:5.5684 train_time:95857ms step_avg:1101.81ms
step:98/1390 loss:5.5211 train_time:96957ms step_avg:1101.78ms
step:99/1390 loss:5.5091 train_time:98048ms step_avg:1101.66ms
step:100/1390 loss:5.4674 train_time:99150ms step_avg:1101.66ms
step:100/1390 val_loss:5.0878 train_time:99180ms step_avg:1102.00ms
step:101/1390 loss:5.0515 train_time:100306ms step_avg:1102.27ms
step:102/1390 loss:5.0378 train_time:101404ms step_avg:1102.22ms
step:103/1390 loss:5.0928 train_time:102510ms step_avg:1102.26ms
step:104/1390 loss:5.1458 train_time:103644ms step_avg:1102.60ms
step:105/1390 loss:5.1263 train_time:104772ms step_avg:1102.86ms
step:106/1390 loss:5.1871 train_time:105896ms step_avg:1103.09ms
step:107/1390 loss:5.1736 train_time:107031ms step_avg:1103.41ms
step:108/1390 loss:5.1330 train_time:108160ms step_avg:1103.67ms
step:109/1390 loss:5.2096 train_time:109299ms step_avg:1104.03ms
step:110/1390 loss:5.2753 train_time:110432ms step_avg:1104.32ms
step:111/1390 loss:5.1915 train_time:111574ms step_avg:1104.69ms
step:112/1390 loss:5.2634 train_time:112700ms step_avg:1104.90ms
step:113/1390 loss:5.2935 train_time:113828ms step_avg:1105.13ms
step:114/1390 loss:5.2409 train_time:114954ms step_avg:1105.32ms
step:115/1390 loss:5.2732 train_time:116076ms step_avg:1105.48ms
step:116/1390 loss:5.2781 train_time:117206ms step_avg:1105.71ms
step:117/1390 loss:5.2518 train_time:118344ms step_avg:1106.01ms
step:118/1390 loss:5.2443 train_time:119485ms step_avg:1106.35ms
step:119/1390 loss:5.2685 train_time:120616ms step_avg:1106.57ms
step:120/1390 loss:5.2445 train_time:121745ms step_avg:1106.77ms
step:121/1390 loss:5.2631 train_time:122873ms step_avg:1106.96ms
step:122/1390 loss:5.1877 train_time:124006ms step_avg:1107.20ms
step:123/1390 loss:5.2792 train_time:125147ms step_avg:1107.49ms
step:124/1390 loss:5.2308 train_time:126289ms step_avg:1107.80ms
step:125/1390 loss:5.2433 train_time:127431ms step_avg:1108.09ms
step:125/1390 val_loss:4.8284 train_time:127462ms step_avg:1108.36ms
step:126/1390 loss:4.8003 train_time:128594ms step_avg:1108.57ms
step:127/1390 loss:4.9054 train_time:129725ms step_avg:1108.76ms
step:128/1390 loss:4.8452 train_time:130853ms step_avg:1108.92ms
step:129/1390 loss:4.9547 train_time:131983ms step_avg:1109.10ms
step:130/1390 loss:4.8665 train_time:133114ms step_avg:1109.28ms
step:131/1390 loss:4.9676 train_time:134244ms step_avg:1109.46ms
step:132/1390 loss:4.9702 train_time:135375ms step_avg:1109.63ms
step:133/1390 loss:4.9353 train_time:136510ms step_avg:1109.83ms
step:134/1390 loss:4.9889 train_time:137657ms step_avg:1110.13ms
step:135/1390 loss:5.0322 train_time:138797ms step_avg:1110.38ms
step:136/1390 loss:4.9782 train_time:139932ms step_avg:1110.57ms
step:137/1390 loss:5.0297 train_time:141064ms step_avg:1110.74ms
step:138/1390 loss:5.0535 train_time:142193ms step_avg:1110.88ms
step:139/1390 loss:5.0369 train_time:143321ms step_avg:1111.02ms
step:140/1390 loss:5.1025 train_time:144457ms step_avg:1111.21ms
step:141/1390 loss:5.0309 train_time:145595ms step_avg:1111.41ms
step:142/1390 loss:5.1418 train_time:146740ms step_avg:1111.66ms
step:143/1390 loss:5.0845 train_time:147869ms step_avg:1111.80ms
step:144/1390 loss:5.1090 train_time:149000ms step_avg:1111.94ms
step:145/1390 loss:5.0911 train_time:150136ms step_avg:1112.12ms
step:146/1390 loss:5.0227 train_time:151274ms step_avg:1112.31ms
step:147/1390 loss:5.0700 train_time:152420ms step_avg:1112.56ms
step:148/1390 loss:5.1115 train_time:153568ms step_avg:1112.81ms
step:149/1390 loss:5.1424 train_time:154718ms step_avg:1113.08ms
step:150/1390 loss:5.1087 train_time:155867ms step_avg:1113.34ms
step:150/1390 val_loss:4.6548 train_time:155898ms step_avg:1113.56ms
step:151/1390 loss:4.6611 train_time:157042ms step_avg:1113.77ms
step:152/1390 loss:4.5846 train_time:158167ms step_avg:1113.86ms
step:153/1390 loss:4.6539 train_time:159290ms step_avg:1113.92ms
step:154/1390 loss:4.6878 train_time:160423ms step_avg:1114.05ms
step:155/1390 loss:4.7625 train_time:161547ms step_avg:1114.12ms
step:156/1390 loss:4.7973 train_time:162680ms step_avg:1114.25ms
step:157/1390 loss:4.8234 train_time:163810ms step_avg:1114.35ms
step:158/1390 loss:4.7586 train_time:164945ms step_avg:1114.49ms
step:159/1390 loss:4.7981 train_time:166081ms step_avg:1114.64ms
step:160/1390 loss:4.8301 train_time:167215ms step_avg:1114.76ms
step:161/1390 loss:4.8438 train_time:168341ms step_avg:1114.84ms
step:162/1390 loss:4.8606 train_time:169472ms step_avg:1114.94ms
step:163/1390 loss:4.8409 train_time:170602ms step_avg:1115.04ms
step:164/1390 loss:4.8894 train_time:171725ms step_avg:1115.10ms
step:165/1390 loss:4.8516 train_time:172847ms step_avg:1115.14ms
step:166/1390 loss:4.9263 train_time:173974ms step_avg:1115.22ms
step:167/1390 loss:4.8578 train_time:175103ms step_avg:1115.31ms
step:168/1390 loss:4.8915 train_time:176227ms step_avg:1115.36ms
step:169/1390 loss:4.8790 train_time:177346ms step_avg:1115.39ms
step:170/1390 loss:4.8466 train_time:178472ms step_avg:1115.45ms
step:171/1390 loss:4.8667 train_time:179607ms step_avg:1115.57ms
step:172/1390 loss:4.9603 train_time:180734ms step_avg:1115.64ms
step:173/1390 loss:4.9060 train_time:181872ms step_avg:1115.78ms
step:174/1390 loss:4.9465 train_time:183009ms step_avg:1115.91ms
step:175/1390 loss:4.9465 train_time:184148ms step_avg:1116.05ms
step:175/1390 val_loss:4.5102 train_time:184180ms step_avg:1116.24ms
step:176/1390 loss:4.5037 train_time:185330ms step_avg:1116.45ms
step:177/1390 loss:4.4939 train_time:186459ms step_avg:1116.52ms
step:178/1390 loss:4.5313 train_time:187589ms step_avg:1116.60ms
step:179/1390 loss:4.5628 train_time:188722ms step_avg:1116.70ms
step:180/1390 loss:4.5596 train_time:189852ms step_avg:1116.78ms
step:181/1390 loss:4.5742 train_time:190981ms step_avg:1116.85ms
step:182/1390 loss:4.6344 train_time:192108ms step_avg:1116.91ms
step:183/1390 loss:4.5889 train_time:193245ms step_avg:1117.02ms
step:184/1390 loss:4.6845 train_time:194372ms step_avg:1117.08ms
step:185/1390 loss:4.6662 train_time:195498ms step_avg:1117.13ms
step:186/1390 loss:4.6627 train_time:196621ms step_avg:1117.16ms
step:187/1390 loss:4.7766 train_time:197743ms step_avg:1117.19ms
step:188/1390 loss:4.7450 train_time:198869ms step_avg:1117.24ms
step:189/1390 loss:4.6698 train_time:200016ms step_avg:1117.41ms
step:190/1390 loss:4.7349 train_time:201155ms step_avg:1117.53ms
step:191/1390 loss:4.6749 train_time:202326ms step_avg:1117.82ms
step:192/1390 loss:4.8040 train_time:203455ms step_avg:1117.88ms
step:193/1390 loss:4.7412 train_time:204595ms step_avg:1118.01ms
step:194/1390 loss:4.8468 train_time:205737ms step_avg:1118.14ms
step:195/1390 loss:4.8236 train_time:206863ms step_avg:1118.18ms
step:196/1390 loss:4.8575 train_time:207998ms step_avg:1118.27ms
step:197/1390 loss:4.7883 train_time:209138ms step_avg:1118.38ms
step:198/1390 loss:4.8329 train_time:210279ms step_avg:1118.50ms
step:199/1390 loss:4.7948 train_time:211423ms step_avg:1118.64ms
step:200/1390 loss:4.9771 train_time:212562ms step_avg:1118.75ms
step:200/1390 val_loss:4.4037 train_time:212594ms step_avg:1118.91ms
step:201/1390 loss:4.4124 train_time:213737ms step_avg:1119.04ms
step:202/1390 loss:4.4274 train_time:214865ms step_avg:1119.09ms
step:203/1390 loss:4.4953 train_time:215991ms step_avg:1119.12ms
step:204/1390 loss:4.4737 train_time:217119ms step_avg:1119.17ms
step:205/1390 loss:4.4840 train_time:218251ms step_avg:1119.24ms
step:206/1390 loss:4.4623 train_time:219378ms step_avg:1119.28ms
step:207/1390 loss:4.5502 train_time:220525ms step_avg:1119.42ms
step:208/1390 loss:4.5410 train_time:221678ms step_avg:1119.59ms
step:209/1390 loss:4.4804 train_time:222817ms step_avg:1119.68ms
step:210/1390 loss:4.5907 train_time:223969ms step_avg:1119.85ms
step:211/1390 loss:4.5625 train_time:225120ms step_avg:1120.00ms
step:212/1390 loss:4.5980 train_time:226267ms step_avg:1120.13ms
step:213/1390 loss:4.6060 train_time:227419ms step_avg:1120.29ms
step:214/1390 loss:4.6204 train_time:228559ms step_avg:1120.39ms
step:215/1390 loss:4.6356 train_time:229708ms step_avg:1120.53ms
step:216/1390 loss:4.5993 train_time:230858ms step_avg:1120.67ms
step:217/1390 loss:4.6894 train_time:232007ms step_avg:1120.80ms
step:218/1390 loss:4.6277 train_time:233146ms step_avg:1120.89ms
step:219/1390 loss:4.6618 train_time:234287ms step_avg:1120.99ms
step:220/1390 loss:4.6604 train_time:235435ms step_avg:1121.12ms
step:221/1390 loss:4.6383 train_time:236582ms step_avg:1121.24ms
step:222/1390 loss:4.6656 train_time:237731ms step_avg:1121.37ms
step:223/1390 loss:4.7576 train_time:238880ms step_avg:1121.50ms
step:224/1390 loss:4.7257 train_time:240032ms step_avg:1121.64ms
step:225/1390 loss:4.6622 train_time:241191ms step_avg:1121.82ms
step:225/1390 val_loss:4.2942 train_time:241221ms step_avg:1121.96ms
step:226/1390 loss:4.2750 train_time:242388ms step_avg:1122.17ms
step:227/1390 loss:4.2835 train_time:243547ms step_avg:1122.33ms
step:228/1390 loss:4.3528 train_time:244710ms step_avg:1122.52ms
step:229/1390 loss:4.3683 train_time:245871ms step_avg:1122.70ms
step:230/1390 loss:4.3258 train_time:247022ms step_avg:1122.83ms
step:231/1390 loss:4.3711 train_time:248178ms step_avg:1122.98ms
step:232/1390 loss:4.3694 train_time:249331ms step_avg:1123.11ms
step:233/1390 loss:4.4545 train_time:250487ms step_avg:1123.26ms
step:234/1390 loss:4.4731 train_time:251630ms step_avg:1123.35ms
step:235/1390 loss:4.5217 train_time:252773ms step_avg:1123.44ms
step:236/1390 loss:4.5394 train_time:253933ms step_avg:1123.60ms
step:237/1390 loss:4.5707 train_time:255082ms step_avg:1123.71ms
step:238/1390 loss:4.5600 train_time:256238ms step_avg:1123.85ms
step:239/1390 loss:4.5079 train_time:257384ms step_avg:1123.95ms
step:240/1390 loss:4.5584 train_time:258541ms step_avg:1124.09ms
step:241/1390 loss:4.6987 train_time:259695ms step_avg:1124.22ms
step:242/1390 loss:4.6687 train_time:260853ms step_avg:1124.37ms
step:243/1390 loss:4.5599 train_time:262005ms step_avg:1124.49ms
step:244/1390 loss:4.6451 train_time:263156ms step_avg:1124.60ms
step:245/1390 loss:4.5706 train_time:264311ms step_avg:1124.73ms
step:246/1390 loss:4.6125 train_time:265460ms step_avg:1124.83ms
step:247/1390 loss:4.5534 train_time:266611ms step_avg:1124.94ms
step:248/1390 loss:4.6210 train_time:267761ms step_avg:1125.05ms
step:249/1390 loss:4.6331 train_time:268913ms step_avg:1125.16ms
step:250/1390 loss:4.6119 train_time:270064ms step_avg:1125.27ms
step:250/1390 val_loss:4.2274 train_time:270094ms step_avg:1125.39ms
step:251/1390 loss:4.2733 train_time:271268ms step_avg:1125.59ms
step:252/1390 loss:4.2784 train_time:272421ms step_avg:1125.71ms
step:253/1390 loss:4.2504 train_time:273583ms step_avg:1125.86ms
step:254/1390 loss:4.2327 train_time:274736ms step_avg:1125.97ms
step:255/1390 loss:4.3345 train_time:275890ms step_avg:1126.08ms
step:256/1390 loss:4.3035 train_time:277033ms step_avg:1126.15ms
step:257/1390 loss:4.3428 train_time:278176ms step_avg:1126.22ms
step:258/1390 loss:4.4499 train_time:279322ms step_avg:1126.30ms
step:259/1390 loss:4.3655 train_time:280475ms step_avg:1126.40ms
step:260/1390 loss:4.3684 train_time:281621ms step_avg:1126.48ms
step:261/1390 loss:4.3888 train_time:282774ms step_avg:1126.59ms
step:262/1390 loss:4.4816 train_time:283943ms step_avg:1126.76ms
step:263/1390 loss:4.4643 train_time:285107ms step_avg:1126.90ms
step:264/1390 loss:4.5294 train_time:286272ms step_avg:1127.05ms
step:265/1390 loss:4.4026 train_time:287423ms step_avg:1127.15ms
step:266/1390 loss:4.5095 train_time:288580ms step_avg:1127.27ms
step:267/1390 loss:4.4753 train_time:289744ms step_avg:1127.41ms
step:268/1390 loss:4.4745 train_time:290897ms step_avg:1127.51ms
step:269/1390 loss:4.5331 train_time:292058ms step_avg:1127.64ms
step:270/1390 loss:4.5326 train_time:293227ms step_avg:1127.80ms
step:271/1390 loss:4.5935 train_time:294395ms step_avg:1127.95ms
step:272/1390 loss:4.5339 train_time:295564ms step_avg:1128.11ms
step:273/1390 loss:4.5797 train_time:296721ms step_avg:1128.22ms
step:274/1390 loss:4.5227 train_time:297891ms step_avg:1128.37ms
step:275/1390 loss:4.5705 train_time:299059ms step_avg:1128.53ms
step:275/1390 val_loss:4.1689 train_time:299091ms step_avg:1128.65ms
step:276/1390 loss:4.2449 train_time:300243ms step_avg:1128.73ms | |
step:277/1390 loss:4.1451 train_time:301408ms step_avg:1128.87ms | |
step:278/1390 loss:4.2127 train_time:302569ms step_avg:1128.99ms | |
step:279/1390 loss:4.1609 train_time:303737ms step_avg:1129.13ms | |
step:280/1390 loss:4.3040 train_time:304885ms step_avg:1129.21ms | |
step:281/1390 loss:4.3114 train_time:306039ms step_avg:1129.30ms | |
step:282/1390 loss:4.2950 train_time:307199ms step_avg:1129.41ms | |
step:283/1390 loss:4.3248 train_time:308365ms step_avg:1129.54ms | |
step:284/1390 loss:4.2845 train_time:309535ms step_avg:1129.69ms | |
step:285/1390 loss:4.3866 train_time:310698ms step_avg:1129.81ms | |
step:286/1390 loss:4.3813 train_time:311863ms step_avg:1129.94ms | |
step:287/1390 loss:4.3610 train_time:313033ms step_avg:1130.08ms | |
step:288/1390 loss:4.4061 train_time:314191ms step_avg:1130.18ms | |
step:289/1390 loss:4.4113 train_time:315339ms step_avg:1130.25ms | |
step:290/1390 loss:4.4094 train_time:316491ms step_avg:1130.33ms | |
step:291/1390 loss:4.4270 train_time:317637ms step_avg:1130.38ms | |
step:292/1390 loss:4.4888 train_time:318786ms step_avg:1130.45ms | |
step:293/1390 loss:4.4360 train_time:319937ms step_avg:1130.52ms | |
step:294/1390 loss:4.6190 train_time:321085ms step_avg:1130.58ms | |
step:295/1390 loss:4.4713 train_time:322237ms step_avg:1130.65ms | |
step:296/1390 loss:4.4769 train_time:323393ms step_avg:1130.75ms | |
step:297/1390 loss:4.5207 train_time:324537ms step_avg:1130.79ms | |
step:298/1390 loss:4.5129 train_time:325689ms step_avg:1130.87ms | |
step:299/1390 loss:4.4952 train_time:326845ms step_avg:1130.95ms | |
step:300/1390 loss:4.5021 train_time:327995ms step_avg:1131.02ms | |
step:300/1390 val_loss:4.1235 train_time:328026ms step_avg:1131.12ms | |
step:301/1390 loss:4.1767 train_time:329184ms step_avg:1131.22ms | |
step:302/1390 loss:4.1031 train_time:330336ms step_avg:1131.29ms | |
step:303/1390 loss:4.1583 train_time:331485ms step_avg:1131.35ms | |
step:304/1390 loss:4.1696 train_time:332624ms step_avg:1131.38ms | |
step:305/1390 loss:4.2073 train_time:333781ms step_avg:1131.46ms | |
step:306/1390 loss:4.1892 train_time:334947ms step_avg:1131.58ms | |
step:307/1390 loss:4.2451 train_time:336108ms step_avg:1131.68ms | |
step:308/1390 loss:4.2516 train_time:337258ms step_avg:1131.74ms | |
step:309/1390 loss:4.2928 train_time:338418ms step_avg:1131.83ms | |
step:310/1390 loss:4.3504 train_time:339600ms step_avg:1132.00ms | |
step:311/1390 loss:4.3548 train_time:340778ms step_avg:1132.15ms | |
step:312/1390 loss:4.2962 train_time:341956ms step_avg:1132.31ms | |
step:313/1390 loss:4.3664 train_time:343136ms step_avg:1132.46ms | |
step:314/1390 loss:4.3291 train_time:344322ms step_avg:1132.64ms | |
step:315/1390 loss:4.3215 train_time:345510ms step_avg:1132.82ms | |
step:316/1390 loss:4.3483 train_time:346692ms step_avg:1132.98ms | |
step:317/1390 loss:4.3533 train_time:347870ms step_avg:1133.13ms | |
step:318/1390 loss:4.3587 train_time:349040ms step_avg:1133.25ms | |
step:319/1390 loss:4.4205 train_time:350216ms step_avg:1133.38ms | |
step:320/1390 loss:4.4113 train_time:351402ms step_avg:1133.56ms | |
step:321/1390 loss:4.4514 train_time:352576ms step_avg:1133.69ms | |
step:322/1390 loss:4.4102 train_time:353750ms step_avg:1133.81ms | |
step:323/1390 loss:4.4693 train_time:354926ms step_avg:1133.95ms | |
step:324/1390 loss:4.4321 train_time:356097ms step_avg:1134.07ms | |
step:325/1390 loss:4.5249 train_time:357282ms step_avg:1134.23ms | |
step:325/1390 val_loss:4.0576 train_time:357314ms step_avg:1134.33ms | |
step:326/1390 loss:4.0745 train_time:358488ms step_avg:1134.46ms | |
step:327/1390 loss:4.2158 train_time:359658ms step_avg:1134.57ms | |
step:328/1390 loss:4.1396 train_time:360818ms step_avg:1134.65ms | |
step:329/1390 loss:4.1355 train_time:361985ms step_avg:1134.75ms | |
step:330/1390 loss:4.0821 train_time:363161ms step_avg:1134.88ms | |
step:331/1390 loss:4.2098 train_time:364343ms step_avg:1135.02ms | |
step:332/1390 loss:4.1452 train_time:365522ms step_avg:1135.16ms | |
step:333/1390 loss:4.1351 train_time:366705ms step_avg:1135.31ms | |
step:334/1390 loss:4.1926 train_time:367889ms step_avg:1135.46ms | |
step:335/1390 loss:4.2986 train_time:369061ms step_avg:1135.57ms | |
step:336/1390 loss:4.2733 train_time:370229ms step_avg:1135.67ms | |
step:337/1390 loss:4.5629 train_time:371400ms step_avg:1135.78ms | |
step:338/1390 loss:4.2832 train_time:372581ms step_avg:1135.92ms | |
step:339/1390 loss:4.2578 train_time:373754ms step_avg:1136.03ms | |
step:340/1390 loss:4.2774 train_time:374929ms step_avg:1136.15ms | |
step:341/1390 loss:4.3334 train_time:376102ms step_avg:1136.26ms | |
step:342/1390 loss:4.3243 train_time:377281ms step_avg:1136.39ms | |
step:343/1390 loss:4.3987 train_time:378469ms step_avg:1136.54ms | |
step:344/1390 loss:4.3569 train_time:379638ms step_avg:1136.64ms | |
step:345/1390 loss:4.3232 train_time:380813ms step_avg:1136.76ms | |
step:346/1390 loss:4.3413 train_time:381987ms step_avg:1136.87ms | |
step:347/1390 loss:4.3462 train_time:383159ms step_avg:1136.97ms | |
step:348/1390 loss:4.4049 train_time:384335ms step_avg:1137.09ms | |
step:349/1390 loss:4.3326 train_time:385501ms step_avg:1137.17ms | |
step:350/1390 loss:4.3316 train_time:386669ms step_avg:1137.26ms | |
step:350/1390 val_loss:4.0226 train_time:386698ms step_avg:1137.35ms | |
step:351/1390 loss:3.9648 train_time:387867ms step_avg:1137.44ms | |
step:352/1390 loss:4.0778 train_time:389034ms step_avg:1137.53ms | |
step:353/1390 loss:3.9981 train_time:390198ms step_avg:1137.60ms | |
step:354/1390 loss:4.0089 train_time:391380ms step_avg:1137.73ms | |
step:355/1390 loss:4.1227 train_time:392549ms step_avg:1137.82ms | |
step:356/1390 loss:4.0582 train_time:393723ms step_avg:1137.93ms | |
step:357/1390 loss:4.1496 train_time:394902ms step_avg:1138.05ms | |
step:358/1390 loss:4.1712 train_time:396084ms step_avg:1138.17ms | |
step:359/1390 loss:4.0948 train_time:397264ms step_avg:1138.29ms | |
step:360/1390 loss:4.1301 train_time:398439ms step_avg:1138.40ms | |
step:361/1390 loss:4.2611 train_time:399626ms step_avg:1138.54ms | |
step:362/1390 loss:4.2260 train_time:400806ms step_avg:1138.65ms | |
step:363/1390 loss:4.2000 train_time:401976ms step_avg:1138.74ms | |
step:364/1390 loss:4.2483 train_time:403143ms step_avg:1138.82ms | |
step:365/1390 loss:4.2976 train_time:404324ms step_avg:1138.94ms | |
step:366/1390 loss:4.2584 train_time:405497ms step_avg:1139.04ms | |
step:367/1390 loss:4.3085 train_time:406672ms step_avg:1139.14ms | |
step:368/1390 loss:4.3005 train_time:407846ms step_avg:1139.24ms | |
step:369/1390 loss:4.3164 train_time:409022ms step_avg:1139.34ms | |
step:370/1390 loss:4.2914 train_time:410206ms step_avg:1139.46ms | |
step:371/1390 loss:4.3341 train_time:411375ms step_avg:1139.54ms | |
step:372/1390 loss:4.3321 train_time:412539ms step_avg:1139.61ms | |
step:373/1390 loss:4.3555 train_time:413714ms step_avg:1139.71ms | |
step:374/1390 loss:4.3839 train_time:414880ms step_avg:1139.78ms | |
step:375/1390 loss:4.3623 train_time:416059ms step_avg:1139.89ms | |
step:375/1390 val_loss:3.9835 train_time:416090ms step_avg:1139.97ms | |
step:376/1390 loss:4.1045 train_time:417258ms step_avg:1140.05ms | |
step:377/1390 loss:3.8956 train_time:418425ms step_avg:1140.12ms | |
step:378/1390 loss:4.0283 train_time:419588ms step_avg:1140.19ms | |
step:379/1390 loss:4.0454 train_time:420751ms step_avg:1140.25ms | |
step:380/1390 loss:4.0255 train_time:421929ms step_avg:1140.35ms | |
step:381/1390 loss:4.1180 train_time:423149ms step_avg:1140.56ms | |
step:382/1390 loss:4.0695 train_time:424319ms step_avg:1140.64ms | |
step:383/1390 loss:4.1096 train_time:425485ms step_avg:1140.71ms | |
step:384/1390 loss:4.1078 train_time:426649ms step_avg:1140.77ms | |
step:385/1390 loss:4.1665 train_time:427826ms step_avg:1140.87ms | |
step:386/1390 loss:4.1703 train_time:428994ms step_avg:1140.94ms | |
step:387/1390 loss:4.2106 train_time:430175ms step_avg:1141.05ms | |
step:388/1390 loss:4.2096 train_time:431360ms step_avg:1141.16ms | |
step:389/1390 loss:4.1694 train_time:432531ms step_avg:1141.24ms | |
step:390/1390 loss:4.2428 train_time:433697ms step_avg:1141.31ms | |
step:391/1390 loss:4.1795 train_time:434869ms step_avg:1141.39ms | |
step:392/1390 loss:4.2365 train_time:436049ms step_avg:1141.49ms | |
step:393/1390 loss:4.2888 train_time:437235ms step_avg:1141.61ms | |
step:394/1390 loss:4.2954 train_time:438423ms step_avg:1141.73ms | |
step:395/1390 loss:4.2221 train_time:439598ms step_avg:1141.81ms | |
step:396/1390 loss:4.1558 train_time:440778ms step_avg:1141.91ms | |
step:397/1390 loss:4.3548 train_time:441961ms step_avg:1142.02ms | |
step:398/1390 loss:4.2954 train_time:443136ms step_avg:1142.10ms | |
step:399/1390 loss:4.2571 train_time:444307ms step_avg:1142.18ms | |
step:400/1390 loss:4.3640 train_time:445485ms step_avg:1142.27ms | |
step:400/1390 val_loss:3.9492 train_time:445516ms step_avg:1142.35ms | |
step:401/1390 loss:3.9440 train_time:446696ms step_avg:1142.45ms | |
step:402/1390 loss:3.9687 train_time:447872ms step_avg:1142.53ms | |
step:403/1390 loss:3.9735 train_time:449049ms step_avg:1142.62ms | |
step:404/1390 loss:3.9810 train_time:450231ms step_avg:1142.72ms | |
step:405/1390 loss:4.0580 train_time:451409ms step_avg:1142.81ms | |
step:406/1390 loss:4.0492 train_time:452585ms step_avg:1142.89ms | |
step:407/1390 loss:4.1955 train_time:453758ms step_avg:1142.97ms | |
step:408/1390 loss:4.1093 train_time:454928ms step_avg:1143.04ms | |
step:409/1390 loss:4.0826 train_time:456110ms step_avg:1143.13ms | |
step:410/1390 loss:4.1101 train_time:457291ms step_avg:1143.23ms | |
step:411/1390 loss:4.0997 train_time:458467ms step_avg:1143.31ms | |
step:412/1390 loss:4.2052 train_time:459641ms step_avg:1143.39ms | |
step:413/1390 loss:4.1362 train_time:460842ms step_avg:1143.53ms | |
step:414/1390 loss:4.1931 train_time:462041ms step_avg:1143.67ms | |
step:415/1390 loss:4.2573 train_time:463228ms step_avg:1143.77ms | |
step:416/1390 loss:4.1782 train_time:464413ms step_avg:1143.87ms | |
step:417/1390 loss:4.2092 train_time:465596ms step_avg:1143.97ms | |
step:418/1390 loss:4.2264 train_time:466773ms step_avg:1144.05ms | |
step:419/1390 loss:4.2052 train_time:467957ms step_avg:1144.15ms | |
step:420/1390 loss:4.2063 train_time:469138ms step_avg:1144.24ms | |
step:421/1390 loss:4.2155 train_time:470312ms step_avg:1144.31ms | |
step:422/1390 loss:4.1976 train_time:471488ms step_avg:1144.39ms | |
step:423/1390 loss:4.2518 train_time:472679ms step_avg:1144.50ms | |
step:424/1390 loss:4.2783 train_time:473857ms step_avg:1144.58ms | |
step:425/1390 loss:4.3095 train_time:475031ms step_avg:1144.65ms | |
step:425/1390 val_loss:3.9127 train_time:475062ms step_avg:1144.73ms | |
step:426/1390 loss:3.8890 train_time:476242ms step_avg:1144.81ms | |
step:427/1390 loss:3.9435 train_time:477420ms step_avg:1144.89ms | |
step:428/1390 loss:3.9055 train_time:478596ms step_avg:1144.97ms | |
step:429/1390 loss:3.9021 train_time:479769ms step_avg:1145.03ms | |
step:430/1390 loss:3.9483 train_time:480955ms step_avg:1145.13ms | |
step:431/1390 loss:4.0416 train_time:482123ms step_avg:1145.19ms | |
step:432/1390 loss:4.0046 train_time:483309ms step_avg:1145.28ms | |
step:433/1390 loss:4.0912 train_time:484486ms step_avg:1145.36ms | |
step:434/1390 loss:4.0471 train_time:485666ms step_avg:1145.44ms | |
step:435/1390 loss:4.0797 train_time:486842ms step_avg:1145.51ms | |
step:436/1390 loss:4.1453 train_time:488023ms step_avg:1145.59ms | |
step:437/1390 loss:4.1031 train_time:489202ms step_avg:1145.67ms | |
step:438/1390 loss:4.1306 train_time:490387ms step_avg:1145.76ms | |
step:439/1390 loss:4.0704 train_time:491571ms step_avg:1145.85ms | |
step:440/1390 loss:4.1350 train_time:492758ms step_avg:1145.95ms | |
step:441/1390 loss:4.1030 train_time:493933ms step_avg:1146.02ms | |
step:442/1390 loss:4.1458 train_time:495107ms step_avg:1146.08ms | |
step:443/1390 loss:4.1259 train_time:496286ms step_avg:1146.16ms | |
step:444/1390 loss:4.2406 train_time:497466ms step_avg:1146.23ms | |
step:445/1390 loss:4.2159 train_time:498643ms step_avg:1146.31ms | |
step:446/1390 loss:4.2117 train_time:499826ms step_avg:1146.39ms | |
step:447/1390 loss:4.2077 train_time:501019ms step_avg:1146.50ms | |
step:448/1390 loss:4.2562 train_time:502204ms step_avg:1146.59ms | |
step:449/1390 loss:4.2349 train_time:503380ms step_avg:1146.65ms | |
step:450/1390 loss:4.2352 train_time:504558ms step_avg:1146.72ms | |
step:450/1390 val_loss:3.8838 train_time:504589ms step_avg:1146.79ms | |
step:451/1390 loss:4.0341 train_time:505789ms step_avg:1146.91ms | |
step:452/1390 loss:3.9372 train_time:506987ms step_avg:1147.03ms | |
step:453/1390 loss:3.9981 train_time:508185ms step_avg:1147.14ms | |
step:454/1390 loss:3.8504 train_time:509375ms step_avg:1147.24ms | |
step:455/1390 loss:3.9254 train_time:510558ms step_avg:1147.32ms | |
step:456/1390 loss:3.9584 train_time:511732ms step_avg:1147.38ms | |
step:457/1390 loss:3.9646 train_time:512915ms step_avg:1147.46ms | |
step:458/1390 loss:4.0345 train_time:514091ms step_avg:1147.52ms | |
step:459/1390 loss:4.0279 train_time:515271ms step_avg:1147.60ms | |
step:460/1390 loss:4.0972 train_time:516457ms step_avg:1147.68ms | |
step:461/1390 loss:4.0871 train_time:517627ms step_avg:1147.73ms | |
step:462/1390 loss:3.9833 train_time:518810ms step_avg:1147.81ms | |
step:463/1390 loss:4.0921 train_time:519995ms step_avg:1147.89ms | |
step:464/1390 loss:4.1233 train_time:521172ms step_avg:1147.95ms | |
step:465/1390 loss:4.1086 train_time:522355ms step_avg:1148.03ms | |
step:466/1390 loss:4.1387 train_time:523537ms step_avg:1148.11ms | |
step:467/1390 loss:4.1145 train_time:524716ms step_avg:1148.18ms | |
step:468/1390 loss:4.1734 train_time:525890ms step_avg:1148.23ms | |
step:469/1390 loss:4.1595 train_time:527079ms step_avg:1148.32ms | |
step:470/1390 loss:4.0882 train_time:528254ms step_avg:1148.38ms | |
step:471/1390 loss:4.2481 train_time:529445ms step_avg:1148.47ms | |
step:472/1390 loss:4.1677 train_time:530642ms step_avg:1148.58ms | |
step:473/1390 loss:4.1902 train_time:531829ms step_avg:1148.66ms | |
step:474/1390 loss:4.2027 train_time:533020ms step_avg:1148.75ms | |
step:475/1390 loss:4.2714 train_time:534189ms step_avg:1148.79ms | |
step:475/1390 val_loss:3.8580 train_time:534218ms step_avg:1148.86ms | |
step:476/1390 loss:3.8912 train_time:535422ms step_avg:1148.97ms | |
step:477/1390 loss:3.8424 train_time:536611ms step_avg:1149.06ms | |
step:478/1390 loss:3.9068 train_time:537791ms step_avg:1149.13ms | |
step:479/1390 loss:3.8570 train_time:538979ms step_avg:1149.21ms | |
step:480/1390 loss:4.0153 train_time:540171ms step_avg:1149.30ms | |
step:481/1390 loss:3.9361 train_time:541353ms step_avg:1149.37ms | |
step:482/1390 loss:3.8798 train_time:542550ms step_avg:1149.47ms | |
step:483/1390 loss:4.0107 train_time:543737ms step_avg:1149.55ms | |
step:484/1390 loss:4.0078 train_time:544928ms step_avg:1149.64ms | |
step:485/1390 loss:3.9216 train_time:546113ms step_avg:1149.71ms | |
step:486/1390 loss:4.0419 train_time:547301ms step_avg:1149.79ms | |
step:487/1390 loss:4.0198 train_time:548491ms step_avg:1149.88ms | |
step:488/1390 loss:4.0417 train_time:549678ms step_avg:1149.95ms | |
step:489/1390 loss:4.0732 train_time:550868ms step_avg:1150.04ms | |
step:490/1390 loss:4.0304 train_time:552056ms step_avg:1150.12ms | |
step:491/1390 loss:4.0813 train_time:553244ms step_avg:1150.19ms | |
step:492/1390 loss:4.1480 train_time:554434ms step_avg:1150.28ms | |
step:493/1390 loss:4.0802 train_time:555619ms step_avg:1150.35ms | |
step:494/1390 loss:4.0900 train_time:556810ms step_avg:1150.43ms | |
step:495/1390 loss:4.1609 train_time:558003ms step_avg:1150.52ms | |
step:496/1390 loss:4.0886 train_time:559192ms step_avg:1150.60ms | |
step:497/1390 loss:4.1728 train_time:560378ms step_avg:1150.67ms | |
step:498/1390 loss:4.1442 train_time:561572ms step_avg:1150.76ms | |
step:499/1390 loss:4.2253 train_time:562760ms step_avg:1150.84ms | |
step:500/1390 loss:4.1653 train_time:563953ms step_avg:1150.92ms | |
step:500/1390 val_loss:3.8343 train_time:563984ms step_avg:1150.99ms | |
step:501/1390 loss:3.9043 train_time:565170ms step_avg:1151.06ms | |
step:502/1390 loss:3.8655 train_time:566334ms step_avg:1151.08ms | |
step:503/1390 loss:3.8159 train_time:567502ms step_avg:1151.12ms | |
step:504/1390 loss:3.9187 train_time:568676ms step_avg:1151.17ms | |
step:505/1390 loss:3.8951 train_time:569855ms step_avg:1151.22ms | |
step:506/1390 loss:3.8582 train_time:571030ms step_avg:1151.27ms | |
step:507/1390 loss:4.0080 train_time:572197ms step_avg:1151.30ms | |
step:508/1390 loss:3.9221 train_time:573373ms step_avg:1151.35ms | |
step:509/1390 loss:3.9786 train_time:574541ms step_avg:1151.38ms | |
step:510/1390 loss:3.9478 train_time:575720ms step_avg:1151.44ms | |
step:511/1390 loss:3.9442 train_time:576898ms step_avg:1151.49ms | |
step:512/1390 loss:4.0151 train_time:578087ms step_avg:1151.57ms | |
step:513/1390 loss:4.0243 train_time:579266ms step_avg:1151.62ms | |
step:514/1390 loss:4.1451 train_time:580447ms step_avg:1151.68ms | |
step:515/1390 loss:4.0206 train_time:581630ms step_avg:1151.74ms | |
step:516/1390 loss:4.1086 train_time:582815ms step_avg:1151.81ms | |
step:517/1390 loss:4.0911 train_time:584010ms step_avg:1151.89ms | |
step:518/1390 loss:4.0530 train_time:585196ms step_avg:1151.96ms | |
step:519/1390 loss:4.1452 train_time:586383ms step_avg:1152.03ms | |
step:520/1390 loss:4.0762 train_time:587570ms step_avg:1152.10ms | |
step:521/1390 loss:4.0757 train_time:588758ms step_avg:1152.17ms | |
step:522/1390 loss:4.1522 train_time:589944ms step_avg:1152.23ms | |
step:523/1390 loss:4.1142 train_time:591130ms step_avg:1152.30ms | |
step:524/1390 loss:4.0971 train_time:592317ms step_avg:1152.37ms | |
step:525/1390 loss:4.0870 train_time:593514ms step_avg:1152.45ms | |
step:525/1390 val_loss:3.8097 train_time:593544ms step_avg:1152.51ms | |
step:526/1390 loss:3.8820 train_time:594743ms step_avg:1152.60ms | |
step:527/1390 loss:3.8105 train_time:595935ms step_avg:1152.68ms | |
step:528/1390 loss:3.8335 train_time:597115ms step_avg:1152.73ms | |
step:529/1390 loss:3.8828 train_time:598299ms step_avg:1152.79ms | |
step:530/1390 loss:3.8666 train_time:599487ms step_avg:1152.86ms | |
step:531/1390 loss:3.8403 train_time:600681ms step_avg:1152.94ms | |
step:532/1390 loss:3.9314 train_time:601866ms step_avg:1153.00ms | |
step:533/1390 loss:3.9127 train_time:603054ms step_avg:1153.07ms | |
step:534/1390 loss:3.9488 train_time:604243ms step_avg:1153.14ms | |
step:535/1390 loss:4.0577 train_time:605444ms step_avg:1153.23ms | |
step:536/1390 loss:3.9457 train_time:606645ms step_avg:1153.32ms | |
step:537/1390 loss:3.9302 train_time:607850ms step_avg:1153.42ms | |
step:538/1390 loss:3.9578 train_time:609054ms step_avg:1153.51ms | |
step:539/1390 loss:4.1490 train_time:610247ms step_avg:1153.59ms | |
step:540/1390 loss:3.9774 train_time:611443ms step_avg:1153.67ms | |
step:541/1390 loss:4.0769 train_time:612645ms step_avg:1153.76ms | |
step:542/1390 loss:4.0341 train_time:613844ms step_avg:1153.84ms | |
step:543/1390 loss:4.0484 train_time:615038ms step_avg:1153.92ms | |
step:544/1390 loss:4.0744 train_time:616237ms step_avg:1154.00ms | |
step:545/1390 loss:4.0472 train_time:617425ms step_avg:1154.07ms | |
step:546/1390 loss:4.0331 train_time:618627ms step_avg:1154.15ms | |
step:547/1390 loss:4.0973 train_time:619822ms step_avg:1154.23ms | |
step:548/1390 loss:4.0138 train_time:621017ms step_avg:1154.31ms | |
step:549/1390 loss:4.0801 train_time:622205ms step_avg:1154.37ms | |
step:550/1390 loss:4.1290 train_time:623401ms step_avg:1154.45ms | |
step:550/1390 val_loss:3.7906 train_time:623430ms step_avg:1154.50ms | |
step:551/1390 loss:3.7648 train_time:624631ms step_avg:1154.59ms | |
step:552/1390 loss:3.9004 train_time:625832ms step_avg:1154.67ms | |
step:553/1390 loss:3.8135 train_time:627031ms step_avg:1154.75ms | |
step:554/1390 loss:3.8111 train_time:628235ms step_avg:1154.84ms | |
step:555/1390 loss:3.8898 train_time:629431ms step_avg:1154.92ms | |
step:556/1390 loss:3.8436 train_time:630621ms step_avg:1154.98ms | |
step:557/1390 loss:3.8136 train_time:631828ms step_avg:1155.08ms | |
step:558/1390 loss:3.8895 train_time:633008ms step_avg:1155.12ms | |
step:559/1390 loss:3.9715 train_time:634201ms step_avg:1155.19ms | |
step:560/1390 loss:3.9269 train_time:635396ms step_avg:1155.26ms | |
step:561/1390 loss:3.9209 train_time:636588ms step_avg:1155.33ms | |
step:562/1390 loss:3.9319 train_time:637778ms step_avg:1155.39ms | |
step:563/1390 loss:3.9428 train_time:638976ms step_avg:1155.47ms | |
step:564/1390 loss:4.0011 train_time:640169ms step_avg:1155.54ms | |
step:565/1390 loss:3.9800 train_time:641361ms step_avg:1155.61ms | |
step:566/1390 loss:4.0312 train_time:642550ms step_avg:1155.66ms | |
step:567/1390 loss:4.0071 train_time:643749ms step_avg:1155.74ms | |
step:568/1390 loss:4.1010 train_time:644945ms step_avg:1155.82ms | |
step:569/1390 loss:4.0422 train_time:646139ms step_avg:1155.88ms | |
step:570/1390 loss:4.0217 train_time:647345ms step_avg:1155.97ms | |
step:571/1390 loss:3.9951 train_time:648577ms step_avg:1156.11ms | |
step:572/1390 loss:4.0767 train_time:649771ms step_avg:1156.18ms | |
step:573/1390 loss:4.1573 train_time:650975ms step_avg:1156.26ms | |
step:574/1390 loss:4.0559 train_time:652167ms step_avg:1156.32ms | |
step:575/1390 loss:4.1214 train_time:653371ms step_avg:1156.41ms | |
step:575/1390 val_loss:3.7703 train_time:653403ms step_avg:1156.46ms | |
step:576/1390 loss:3.7863 train_time:654606ms step_avg:1156.55ms | |
step:577/1390 loss:3.7912 train_time:655801ms step_avg:1156.62ms | |
step:578/1390 loss:3.8568 train_time:656994ms step_avg:1156.68ms | |
step:579/1390 loss:3.7778 train_time:658184ms step_avg:1156.74ms | |
step:580/1390 loss:3.8628 train_time:659385ms step_avg:1156.82ms | |
step:581/1390 loss:3.8776 train_time:660576ms step_avg:1156.88ms | |
step:582/1390 loss:3.9067 train_time:661763ms step_avg:1156.93ms | |
step:583/1390 loss:3.8919 train_time:662947ms step_avg:1156.98ms | |
step:584/1390 loss:3.8615 train_time:664135ms step_avg:1157.03ms | |
step:585/1390 loss:3.9266 train_time:665322ms step_avg:1157.08ms | |
step:586/1390 loss:4.0284 train_time:666509ms step_avg:1157.13ms | |
step:587/1390 loss:3.9050 train_time:667691ms step_avg:1157.18ms | |
step:588/1390 loss:3.9872 train_time:668871ms step_avg:1157.22ms | |
step:589/1390 loss:4.0467 train_time:670053ms step_avg:1157.26ms | |
step:590/1390 loss:3.9661 train_time:671232ms step_avg:1157.30ms | |
step:591/1390 loss:3.9966 train_time:672424ms step_avg:1157.36ms | |
step:592/1390 loss:3.9852 train_time:673614ms step_avg:1157.41ms | |
step:593/1390 loss:3.9529 train_time:674805ms step_avg:1157.47ms | |
step:594/1390 loss:4.0904 train_time:675994ms step_avg:1157.52ms | |
step:595/1390 loss:4.0461 train_time:677184ms step_avg:1157.58ms | |
step:596/1390 loss:4.0328 train_time:678371ms step_avg:1157.63ms | |
step:597/1390 loss:4.0540 train_time:679565ms step_avg:1157.69ms | |
step:598/1390 loss:4.0774 train_time:680749ms step_avg:1157.74ms | |
step:599/1390 loss:4.0368 train_time:681935ms step_avg:1157.78ms | |
step:600/1390 loss:4.0618 train_time:683120ms step_avg:1157.83ms | |
step:600/1390 val_loss:3.7517 train_time:683149ms step_avg:1157.88ms | |
step:601/1390 loss:3.7713 train_time:684344ms step_avg:1157.94ms | |
step:602/1390 loss:3.7686 train_time:685537ms step_avg:1158.00ms | |
step:603/1390 loss:3.8116 train_time:686726ms step_avg:1158.05ms | |
step:604/1390 loss:3.8400 train_time:687911ms step_avg:1158.10ms | |
step:605/1390 loss:3.8054 train_time:689106ms step_avg:1158.16ms | |
step:606/1390 loss:3.8325 train_time:690294ms step_avg:1158.21ms | |
step:607/1390 loss:3.8442 train_time:691485ms step_avg:1158.27ms | |
step:608/1390 loss:3.8525 train_time:692673ms step_avg:1158.32ms | |
step:609/1390 loss:3.8898 train_time:693859ms step_avg:1158.36ms | |
step:610/1390 loss:3.8706 train_time:695040ms step_avg:1158.40ms | |
step:611/1390 loss:3.9127 train_time:696235ms step_avg:1158.46ms | |
step:612/1390 loss:3.8521 train_time:697436ms step_avg:1158.53ms | |
step:613/1390 loss:3.9128 train_time:698628ms step_avg:1158.59ms | |
step:614/1390 loss:3.9722 train_time:699822ms step_avg:1158.65ms | |
step:615/1390 loss:3.9468 train_time:701010ms step_avg:1158.69ms | |
step:616/1390 loss:3.9825 train_time:702209ms step_avg:1158.76ms | |
step:617/1390 loss:3.9609 train_time:703403ms step_avg:1158.82ms | |
step:618/1390 loss:4.0141 train_time:704596ms step_avg:1158.87ms | |
step:619/1390 loss:4.0405 train_time:705820ms step_avg:1158.98ms | |
step:620/1390 loss:3.9793 train_time:707031ms step_avg:1159.07ms | |
step:621/1390 loss:4.1000 train_time:708239ms step_avg:1159.15ms | |
step:622/1390 loss:4.0504 train_time:709431ms step_avg:1159.20ms | |
step:623/1390 loss:4.0224 train_time:710639ms step_avg:1159.28ms | |
step:624/1390 loss:4.0852 train_time:711838ms step_avg:1159.35ms | |
step:625/1390 loss:4.0654 train_time:713051ms step_avg:1159.43ms | |
step:625/1390 val_loss:3.7320 train_time:713081ms step_avg:1159.48ms | |
step:626/1390 loss:3.7779 train_time:714299ms step_avg:1159.58ms | |
step:627/1390 loss:3.7742 train_time:715511ms step_avg:1159.66ms | |
step:628/1390 loss:3.7212 train_time:716720ms step_avg:1159.74ms | |
step:629/1390 loss:3.7611 train_time:717929ms step_avg:1159.82ms | |
step:630/1390 loss:3.7764 train_time:719138ms step_avg:1159.90ms | |
step:631/1390 loss:3.7957 train_time:720339ms step_avg:1159.97ms | |
step:632/1390 loss:3.8254 train_time:721542ms step_avg:1160.04ms | |
step:633/1390 loss:3.8492 train_time:722750ms step_avg:1160.11ms | |
step:634/1390 loss:3.8152 train_time:723956ms step_avg:1160.19ms | |
step:635/1390 loss:3.9228 train_time:725167ms step_avg:1160.27ms | |
step:636/1390 loss:3.8403 train_time:726373ms step_avg:1160.34ms | |
step:637/1390 loss:3.8204 train_time:727582ms step_avg:1160.42ms | |
step:638/1390 loss:3.9518 train_time:728791ms step_avg:1160.49ms | |
step:639/1390 loss:3.9035 train_time:729994ms step_avg:1160.56ms | |
step:640/1390 loss:3.9384 train_time:731195ms step_avg:1160.63ms | |
step:641/1390 loss:4.0245 train_time:732394ms step_avg:1160.69ms | |
step:642/1390 loss:3.8984 train_time:733605ms step_avg:1160.77ms | |
step:643/1390 loss:4.0057 train_time:734806ms step_avg:1160.83ms | |
step:644/1390 loss:3.9543 train_time:736010ms step_avg:1160.90ms | |
step:645/1390 loss:4.0038 train_time:737209ms step_avg:1160.96ms | |
step:646/1390 loss:4.0273 train_time:738397ms step_avg:1161.00ms | |
step:647/1390 loss:4.0969 train_time:739594ms step_avg:1161.06ms | |
step:648/1390 loss:4.0269 train_time:740790ms step_avg:1161.11ms | |
step:649/1390 loss:4.0180 train_time:741996ms step_avg:1161.18ms | |
step:650/1390 loss:4.0043 train_time:743191ms step_avg:1161.24ms | |
step:650/1390 val_loss:3.7173 train_time:743219ms step_avg:1161.28ms | |
step:651/1390 loss:3.6854 train_time:744434ms step_avg:1161.36ms | |
step:652/1390 loss:3.6928 train_time:745632ms step_avg:1161.42ms | |
step:653/1390 loss:3.7283 train_time:746833ms step_avg:1161.48ms | |
step:654/1390 loss:3.7806 train_time:748032ms step_avg:1161.54ms | |
step:655/1390 loss:3.7611 train_time:749228ms step_avg:1161.59ms | |
step:656/1390 loss:3.8379 train_time:750429ms step_avg:1161.66ms | |
step:657/1390 loss:3.7548 train_time:751631ms step_avg:1161.72ms | |
step:658/1390 loss:3.8808 train_time:752829ms step_avg:1161.77ms | |
step:659/1390 loss:3.8783 train_time:754027ms step_avg:1161.83ms | |
step:660/1390 loss:3.9210 train_time:755224ms step_avg:1161.88ms | |
step:661/1390 loss:3.9205 train_time:756416ms step_avg:1161.93ms | |
step:662/1390 loss:3.9488 train_time:757609ms step_avg:1161.98ms | |
step:663/1390 loss:3.8520 train_time:758810ms step_avg:1162.04ms | |
step:664/1390 loss:3.8670 train_time:760009ms step_avg:1162.09ms | |
step:665/1390 loss:3.9008 train_time:761214ms step_avg:1162.16ms | |
step:666/1390 loss:3.9649 train_time:762407ms step_avg:1162.21ms | |
step:667/1390 loss:3.9817 train_time:763620ms step_avg:1162.28ms | |
step:668/1390 loss:3.9809 train_time:764825ms step_avg:1162.35ms | |
step:669/1390 loss:3.9770 train_time:766031ms step_avg:1162.41ms | |
step:670/1390 loss:3.9692 train_time:767224ms step_avg:1162.46ms | |
step:671/1390 loss:3.9589 train_time:768414ms step_avg:1162.50ms | |
step:672/1390 loss:4.0455 train_time:769609ms step_avg:1162.55ms | |
step:673/1390 loss:3.9824 train_time:770809ms step_avg:1162.61ms | |
step:674/1390 loss:3.9820 train_time:772012ms step_avg:1162.67ms | |
step:675/1390 loss:4.0699 train_time:773201ms step_avg:1162.71ms | |
step:675/1390 val_loss:3.7014 train_time:773229ms step_avg:1162.75ms | |
step:676/1390 loss:3.7068 train_time:774452ms step_avg:1162.84ms | |
step:677/1390 loss:3.6988 train_time:775650ms step_avg:1162.89ms | |
step:678/1390 loss:3.7259 train_time:776855ms step_avg:1162.96ms | |
step:679/1390 loss:3.7496 train_time:778050ms step_avg:1163.00ms | |
step:680/1390 loss:3.7839 train_time:779241ms step_avg:1163.05ms | |
step:681/1390 loss:3.7202 train_time:780436ms step_avg:1163.09ms | |
step:682/1390 loss:3.8127 train_time:781634ms step_avg:1163.15ms | |
step:683/1390 loss:3.8935 train_time:782843ms step_avg:1163.21ms | |
step:684/1390 loss:3.8372 train_time:784049ms step_avg:1163.28ms | |
step:685/1390 loss:3.8887 train_time:785247ms step_avg:1163.33ms | |
step:686/1390 loss:3.9164 train_time:786441ms step_avg:1163.37ms | |
step:687/1390 loss:3.8317 train_time:787646ms step_avg:1163.44ms | |
step:688/1390 loss:3.8426 train_time:788854ms step_avg:1163.50ms | |
step:689/1390 loss:3.8751 train_time:790060ms step_avg:1163.56ms | |
step:690/1390 loss:3.9111 train_time:791269ms step_avg:1163.63ms | |
step:691/1390 loss:3.9342 train_time:792473ms step_avg:1163.69ms | |
step:692/1390 loss:3.9075 train_time:793681ms step_avg:1163.76ms | |
step:693/1390 loss:3.9553 train_time:794887ms step_avg:1163.82ms | |
step:694/1390 loss:3.9335 train_time:796083ms step_avg:1163.86ms | |
step:695/1390 loss:3.8912 train_time:797287ms step_avg:1163.92ms | |
step:696/1390 loss:4.0208 train_time:798489ms step_avg:1163.98ms | |
step:697/1390 loss:3.9211 train_time:799685ms step_avg:1164.02ms | |
step:698/1390 loss:3.9714 train_time:800880ms step_avg:1164.07ms | |
step:699/1390 loss:4.0176 train_time:802080ms step_avg:1164.12ms | |
step:700/1390 loss:3.9528 train_time:803270ms step_avg:1164.16ms | |
step:700/1390 val_loss:3.6889 train_time:803299ms step_avg:1164.20ms | |
step:701/1390 loss:3.6789 train_time:804498ms step_avg:1164.25ms | |
step:702/1390 loss:3.6832 train_time:805690ms step_avg:1164.29ms | |
step:703/1390 loss:3.6283 train_time:806889ms step_avg:1164.34ms | |
step:704/1390 loss:3.7252 train_time:808095ms step_avg:1164.40ms | |
step:705/1390 loss:3.6516 train_time:809306ms step_avg:1164.47ms | |
step:706/1390 loss:3.7189 train_time:810498ms step_avg:1164.51ms | |
step:707/1390 loss:3.8047 train_time:811707ms step_avg:1164.57ms | |
step:708/1390 loss:3.8075 train_time:812909ms step_avg:1164.63ms | |
step:709/1390 loss:3.8015 train_time:814111ms step_avg:1164.68ms | |
step:710/1390 loss:3.7995 train_time:815318ms step_avg:1164.74ms | |
step:711/1390 loss:3.8239 train_time:816522ms step_avg:1164.80ms | |
step:712/1390 loss:3.7965 train_time:817722ms step_avg:1164.85ms | |
step:713/1390 loss:3.9011 train_time:818919ms step_avg:1164.89ms | |
step:714/1390 loss:3.7731 train_time:820125ms step_avg:1164.95ms | |
step:715/1390 loss:3.9758 train_time:821324ms step_avg:1165.00ms | |
step:716/1390 loss:3.8776 train_time:822524ms step_avg:1165.05ms | |
step:717/1390 loss:3.9374 train_time:823729ms step_avg:1165.10ms | |
step:718/1390 loss:3.9413 train_time:824926ms step_avg:1165.15ms | |
step:719/1390 loss:3.9451 train_time:826123ms step_avg:1165.19ms | |
step:720/1390 loss:3.9604 train_time:827320ms step_avg:1165.24ms | |
step:721/1390 loss:3.9509 train_time:828514ms step_avg:1165.28ms | |
step:722/1390 loss:3.9563 train_time:829730ms step_avg:1165.35ms | |
step:723/1390 loss:4.0111 train_time:830948ms step_avg:1165.42ms | |
step:724/1390 loss:3.9199 train_time:832169ms step_avg:1165.50ms | |
step:725/1390 loss:3.9358 train_time:833395ms step_avg:1165.59ms | |
step:725/1390 val_loss:3.6739 train_time:833426ms step_avg:1165.63ms | |
step:726/1390 loss:3.7005 train_time:834646ms step_avg:1165.71ms | |
step:727/1390 loss:3.6638 train_time:835858ms step_avg:1165.77ms | |
step:728/1390 loss:3.6999 train_time:837067ms step_avg:1165.83ms | |
step:729/1390 loss:3.7485 train_time:838278ms step_avg:1165.89ms | |
step:730/1390 loss:3.7913 train_time:839492ms step_avg:1165.96ms | |
step:731/1390 loss:3.7805 train_time:840697ms step_avg:1166.02ms | |
step:732/1390 loss:3.7915 train_time:841908ms step_avg:1166.08ms | |
step:733/1390 loss:3.8136 train_time:843111ms step_avg:1166.13ms | |
step:734/1390 loss:3.8902 train_time:844321ms step_avg:1166.19ms | |
step:735/1390 loss:3.8131 train_time:845532ms step_avg:1166.25ms | |
step:736/1390 loss:3.8223 train_time:846749ms step_avg:1166.32ms | |
step:737/1390 loss:3.8760 train_time:847967ms step_avg:1166.39ms | |
step:738/1390 loss:3.8561 train_time:849170ms step_avg:1166.44ms | |
step:739/1390 loss:3.8741 train_time:850369ms step_avg:1166.49ms | |
step:740/1390 loss:3.9965 train_time:851585ms step_avg:1166.56ms | |
step:741/1390 loss:3.8753 train_time:852795ms step_avg:1166.61ms | |
step:742/1390 loss:3.8675 train_time:854009ms step_avg:1166.68ms | |
step:743/1390 loss:3.9325 train_time:855219ms step_avg:1166.74ms | |
step:744/1390 loss:3.8151 train_time:856429ms step_avg:1166.80ms | |
step:745/1390 loss:3.9061 train_time:857644ms step_avg:1166.86ms | |
step:746/1390 loss:3.9416 train_time:858848ms step_avg:1166.91ms | |
step:747/1390 loss:3.9207 train_time:860059ms step_avg:1166.97ms | |
step:748/1390 loss:3.9720 train_time:861276ms step_avg:1167.04ms | |
step:749/1390 loss:3.9019 train_time:862488ms step_avg:1167.10ms | |
step:750/1390 loss:3.9852 train_time:863700ms step_avg:1167.16ms | |
step:750/1390 val_loss:3.6623 train_time:863729ms step_avg:1167.20ms | |
step:751/1390 loss:3.6450 train_time:864952ms step_avg:1167.28ms | |
step:752/1390 loss:3.6926 train_time:866158ms step_avg:1167.33ms | |
step:753/1390 loss:3.6970 train_time:867370ms step_avg:1167.39ms | |
step:754/1390 loss:3.7985 train_time:868588ms step_avg:1167.46ms | |
step:755/1390 loss:3.7598 train_time:869795ms step_avg:1167.51ms | |
step:756/1390 loss:3.8181 train_time:871007ms step_avg:1167.57ms | |
step:757/1390 loss:3.7562 train_time:872225ms step_avg:1167.64ms | |
step:758/1390 loss:3.7872 train_time:873424ms step_avg:1167.68ms | |
step:759/1390 loss:3.7516 train_time:874636ms step_avg:1167.74ms | |
step:760/1390 loss:3.8137 train_time:875845ms step_avg:1167.79ms | |
step:761/1390 loss:3.7758 train_time:877084ms step_avg:1167.89ms | |
step:762/1390 loss:3.7822 train_time:878288ms step_avg:1167.94ms | |
step:763/1390 loss:3.8547 train_time:879495ms step_avg:1167.99ms | |
step:764/1390 loss:3.8288 train_time:880700ms step_avg:1168.04ms | |
step:765/1390 loss:3.9825 train_time:881917ms step_avg:1168.10ms | |
step:766/1390 loss:3.9508 train_time:883131ms step_avg:1168.16ms | |
step:767/1390 loss:3.9054 train_time:884345ms step_avg:1168.22ms | |
step:768/1390 loss:3.9729 train_time:885569ms step_avg:1168.30ms | |
step:769/1390 loss:4.0603 train_time:886782ms step_avg:1168.36ms | |
step:770/1390 loss:3.8987 train_time:888002ms step_avg:1168.42ms | |
step:771/1390 loss:3.9659 train_time:889203ms step_avg:1168.47ms | |
step:772/1390 loss:3.8783 train_time:890417ms step_avg:1168.53ms | |
step:773/1390 loss:3.9414 train_time:891628ms step_avg:1168.58ms | |
step:774/1390 loss:3.9633 train_time:892839ms step_avg:1168.64ms | |
step:775/1390 loss:3.9718 train_time:894033ms step_avg:1168.67ms | |
step:775/1390 val_loss:3.6517 train_time:894062ms step_avg:1168.71ms | |
step:776/1390 loss:3.6172 train_time:895285ms step_avg:1168.78ms | |
step:777/1390 loss:3.6552 train_time:896493ms step_avg:1168.83ms | |
step:778/1390 loss:3.7252 train_time:897706ms step_avg:1168.89ms | |
step:779/1390 loss:3.6853 train_time:898920ms step_avg:1168.95ms | |
step:780/1390 loss:3.7129 train_time:900128ms step_avg:1169.00ms | |
step:781/1390 loss:3.7139 train_time:901337ms step_avg:1169.05ms | |
step:782/1390 loss:3.7527 train_time:902547ms step_avg:1169.10ms | |
step:783/1390 loss:3.7398 train_time:903744ms step_avg:1169.14ms | |
step:784/1390 loss:3.7600 train_time:904952ms step_avg:1169.19ms | |
step:785/1390 loss:3.7243 train_time:906167ms step_avg:1169.25ms | |
step:786/1390 loss:3.8000 train_time:907376ms step_avg:1169.30ms | |
step:787/1390 loss:3.8179 train_time:908600ms step_avg:1169.37ms | |
step:788/1390 loss:3.8016 train_time:909804ms step_avg:1169.41ms | |
step:789/1390 loss:3.8905 train_time:911026ms step_avg:1169.48ms | |
step:790/1390 loss:3.8499 train_time:912229ms step_avg:1169.52ms | |
step:791/1390 loss:3.9495 train_time:913440ms step_avg:1169.58ms | |
step:792/1390 loss:3.9223 train_time:914642ms step_avg:1169.62ms | |
step:793/1390 loss:3.8643 train_time:915853ms step_avg:1169.67ms | |
step:794/1390 loss:4.2015 train_time:917074ms step_avg:1169.74ms | |
step:795/1390 loss:3.8789 train_time:918276ms step_avg:1169.78ms | |
step:796/1390 loss:3.9126 train_time:919480ms step_avg:1169.82ms | |
step:797/1390 loss:3.9167 train_time:920694ms step_avg:1169.88ms | |
step:798/1390 loss:3.9739 train_time:921906ms step_avg:1169.93ms | |
step:799/1390 loss:3.9305 train_time:923108ms step_avg:1169.97ms | |
step:800/1390 loss:3.9385 train_time:924309ms step_avg:1170.01ms | |
step:800/1390 val_loss:3.6412 train_time:924338ms step_avg:1170.05ms | |
step:801/1390 loss:3.8148 train_time:925557ms step_avg:1170.11ms | |
step:802/1390 loss:3.6470 train_time:926758ms step_avg:1170.15ms | |
step:803/1390 loss:3.6753 train_time:927961ms step_avg:1170.19ms | |
step:804/1390 loss:3.6879 train_time:929176ms step_avg:1170.25ms | |
step:805/1390 loss:3.7158 train_time:930375ms step_avg:1170.28ms | |
step:806/1390 loss:3.7237 train_time:931577ms step_avg:1170.32ms | |
step:807/1390 loss:3.7178 train_time:932777ms step_avg:1170.36ms | |
step:808/1390 loss:3.7469 train_time:933983ms step_avg:1170.40ms | |
step:809/1390 loss:3.7575 train_time:935195ms step_avg:1170.46ms | |
step:810/1390 loss:3.7928 train_time:936397ms step_avg:1170.50ms | |
step:811/1390 loss:3.7917 train_time:937606ms step_avg:1170.54ms | |
step:812/1390 loss:3.9044 train_time:938812ms step_avg:1170.59ms | |
step:813/1390 loss:3.8787 train_time:940016ms step_avg:1170.63ms | |
step:814/1390 loss:3.8176 train_time:941227ms step_avg:1170.68ms | |
step:815/1390 loss:3.8544 train_time:942445ms step_avg:1170.74ms | |
step:816/1390 loss:3.8429 train_time:943659ms step_avg:1170.79ms | |
step:817/1390 loss:3.8893 train_time:944877ms step_avg:1170.85ms | |
step:818/1390 loss:3.8249 train_time:946094ms step_avg:1170.91ms | |
step:819/1390 loss:3.8272 train_time:947310ms step_avg:1170.96ms | |
step:820/1390 loss:3.8590 train_time:948527ms step_avg:1171.02ms | |
step:821/1390 loss:3.8996 train_time:949744ms step_avg:1171.08ms | |
step:822/1390 loss:3.8371 train_time:950966ms step_avg:1171.14ms | |
step:823/1390 loss:3.8334 train_time:952185ms step_avg:1171.20ms | |
step:824/1390 loss:3.9037 train_time:953393ms step_avg:1171.24ms | |
step:825/1390 loss:3.7982 train_time:954631ms step_avg:1171.33ms | |
step:825/1390 val_loss:3.6294 train_time:954662ms step_avg:1171.36ms | |
step:826/1390 loss:3.6099 train_time:955896ms step_avg:1171.44ms | |
step:827/1390 loss:3.6532 train_time:957126ms step_avg:1171.51ms | |
step:828/1390 loss:3.6406 train_time:958341ms step_avg:1171.57ms | |
step:829/1390 loss:3.6929 train_time:959566ms step_avg:1171.63ms | |
step:830/1390 loss:3.7282 train_time:960807ms step_avg:1171.72ms | |
step:831/1390 loss:3.7999 train_time:962039ms step_avg:1171.79ms | |
step:832/1390 loss:3.7370 train_time:963262ms step_avg:1171.85ms | |
step:833/1390 loss:3.7248 train_time:964493ms step_avg:1171.92ms | |
step:834/1390 loss:3.7406 train_time:965718ms step_avg:1171.99ms | |
step:835/1390 loss:3.7215 train_time:966936ms step_avg:1172.04ms | |
step:836/1390 loss:3.7618 train_time:968149ms step_avg:1172.09ms | |
step:837/1390 loss:3.7589 train_time:969376ms step_avg:1172.16ms | |
step:838/1390 loss:3.8477 train_time:970595ms step_avg:1172.22ms | |
step:839/1390 loss:3.8023 train_time:971810ms step_avg:1172.27ms | |
step:840/1390 loss:3.8024 train_time:973029ms step_avg:1172.32ms | |
step:841/1390 loss:3.8404 train_time:974248ms step_avg:1172.38ms | |
step:842/1390 loss:3.8015 train_time:975474ms step_avg:1172.44ms | |
step:843/1390 loss:3.8419 train_time:976698ms step_avg:1172.51ms | |
step:844/1390 loss:3.8890 train_time:977906ms step_avg:1172.55ms | |
step:845/1390 loss:3.8749 train_time:979130ms step_avg:1172.61ms | |
step:846/1390 loss:3.8556 train_time:980353ms step_avg:1172.67ms | |
step:847/1390 loss:3.8556 train_time:981588ms step_avg:1172.74ms | |
step:848/1390 loss:3.9173 train_time:982817ms step_avg:1172.81ms | |
step:849/1390 loss:3.8722 train_time:984039ms step_avg:1172.87ms | |
step:850/1390 loss:3.9513 train_time:985264ms step_avg:1172.93ms | |
step:850/1390 val_loss:3.6171 train_time:985295ms step_avg:1172.97ms | |
step:851/1390 loss:3.6390 train_time:986529ms step_avg:1173.04ms | |
step:852/1390 loss:3.6514 train_time:987760ms step_avg:1173.11ms | |
step:853/1390 loss:3.6595 train_time:988973ms step_avg:1173.16ms | |
step:854/1390 loss:3.6337 train_time:990184ms step_avg:1173.20ms | |
step:855/1390 loss:3.7038 train_time:991396ms step_avg:1173.25ms | |
step:856/1390 loss:3.7379 train_time:992611ms step_avg:1173.30ms | |
step:857/1390 loss:3.6467 train_time:993849ms step_avg:1173.38ms | |
step:858/1390 loss:3.7025 train_time:995076ms step_avg:1173.44ms | |
step:859/1390 loss:3.7299 train_time:996296ms step_avg:1173.49ms | |
step:860/1390 loss:3.7164 train_time:997513ms step_avg:1173.54ms | |
step:861/1390 loss:3.7321 train_time:998737ms step_avg:1173.60ms | |
step:862/1390 loss:3.7147 train_time:999966ms step_avg:1173.67ms | |
step:863/1390 loss:3.7782 train_time:1001190ms step_avg:1173.73ms | |
step:864/1390 loss:3.7934 train_time:1002412ms step_avg:1173.78ms | |
step:865/1390 loss:3.9087 train_time:1003651ms step_avg:1173.86ms | |
step:866/1390 loss:3.8350 train_time:1004886ms step_avg:1173.93ms | |
step:867/1390 loss:3.8003 train_time:1006107ms step_avg:1173.99ms | |
step:868/1390 loss:3.7590 train_time:1007335ms step_avg:1174.05ms | |
step:869/1390 loss:3.7965 train_time:1008564ms step_avg:1174.11ms | |
step:870/1390 loss:3.8412 train_time:1009791ms step_avg:1174.18ms | |
step:871/1390 loss:3.8098 train_time:1011014ms step_avg:1174.23ms | |
step:872/1390 loss:3.8663 train_time:1012241ms step_avg:1174.29ms | |
step:873/1390 loss:3.8486 train_time:1013455ms step_avg:1174.34ms | |
step:874/1390 loss:3.8011 train_time:1014677ms step_avg:1174.39ms | |
step:875/1390 loss:3.8502 train_time:1015909ms step_avg:1174.46ms | |
step:875/1390 val_loss:3.6029 train_time:1015940ms step_avg:1174.50ms | |
step:876/1390 loss:3.6637 train_time:1017164ms step_avg:1174.55ms | |
step:877/1390 loss:3.6040 train_time:1018383ms step_avg:1174.61ms | |
step:878/1390 loss:3.6815 train_time:1019603ms step_avg:1174.66ms | |
step:879/1390 loss:3.6366 train_time:1020817ms step_avg:1174.70ms | |
step:880/1390 loss:3.6175 train_time:1022028ms step_avg:1174.74ms | |
step:881/1390 loss:3.6547 train_time:1023237ms step_avg:1174.78ms | |
step:882/1390 loss:3.6971 train_time:1024457ms step_avg:1174.84ms | |
step:883/1390 loss:3.7047 train_time:1025672ms step_avg:1174.88ms | |
step:884/1390 loss:3.7070 train_time:1026892ms step_avg:1174.93ms | |
step:885/1390 loss:3.6825 train_time:1028116ms step_avg:1174.99ms | |
step:886/1390 loss:3.7289 train_time:1029318ms step_avg:1175.02ms | |
step:887/1390 loss:3.8674 train_time:1030539ms step_avg:1175.07ms | |
step:888/1390 loss:3.7561 train_time:1031759ms step_avg:1175.12ms | |
step:889/1390 loss:3.7064 train_time:1032972ms step_avg:1175.17ms | |
step:890/1390 loss:3.6961 train_time:1034180ms step_avg:1175.20ms | |
step:891/1390 loss:3.7677 train_time:1035410ms step_avg:1175.27ms | |
step:892/1390 loss:3.8081 train_time:1036625ms step_avg:1175.31ms | |
step:893/1390 loss:3.7178 train_time:1037840ms step_avg:1175.36ms | |
step:894/1390 loss:3.8004 train_time:1039064ms step_avg:1175.41ms | |
step:895/1390 loss:3.8116 train_time:1040292ms step_avg:1175.47ms | |
step:896/1390 loss:3.8215 train_time:1041506ms step_avg:1175.52ms | |
step:897/1390 loss:3.8160 train_time:1042734ms step_avg:1175.57ms | |
step:898/1390 loss:3.8512 train_time:1043953ms step_avg:1175.62ms | |
step:899/1390 loss:3.8262 train_time:1045160ms step_avg:1175.66ms | |
step:900/1390 loss:3.7679 train_time:1046375ms step_avg:1175.70ms | |
step:900/1390 val_loss:3.5870 train_time:1046404ms step_avg:1175.74ms | |
step:901/1390 loss:3.5989 train_time:1047619ms step_avg:1175.78ms | |
step:902/1390 loss:3.8905 train_time:1048844ms step_avg:1175.83ms | |
step:903/1390 loss:3.5975 train_time:1050063ms step_avg:1175.88ms | |
step:904/1390 loss:3.6102 train_time:1051276ms step_avg:1175.92ms | |
step:905/1390 loss:3.6858 train_time:1052484ms step_avg:1175.96ms | |
step:906/1390 loss:3.6930 train_time:1053708ms step_avg:1176.01ms | |
step:907/1390 loss:3.6258 train_time:1054923ms step_avg:1176.06ms | |
step:908/1390 loss:3.7182 train_time:1056133ms step_avg:1176.09ms | |
step:909/1390 loss:3.8288 train_time:1057350ms step_avg:1176.14ms | |
step:910/1390 loss:3.6470 train_time:1058579ms step_avg:1176.20ms | |
step:911/1390 loss:3.7222 train_time:1059791ms step_avg:1176.24ms | |
step:912/1390 loss:3.6746 train_time:1061006ms step_avg:1176.28ms | |
step:913/1390 loss:3.7138 train_time:1062218ms step_avg:1176.32ms | |
step:914/1390 loss:3.7910 train_time:1063444ms step_avg:1176.38ms | |
step:915/1390 loss:3.7534 train_time:1064680ms step_avg:1176.44ms | |
step:916/1390 loss:3.8472 train_time:1065905ms step_avg:1176.50ms | |
step:917/1390 loss:3.7566 train_time:1067116ms step_avg:1176.53ms | |
step:918/1390 loss:3.8914 train_time:1068345ms step_avg:1176.59ms | |
step:919/1390 loss:3.7679 train_time:1069568ms step_avg:1176.64ms | |
step:920/1390 loss:3.8253 train_time:1070785ms step_avg:1176.69ms | |
step:921/1390 loss:3.7350 train_time:1072007ms step_avg:1176.74ms | |
step:922/1390 loss:3.7670 train_time:1073219ms step_avg:1176.77ms | |
step:923/1390 loss:3.7488 train_time:1074445ms step_avg:1176.83ms | |
step:924/1390 loss:3.8139 train_time:1075681ms step_avg:1176.89ms | |
step:925/1390 loss:3.8042 train_time:1076897ms step_avg:1176.94ms | |
step:925/1390 val_loss:3.5728 train_time:1076926ms step_avg:1176.97ms | |
step:926/1390 loss:3.6324 train_time:1078160ms step_avg:1177.03ms | |
step:927/1390 loss:3.5637 train_time:1079385ms step_avg:1177.08ms | |
step:928/1390 loss:3.5943 train_time:1080612ms step_avg:1177.14ms | |
step:929/1390 loss:3.7331 train_time:1081841ms step_avg:1177.19ms | |
step:930/1390 loss:3.5871 train_time:1083062ms step_avg:1177.24ms | |
step:931/1390 loss:3.6133 train_time:1084281ms step_avg:1177.29ms | |
step:932/1390 loss:3.7891 train_time:1085511ms step_avg:1177.34ms | |
step:933/1390 loss:3.6206 train_time:1086754ms step_avg:1177.42ms | |
step:934/1390 loss:3.7772 train_time:1087989ms step_avg:1177.48ms | |
step:935/1390 loss:3.7051 train_time:1089212ms step_avg:1177.53ms | |
step:936/1390 loss:3.6658 train_time:1090447ms step_avg:1177.59ms | |
step:937/1390 loss:3.6432 train_time:1091679ms step_avg:1177.65ms | |
step:938/1390 loss:3.7216 train_time:1092901ms step_avg:1177.70ms | |
step:939/1390 loss:3.6812 train_time:1094137ms step_avg:1177.76ms | |
step:940/1390 loss:3.6776 train_time:1095367ms step_avg:1177.81ms | |
step:941/1390 loss:3.8232 train_time:1096594ms step_avg:1177.87ms | |
step:942/1390 loss:3.7117 train_time:1097819ms step_avg:1177.92ms | |
step:943/1390 loss:3.7811 train_time:1099056ms step_avg:1177.98ms | |
step:944/1390 loss:3.7659 train_time:1100292ms step_avg:1178.04ms | |
step:945/1390 loss:3.7467 train_time:1101526ms step_avg:1178.10ms | |
step:946/1390 loss:3.9440 train_time:1102755ms step_avg:1178.16ms | |
step:947/1390 loss:3.7926 train_time:1103997ms step_avg:1178.23ms | |
step:948/1390 loss:3.7276 train_time:1105241ms step_avg:1178.30ms | |
step:949/1390 loss:3.7662 train_time:1106468ms step_avg:1178.35ms | |
step:950/1390 loss:3.7730 train_time:1107710ms step_avg:1178.42ms | |
step:950/1390 val_loss:3.5567 train_time:1107740ms step_avg:1178.45ms | |
step:951/1390 loss:3.5640 train_time:1109006ms step_avg:1178.54ms | |
step:952/1390 loss:3.6207 train_time:1110239ms step_avg:1178.60ms | |
step:953/1390 loss:3.5559 train_time:1111465ms step_avg:1178.65ms | |
step:954/1390 loss:3.6010 train_time:1112688ms step_avg:1178.69ms | |
step:955/1390 loss:3.6241 train_time:1113931ms step_avg:1178.76ms | |
step:956/1390 loss:3.6359 train_time:1115150ms step_avg:1178.81ms | |
step:957/1390 loss:3.6942 train_time:1116381ms step_avg:1178.86ms | |
step:958/1390 loss:3.6806 train_time:1117606ms step_avg:1178.91ms | |
step:959/1390 loss:3.6735 train_time:1118837ms step_avg:1178.96ms | |
step:960/1390 loss:3.6212 train_time:1120064ms step_avg:1179.01ms | |
step:961/1390 loss:3.6565 train_time:1121297ms step_avg:1179.07ms | |
step:962/1390 loss:3.6466 train_time:1122528ms step_avg:1179.13ms | |
step:963/1390 loss:3.6640 train_time:1123755ms step_avg:1179.18ms | |
step:964/1390 loss:3.6508 train_time:1124994ms step_avg:1179.24ms | |
step:965/1390 loss:3.7052 train_time:1126223ms step_avg:1179.29ms | |
step:966/1390 loss:3.7247 train_time:1127452ms step_avg:1179.34ms | |
step:967/1390 loss:3.7379 train_time:1128670ms step_avg:1179.38ms | |
step:968/1390 loss:3.7058 train_time:1129899ms step_avg:1179.43ms | |
step:969/1390 loss:3.7067 train_time:1131115ms step_avg:1179.47ms | |
step:970/1390 loss:3.7105 train_time:1132334ms step_avg:1179.51ms | |
step:971/1390 loss:3.7030 train_time:1133558ms step_avg:1179.56ms | |
step:972/1390 loss:3.7253 train_time:1134788ms step_avg:1179.61ms | |
step:973/1390 loss:3.7894 train_time:1136002ms step_avg:1179.65ms | |
step:974/1390 loss:3.7349 train_time:1137224ms step_avg:1179.69ms | |
step:975/1390 loss:3.7792 train_time:1138438ms step_avg:1179.73ms | |
step:975/1390 val_loss:3.5438 train_time:1138467ms step_avg:1179.76ms | |
step:976/1390 loss:3.5787 train_time:1139699ms step_avg:1179.81ms | |
step:977/1390 loss:3.5539 train_time:1140930ms step_avg:1179.87ms | |
step:978/1390 loss:3.5331 train_time:1142146ms step_avg:1179.90ms | |
step:979/1390 loss:3.6016 train_time:1143370ms step_avg:1179.95ms | |
step:980/1390 loss:3.5906 train_time:1144584ms step_avg:1179.98ms | |
step:981/1390 loss:3.6195 train_time:1145812ms step_avg:1180.03ms | |
step:982/1390 loss:3.6331 train_time:1147037ms step_avg:1180.08ms | |
step:983/1390 loss:3.6193 train_time:1148271ms step_avg:1180.13ms | |
step:984/1390 loss:3.7200 train_time:1149502ms step_avg:1180.19ms | |
step:985/1390 loss:3.6939 train_time:1150727ms step_avg:1180.23ms | |
step:986/1390 loss:3.6441 train_time:1151961ms step_avg:1180.29ms | |
step:987/1390 loss:3.6734 train_time:1153190ms step_avg:1180.34ms | |
step:988/1390 loss:3.6070 train_time:1154415ms step_avg:1180.38ms | |
step:989/1390 loss:3.6441 train_time:1155637ms step_avg:1180.43ms | |
step:990/1390 loss:3.6645 train_time:1156873ms step_avg:1180.48ms | |
step:991/1390 loss:3.6790 train_time:1158109ms step_avg:1180.54ms | |
step:992/1390 loss:3.6739 train_time:1159352ms step_avg:1180.60ms | |
step:993/1390 loss:3.6812 train_time:1160585ms step_avg:1180.66ms | |
step:994/1390 loss:3.6617 train_time:1161809ms step_avg:1180.70ms | |
step:995/1390 loss:3.7531 train_time:1163036ms step_avg:1180.75ms | |
step:996/1390 loss:3.6671 train_time:1164259ms step_avg:1180.79ms | |
step:997/1390 loss:3.7203 train_time:1165472ms step_avg:1180.82ms | |
step:998/1390 loss:3.7345 train_time:1166701ms step_avg:1180.87ms | |
step:999/1390 loss:3.7554 train_time:1167922ms step_avg:1180.91ms | |
step:1000/1390 loss:3.7421 train_time:1169154ms step_avg:1180.96ms | |
step:1000/1390 val_loss:3.5312 train_time:1169185ms step_avg:1181.00ms | |
step:1001/1390 loss:3.5178 train_time:1170423ms step_avg:1181.05ms | |
step:1002/1390 loss:3.5672 train_time:1171662ms step_avg:1181.11ms | |
step:1003/1390 loss:3.5739 train_time:1172901ms step_avg:1181.17ms | |
step:1004/1390 loss:3.6030 train_time:1174148ms step_avg:1181.24ms | |
step:1005/1390 loss:3.5816 train_time:1175364ms step_avg:1181.27ms | |
step:1006/1390 loss:3.5478 train_time:1176583ms step_avg:1181.31ms | |
step:1007/1390 loss:3.6170 train_time:1177810ms step_avg:1181.35ms | |
step:1008/1390 loss:3.6583 train_time:1179038ms step_avg:1181.40ms | |
step:1009/1390 loss:3.6605 train_time:1180266ms step_avg:1181.45ms | |
step:1010/1390 loss:3.5984 train_time:1181506ms step_avg:1181.51ms | |
step:1011/1390 loss:3.6461 train_time:1182727ms step_avg:1181.55ms | |
step:1012/1390 loss:3.5555 train_time:1183951ms step_avg:1181.59ms | |
step:1013/1390 loss:3.6595 train_time:1185176ms step_avg:1181.63ms | |
step:1014/1390 loss:3.6711 train_time:1186400ms step_avg:1181.67ms | |
step:1015/1390 loss:3.6431 train_time:1187617ms step_avg:1181.71ms | |
step:1016/1390 loss:3.6553 train_time:1188841ms step_avg:1181.75ms | |
step:1017/1390 loss:3.6322 train_time:1190064ms step_avg:1181.79ms | |
step:1018/1390 loss:3.6741 train_time:1191303ms step_avg:1181.85ms | |
step:1019/1390 loss:3.6876 train_time:1192534ms step_avg:1181.90ms | |
step:1020/1390 loss:3.6847 train_time:1193758ms step_avg:1181.94ms | |
step:1021/1390 loss:3.6947 train_time:1194976ms step_avg:1181.97ms | |
step:1022/1390 loss:3.7100 train_time:1196208ms step_avg:1182.02ms | |
step:1023/1390 loss:3.7012 train_time:1197420ms step_avg:1182.05ms | |
step:1024/1390 loss:3.6986 train_time:1198650ms step_avg:1182.10ms | |
step:1025/1390 loss:3.6807 train_time:1199869ms step_avg:1182.14ms | |
step:1025/1390 val_loss:3.5196 train_time:1199899ms step_avg:1182.17ms | |
step:1026/1390 loss:3.5456 train_time:1201120ms step_avg:1182.21ms | |
step:1027/1390 loss:3.5448 train_time:1202359ms step_avg:1182.26ms | |
step:1028/1390 loss:3.5595 train_time:1203596ms step_avg:1182.31ms | |
step:1029/1390 loss:3.6216 train_time:1204818ms step_avg:1182.35ms | |
step:1030/1390 loss:3.5669 train_time:1206036ms step_avg:1182.39ms | |
step:1031/1390 loss:3.5362 train_time:1207254ms step_avg:1182.42ms | |
step:1032/1390 loss:3.6054 train_time:1208493ms step_avg:1182.48ms | |
step:1033/1390 loss:3.6017 train_time:1209732ms step_avg:1182.53ms | |
step:1034/1390 loss:3.5913 train_time:1210976ms step_avg:1182.59ms | |
step:1035/1390 loss:3.5942 train_time:1212208ms step_avg:1182.64ms | |
step:1036/1390 loss:3.8219 train_time:1213443ms step_avg:1182.69ms | |
step:1037/1390 loss:3.6230 train_time:1214673ms step_avg:1182.74ms | |
step:1038/1390 loss:3.6420 train_time:1215906ms step_avg:1182.79ms | |
step:1039/1390 loss:3.6494 train_time:1217135ms step_avg:1182.83ms | |
step:1040/1390 loss:3.6148 train_time:1218370ms step_avg:1182.88ms | |
step:1041/1390 loss:3.6299 train_time:1219596ms step_avg:1182.93ms | |
step:1042/1390 loss:3.6898 train_time:1220823ms step_avg:1182.97ms | |
step:1043/1390 loss:3.6567 train_time:1222065ms step_avg:1183.02ms | |
step:1044/1390 loss:3.5736 train_time:1223311ms step_avg:1183.09ms | |
step:1045/1390 loss:3.6543 train_time:1224553ms step_avg:1183.14ms | |
step:1046/1390 loss:3.6688 train_time:1225771ms step_avg:1183.18ms | |
step:1047/1390 loss:3.7548 train_time:1227007ms step_avg:1183.23ms | |
step:1048/1390 loss:3.6378 train_time:1228242ms step_avg:1183.28ms | |
step:1049/1390 loss:3.6222 train_time:1229466ms step_avg:1183.32ms | |
step:1050/1390 loss:3.7826 train_time:1230701ms step_avg:1183.37ms | |
step:1050/1390 val_loss:3.5081 train_time:1230731ms step_avg:1183.39ms | |
step:1051/1390 loss:3.5352 train_time:1231969ms step_avg:1183.45ms | |
step:1052/1390 loss:3.5203 train_time:1233213ms step_avg:1183.51ms | |
step:1053/1390 loss:3.5551 train_time:1234462ms step_avg:1183.57ms | |
step:1054/1390 loss:3.4891 train_time:1235699ms step_avg:1183.62ms | |
step:1055/1390 loss:3.5221 train_time:1236929ms step_avg:1183.66ms | |
step:1056/1390 loss:3.5210 train_time:1238155ms step_avg:1183.70ms | |
step:1057/1390 loss:3.5853 train_time:1239395ms step_avg:1183.76ms | |
step:1058/1390 loss:3.5634 train_time:1240632ms step_avg:1183.81ms | |
step:1059/1390 loss:3.6143 train_time:1241872ms step_avg:1183.86ms | |
step:1060/1390 loss:3.5134 train_time:1243096ms step_avg:1183.90ms | |
step:1061/1390 loss:3.6589 train_time:1244326ms step_avg:1183.94ms | |
step:1062/1390 loss:3.5923 train_time:1245552ms step_avg:1183.99ms | |
step:1063/1390 loss:3.5728 train_time:1246790ms step_avg:1184.04ms | |
step:1064/1390 loss:3.6354 train_time:1248021ms step_avg:1184.08ms | |
step:1065/1390 loss:3.5666 train_time:1249262ms step_avg:1184.13ms | |
step:1066/1390 loss:3.6043 train_time:1250508ms step_avg:1184.19ms | |
step:1067/1390 loss:3.6277 train_time:1251750ms step_avg:1184.25ms | |
step:1068/1390 loss:3.6454 train_time:1252986ms step_avg:1184.30ms | |
step:1069/1390 loss:3.6429 train_time:1254225ms step_avg:1184.35ms | |
step:1070/1390 loss:3.6525 train_time:1255458ms step_avg:1184.39ms | |
step:1071/1390 loss:3.5805 train_time:1256684ms step_avg:1184.43ms | |
step:1072/1390 loss:3.6747 train_time:1257908ms step_avg:1184.47ms | |
step:1073/1390 loss:3.5769 train_time:1259133ms step_avg:1184.51ms | |
step:1074/1390 loss:3.6705 train_time:1260364ms step_avg:1184.55ms | |
step:1075/1390 loss:3.6344 train_time:1261588ms step_avg:1184.59ms | |
step:1075/1390 val_loss:3.4977 train_time:1261619ms step_avg:1184.62ms | |
step:1076/1390 loss:3.5898 train_time:1262852ms step_avg:1184.66ms | |
step:1077/1390 loss:3.5550 train_time:1264084ms step_avg:1184.71ms | |
step:1078/1390 loss:3.4935 train_time:1265313ms step_avg:1184.75ms | |
step:1079/1390 loss:3.5405 train_time:1266544ms step_avg:1184.79ms | |
step:1080/1390 loss:3.5363 train_time:1267775ms step_avg:1184.84ms | |
step:1081/1390 loss:3.5223 train_time:1269013ms step_avg:1184.89ms | |
step:1082/1390 loss:3.5325 train_time:1270264ms step_avg:1184.95ms | |
step:1083/1390 loss:3.5671 train_time:1271506ms step_avg:1185.00ms | |
step:1084/1390 loss:3.5417 train_time:1272734ms step_avg:1185.04ms | |
step:1085/1390 loss:3.5719 train_time:1273968ms step_avg:1185.09ms | |
step:1086/1390 loss:3.5874 train_time:1275206ms step_avg:1185.14ms | |
step:1087/1390 loss:3.5685 train_time:1276441ms step_avg:1185.18ms | |
step:1088/1390 loss:3.5590 train_time:1277680ms step_avg:1185.23ms | |
step:1089/1390 loss:3.5542 train_time:1278922ms step_avg:1185.28ms | |
step:1090/1390 loss:3.6148 train_time:1280160ms step_avg:1185.33ms | |
step:1091/1390 loss:3.5969 train_time:1281402ms step_avg:1185.39ms | |
step:1092/1390 loss:3.5647 train_time:1282639ms step_avg:1185.43ms | |
step:1093/1390 loss:3.6090 train_time:1283876ms step_avg:1185.48ms | |
step:1094/1390 loss:3.6457 train_time:1285100ms step_avg:1185.52ms | |
step:1095/1390 loss:3.5467 train_time:1286338ms step_avg:1185.57ms | |
step:1096/1390 loss:3.5870 train_time:1287576ms step_avg:1185.61ms | |
step:1097/1390 loss:3.6713 train_time:1288810ms step_avg:1185.66ms | |
step:1098/1390 loss:3.6665 train_time:1290050ms step_avg:1185.71ms | |
step:1099/1390 loss:3.6677 train_time:1291283ms step_avg:1185.75ms | |
step:1100/1390 loss:3.6557 train_time:1292522ms step_avg:1185.80ms | |
step:1100/1390 val_loss:3.4882 train_time:1292553ms step_avg:1185.83ms | |
step:1101/1390 loss:3.5326 train_time:1293801ms step_avg:1185.89ms | |
step:1102/1390 loss:3.5203 train_time:1295042ms step_avg:1185.94ms | |
step:1103/1390 loss:3.5340 train_time:1296281ms step_avg:1185.98ms | |
step:1104/1390 loss:3.5876 train_time:1297514ms step_avg:1186.03ms | |
step:1105/1390 loss:3.6447 train_time:1298749ms step_avg:1186.07ms | |
step:1106/1390 loss:3.5751 train_time:1299976ms step_avg:1186.11ms | |
step:1107/1390 loss:3.5765 train_time:1301214ms step_avg:1186.16ms | |
step:1108/1390 loss:3.5359 train_time:1302440ms step_avg:1186.19ms | |
step:1109/1390 loss:3.5364 train_time:1303683ms step_avg:1186.24ms | |
step:1110/1390 loss:3.5067 train_time:1304920ms step_avg:1186.29ms | |
step:1111/1390 loss:3.4729 train_time:1306147ms step_avg:1186.33ms | |
step:1112/1390 loss:3.5150 train_time:1307370ms step_avg:1186.36ms | |
step:1113/1390 loss:3.6251 train_time:1308609ms step_avg:1186.41ms | |
step:1114/1390 loss:3.5900 train_time:1309838ms step_avg:1186.45ms | |
step:1115/1390 loss:3.6251 train_time:1311075ms step_avg:1186.49ms | |
step:1116/1390 loss:3.6322 train_time:1312309ms step_avg:1186.54ms | |
step:1117/1390 loss:3.6147 train_time:1313566ms step_avg:1186.60ms | |
step:1118/1390 loss:3.5839 train_time:1314786ms step_avg:1186.63ms | |
step:1119/1390 loss:3.5786 train_time:1316003ms step_avg:1186.66ms | |
step:1120/1390 loss:3.6746 train_time:1317235ms step_avg:1186.70ms | |
step:1121/1390 loss:3.6181 train_time:1318460ms step_avg:1186.73ms | |
step:1122/1390 loss:3.5488 train_time:1319687ms step_avg:1186.77ms | |
step:1123/1390 loss:3.5762 train_time:1320911ms step_avg:1186.80ms | |
step:1124/1390 loss:3.6062 train_time:1322136ms step_avg:1186.84ms | |
step:1125/1390 loss:3.6179 train_time:1323387ms step_avg:1186.89ms | |
step:1125/1390 val_loss:3.4797 train_time:1323417ms step_avg:1186.92ms | |
step:1126/1390 loss:3.4699 train_time:1324659ms step_avg:1186.97ms | |
step:1127/1390 loss:3.5549 train_time:1325891ms step_avg:1187.01ms | |
step:1128/1390 loss:3.4944 train_time:1327132ms step_avg:1187.06ms | |
step:1129/1390 loss:3.5069 train_time:1328358ms step_avg:1187.09ms | |
step:1130/1390 loss:3.4959 train_time:1329598ms step_avg:1187.14ms | |
step:1131/1390 loss:3.5258 train_time:1330838ms step_avg:1187.19ms | |
step:1132/1390 loss:3.5355 train_time:1332052ms step_avg:1187.21ms | |
step:1133/1390 loss:3.5843 train_time:1333286ms step_avg:1187.25ms | |
step:1134/1390 loss:3.5508 train_time:1334514ms step_avg:1187.29ms | |
step:1135/1390 loss:3.5740 train_time:1335758ms step_avg:1187.34ms | |
step:1136/1390 loss:3.5299 train_time:1336982ms step_avg:1187.37ms | |
step:1137/1390 loss:3.6417 train_time:1338209ms step_avg:1187.41ms | |
step:1138/1390 loss:3.5269 train_time:1339439ms step_avg:1187.45ms | |
step:1139/1390 loss:3.5655 train_time:1340688ms step_avg:1187.50ms | |
step:1140/1390 loss:3.5581 train_time:1341914ms step_avg:1187.53ms | |
step:1141/1390 loss:3.6164 train_time:1343189ms step_avg:1187.61ms | |
step:1142/1390 loss:3.5613 train_time:1344436ms step_avg:1187.66ms | |
step:1143/1390 loss:3.5551 train_time:1345679ms step_avg:1187.71ms | |
step:1144/1390 loss:3.5625 train_time:1346922ms step_avg:1187.76ms | |
step:1145/1390 loss:3.6185 train_time:1348168ms step_avg:1187.81ms | |
step:1146/1390 loss:3.6636 train_time:1349419ms step_avg:1187.87ms | |
step:1147/1390 loss:3.6118 train_time:1350665ms step_avg:1187.92ms | |
step:1148/1390 loss:3.6539 train_time:1351904ms step_avg:1187.96ms | |
step:1149/1390 loss:3.5579 train_time:1353143ms step_avg:1188.01ms | |
step:1150/1390 loss:3.5433 train_time:1354395ms step_avg:1188.07ms | |
step:1150/1390 val_loss:3.4715 train_time:1354425ms step_avg:1188.09ms | |
step:1151/1390 loss:3.4819 train_time:1355671ms step_avg:1188.14ms | |
step:1152/1390 loss:3.5414 train_time:1356935ms step_avg:1188.21ms | |
step:1153/1390 loss:3.4606 train_time:1358181ms step_avg:1188.26ms | |
step:1154/1390 loss:3.5024 train_time:1359423ms step_avg:1188.31ms | |
step:1155/1390 loss:3.5399 train_time:1360673ms step_avg:1188.36ms | |
step:1156/1390 loss:3.5518 train_time:1361922ms step_avg:1188.41ms | |
step:1157/1390 loss:3.5199 train_time:1363166ms step_avg:1188.46ms | |
step:1158/1390 loss:3.5691 train_time:1364403ms step_avg:1188.50ms | |
step:1159/1390 loss:3.5210 train_time:1365643ms step_avg:1188.55ms | |
step:1160/1390 loss:3.5113 train_time:1366877ms step_avg:1188.59ms | |
step:1161/1390 loss:3.4387 train_time:1368114ms step_avg:1188.63ms | |
step:1162/1390 loss:3.5539 train_time:1369339ms step_avg:1188.66ms | |
step:1163/1390 loss:3.5281 train_time:1370581ms step_avg:1188.71ms | |
step:1164/1390 loss:3.5116 train_time:1371826ms step_avg:1188.76ms | |
step:1165/1390 loss:3.5443 train_time:1373058ms step_avg:1188.79ms | |
step:1166/1390 loss:3.5651 train_time:1374286ms step_avg:1188.83ms | |
step:1167/1390 loss:3.5701 train_time:1375533ms step_avg:1188.88ms | |
step:1168/1390 loss:3.5784 train_time:1376765ms step_avg:1188.92ms | |
step:1169/1390 loss:3.5220 train_time:1377998ms step_avg:1188.95ms | |
step:1170/1390 loss:3.5966 train_time:1379231ms step_avg:1188.99ms | |
step:1171/1390 loss:3.4877 train_time:1380468ms step_avg:1189.03ms | |
step:1172/1390 loss:3.5607 train_time:1381694ms step_avg:1189.07ms | |
step:1173/1390 loss:3.6978 train_time:1382949ms step_avg:1189.12ms | |
step:1174/1390 loss:3.5384 train_time:1384187ms step_avg:1189.16ms | |
step:1175/1390 loss:3.5561 train_time:1385440ms step_avg:1189.22ms | |
step:1175/1390 val_loss:3.4643 train_time:1385469ms step_avg:1189.24ms | |
step:1176/1390 loss:3.4285 train_time:1386728ms step_avg:1189.30ms | |
step:1177/1390 loss:3.4853 train_time:1387966ms step_avg:1189.35ms | |
step:1178/1390 loss:3.4792 train_time:1389206ms step_avg:1189.39ms | |
step:1179/1390 loss:3.5523 train_time:1390464ms step_avg:1189.45ms | |
step:1180/1390 loss:3.4805 train_time:1391706ms step_avg:1189.49ms | |
step:1181/1390 loss:3.4715 train_time:1392928ms step_avg:1189.52ms | |
step:1182/1390 loss:3.4959 train_time:1394170ms step_avg:1189.56ms | |
step:1183/1390 loss:3.4969 train_time:1395406ms step_avg:1189.60ms | |
step:1184/1390 loss:3.4903 train_time:1396649ms step_avg:1189.65ms | |
step:1185/1390 loss:3.5482 train_time:1397880ms step_avg:1189.68ms | |
step:1186/1390 loss:3.4402 train_time:1399155ms step_avg:1189.76ms | |
step:1187/1390 loss:3.5530 train_time:1400407ms step_avg:1189.81ms | |
step:1188/1390 loss:3.5410 train_time:1401661ms step_avg:1189.86ms | |
step:1189/1390 loss:3.5419 train_time:1402906ms step_avg:1189.91ms | |
step:1190/1390 loss:3.5335 train_time:1404158ms step_avg:1189.96ms | |
step:1191/1390 loss:3.6621 train_time:1405406ms step_avg:1190.01ms | |
step:1192/1390 loss:3.6126 train_time:1406654ms step_avg:1190.06ms | |
step:1193/1390 loss:3.5152 train_time:1407895ms step_avg:1190.11ms | |
step:1194/1390 loss:3.5466 train_time:1409145ms step_avg:1190.16ms | |
step:1195/1390 loss:3.5387 train_time:1410392ms step_avg:1190.20ms | |
step:1196/1390 loss:3.5424 train_time:1411633ms step_avg:1190.25ms | |
step:1197/1390 loss:3.6169 train_time:1412901ms step_avg:1190.31ms | |
step:1198/1390 loss:3.5696 train_time:1414147ms step_avg:1190.36ms | |
step:1199/1390 loss:3.5271 train_time:1415393ms step_avg:1190.41ms | |
step:1200/1390 loss:3.5254 train_time:1416622ms step_avg:1190.44ms | |
step:1200/1390 val_loss:3.4580 train_time:1416652ms step_avg:1190.46ms | |
step:1201/1390 loss:3.5020 train_time:1417920ms step_avg:1190.53ms | |
step:1202/1390 loss:3.5137 train_time:1419182ms step_avg:1190.59ms | |
step:1203/1390 loss:3.4390 train_time:1420425ms step_avg:1190.63ms | |
step:1204/1390 loss:3.5097 train_time:1421667ms step_avg:1190.68ms | |
step:1205/1390 loss:3.5067 train_time:1422922ms step_avg:1190.73ms | |
step:1206/1390 loss:3.4455 train_time:1424158ms step_avg:1190.77ms | |
step:1207/1390 loss:3.5521 train_time:1425407ms step_avg:1190.82ms | |
step:1208/1390 loss:3.4889 train_time:1426650ms step_avg:1190.86ms | |
step:1209/1390 loss:3.4975 train_time:1427910ms step_avg:1190.92ms | |
step:1210/1390 loss:3.4797 train_time:1429168ms step_avg:1190.97ms | |
step:1211/1390 loss:3.5466 train_time:1430419ms step_avg:1191.02ms | |
step:1212/1390 loss:3.4659 train_time:1431654ms step_avg:1191.06ms | |
step:1213/1390 loss:3.5016 train_time:1432890ms step_avg:1191.10ms | |
step:1214/1390 loss:3.4562 train_time:1434139ms step_avg:1191.15ms | |
step:1215/1390 loss:3.5173 train_time:1435358ms step_avg:1191.17ms | |
step:1216/1390 loss:3.4968 train_time:1436583ms step_avg:1191.20ms | |
step:1217/1390 loss:3.4945 train_time:1437812ms step_avg:1191.23ms | |
step:1218/1390 loss:3.5076 train_time:1439030ms step_avg:1191.25ms | |
step:1219/1390 loss:3.5873 train_time:1440269ms step_avg:1191.29ms | |
step:1220/1390 loss:3.5540 train_time:1441506ms step_avg:1191.33ms | |
step:1221/1390 loss:3.5094 train_time:1442750ms step_avg:1191.37ms | |
step:1222/1390 loss:3.5650 train_time:1443986ms step_avg:1191.41ms | |
step:1223/1390 loss:3.5154 train_time:1445229ms step_avg:1191.45ms | |
step:1224/1390 loss:3.5177 train_time:1446490ms step_avg:1191.51ms | |
step:1225/1390 loss:3.4998 train_time:1447722ms step_avg:1191.54ms | |
step:1225/1390 val_loss:3.4525 train_time:1447751ms step_avg:1191.57ms | |
step:1226/1390 loss:3.4836 train_time:1448997ms step_avg:1191.61ms | |
step:1227/1390 loss:3.4699 train_time:1450237ms step_avg:1191.65ms | |
step:1228/1390 loss:3.4585 train_time:1451465ms step_avg:1191.68ms | |
step:1229/1390 loss:3.4663 train_time:1452723ms step_avg:1191.73ms | |
step:1230/1390 loss:3.4451 train_time:1453959ms step_avg:1191.77ms | |
step:1231/1390 loss:3.5096 train_time:1455207ms step_avg:1191.82ms | |
step:1232/1390 loss:3.4969 train_time:1456435ms step_avg:1191.85ms | |
step:1233/1390 loss:3.4546 train_time:1457665ms step_avg:1191.88ms | |
step:1234/1390 loss:3.5262 train_time:1458898ms step_avg:1191.91ms | |
step:1235/1390 loss:3.4997 train_time:1460132ms step_avg:1191.94ms | |
step:1236/1390 loss:3.5054 train_time:1461359ms step_avg:1191.97ms | |
step:1237/1390 loss:3.4579 train_time:1462624ms step_avg:1192.03ms | |
step:1238/1390 loss:3.4435 train_time:1463860ms step_avg:1192.07ms | |
step:1239/1390 loss:3.5265 train_time:1465108ms step_avg:1192.11ms | |
step:1240/1390 loss:3.4496 train_time:1466364ms step_avg:1192.17ms | |
step:1241/1390 loss:3.5663 train_time:1467603ms step_avg:1192.20ms | |
step:1242/1390 loss:3.5955 train_time:1468838ms step_avg:1192.24ms | |
step:1243/1390 loss:3.5319 train_time:1470077ms step_avg:1192.28ms | |
step:1244/1390 loss:3.5069 train_time:1471309ms step_avg:1192.31ms | |
step:1245/1390 loss:3.5245 train_time:1472536ms step_avg:1192.34ms | |
step:1246/1390 loss:3.4516 train_time:1473778ms step_avg:1192.38ms | |
step:1247/1390 loss:3.4801 train_time:1475017ms step_avg:1192.41ms | |
step:1248/1390 loss:3.5430 train_time:1476242ms step_avg:1192.44ms | |
step:1249/1390 loss:3.5138 train_time:1477478ms step_avg:1192.48ms | |
step:1250/1390 loss:3.4497 train_time:1478725ms step_avg:1192.52ms | |
step:1250/1390 val_loss:3.4472 train_time:1478755ms step_avg:1192.54ms | |
step:1251/1390 loss:3.4343 train_time:1480016ms step_avg:1192.60ms | |
step:1252/1390 loss:3.4563 train_time:1481251ms step_avg:1192.63ms | |
step:1253/1390 loss:3.4857 train_time:1482483ms step_avg:1192.67ms | |
step:1254/1390 loss:3.5021 train_time:1483748ms step_avg:1192.72ms | |
step:1255/1390 loss:3.4891 train_time:1484988ms step_avg:1192.76ms | |
step:1256/1390 loss:3.4793 train_time:1486230ms step_avg:1192.80ms | |
step:1257/1390 loss:3.4460 train_time:1487481ms step_avg:1192.85ms | |
step:1258/1390 loss:3.6526 train_time:1488746ms step_avg:1192.91ms | |
step:1259/1390 loss:3.4996 train_time:1489989ms step_avg:1192.95ms | |
step:1260/1390 loss:3.4579 train_time:1491233ms step_avg:1192.99ms | |
step:1261/1390 loss:3.4721 train_time:1492508ms step_avg:1193.05ms | |
step:1262/1390 loss:3.4915 train_time:1493762ms step_avg:1193.10ms | |
step:1263/1390 loss:3.4878 train_time:1495000ms step_avg:1193.14ms | |
step:1264/1390 loss:3.4719 train_time:1496234ms step_avg:1193.17ms | |
step:1265/1390 loss:3.4965 train_time:1497474ms step_avg:1193.21ms | |
step:1266/1390 loss:3.4884 train_time:1498721ms step_avg:1193.25ms | |
step:1267/1390 loss:3.4203 train_time:1499965ms step_avg:1193.29ms | |
step:1268/1390 loss:3.4345 train_time:1501207ms step_avg:1193.33ms | |
step:1269/1390 loss:3.4710 train_time:1502452ms step_avg:1193.37ms | |
step:1270/1390 loss:3.5227 train_time:1503685ms step_avg:1193.40ms | |
step:1271/1390 loss:3.4908 train_time:1504935ms step_avg:1193.45ms | |
step:1272/1390 loss:3.5135 train_time:1506174ms step_avg:1193.48ms | |
step:1273/1390 loss:3.5252 train_time:1507401ms step_avg:1193.51ms | |
step:1274/1390 loss:3.5245 train_time:1508638ms step_avg:1193.54ms | |
step:1275/1390 loss:3.4586 train_time:1509876ms step_avg:1193.58ms | |
step:1275/1390 val_loss:3.4436 train_time:1509905ms step_avg:1193.60ms | |
step:1276/1390 loss:3.4834 train_time:1511166ms step_avg:1193.65ms | |
step:1277/1390 loss:3.4399 train_time:1512420ms step_avg:1193.70ms | |
step:1278/1390 loss:3.5027 train_time:1513667ms step_avg:1193.74ms | |
step:1279/1390 loss:3.4567 train_time:1514939ms step_avg:1193.81ms | |
step:1280/1390 loss:3.5028 train_time:1516185ms step_avg:1193.85ms | |
step:1281/1390 loss:3.4860 train_time:1517423ms step_avg:1193.88ms | |
step:1282/1390 loss:3.5260 train_time:1518671ms step_avg:1193.92ms | |
step:1283/1390 loss:3.4121 train_time:1519929ms step_avg:1193.97ms | |
step:1284/1390 loss:3.4482 train_time:1521171ms step_avg:1194.01ms | |
step:1285/1390 loss:3.4781 train_time:1522424ms step_avg:1194.06ms | |
step:1286/1390 loss:3.5745 train_time:1523677ms step_avg:1194.10ms | |
step:1287/1390 loss:3.5035 train_time:1524920ms step_avg:1194.14ms | |
step:1288/1390 loss:3.4776 train_time:1526176ms step_avg:1194.19ms | |
step:1289/1390 loss:3.4526 train_time:1527453ms step_avg:1194.26ms | |
step:1290/1390 loss:3.4600 train_time:1528739ms step_avg:1194.33ms | |
step:1291/1390 loss:3.4664 train_time:1529994ms step_avg:1194.37ms | |
step:1292/1390 loss:3.5381 train_time:1531241ms step_avg:1194.42ms | |
step:1293/1390 loss:3.5139 train_time:1532491ms step_avg:1194.46ms | |
step:1294/1390 loss:3.4832 train_time:1533736ms step_avg:1194.50ms | |
step:1295/1390 loss:3.5239 train_time:1534992ms step_avg:1194.55ms | |
step:1296/1390 loss:3.5388 train_time:1536254ms step_avg:1194.60ms | |
step:1297/1390 loss:3.4505 train_time:1537494ms step_avg:1194.63ms | |
step:1298/1390 loss:3.5392 train_time:1538732ms step_avg:1194.67ms | |
step:1299/1390 loss:3.4640 train_time:1539970ms step_avg:1194.70ms | |
step:1300/1390 loss:3.5165 train_time:1541224ms step_avg:1194.75ms | |
step:1300/1390 val_loss:3.4409 train_time:1541254ms step_avg:1194.77ms | |
step:1301/1390 loss:3.5012 train_time:1542506ms step_avg:1194.81ms | |
step:1302/1390 loss:3.5004 train_time:1543750ms step_avg:1194.85ms | |
step:1303/1390 loss:3.4920 train_time:1545009ms step_avg:1194.90ms | |
step:1304/1390 loss:3.4697 train_time:1546251ms step_avg:1194.94ms | |
step:1305/1390 loss:3.4197 train_time:1547495ms step_avg:1194.98ms | |
step:1306/1390 loss:3.4718 train_time:1548739ms step_avg:1195.02ms | |
step:1307/1390 loss:3.4127 train_time:1549984ms step_avg:1195.05ms | |
step:1308/1390 loss:3.4521 train_time:1551212ms step_avg:1195.08ms | |
step:1309/1390 loss:3.5451 train_time:1552448ms step_avg:1195.11ms | |
step:1310/1390 loss:3.4079 train_time:1553693ms step_avg:1195.15ms | |
step:1311/1390 loss:3.4719 train_time:1554928ms step_avg:1195.18ms | |
step:1312/1390 loss:3.4658 train_time:1556171ms step_avg:1195.22ms | |
step:1313/1390 loss:3.4518 train_time:1557410ms step_avg:1195.25ms | |
step:1314/1390 loss:3.3791 train_time:1558657ms step_avg:1195.29ms | |
step:1315/1390 loss:3.5333 train_time:1559892ms step_avg:1195.32ms | |
step:1316/1390 loss:3.5405 train_time:1561123ms step_avg:1195.35ms | |
step:1317/1390 loss:3.4711 train_time:1562355ms step_avg:1195.37ms | |
step:1318/1390 loss:3.4599 train_time:1563594ms step_avg:1195.41ms | |
step:1319/1390 loss:3.5624 train_time:1564846ms step_avg:1195.45ms | |
step:1320/1390 loss:3.5152 train_time:1566088ms step_avg:1195.49ms | |
step:1321/1390 loss:3.5921 train_time:1567357ms step_avg:1195.54ms | |
step:1322/1390 loss:3.4607 train_time:1568604ms step_avg:1195.58ms | |
step:1323/1390 loss:3.5276 train_time:1569845ms step_avg:1195.62ms | |
step:1324/1390 loss:3.5314 train_time:1571099ms step_avg:1195.66ms | |
step:1325/1390 loss:3.4396 train_time:1572350ms step_avg:1195.70ms | |
step:1325/1390 val_loss:3.4391 train_time:1572379ms step_avg:1195.73ms | |
step:1326/1390 loss:3.4373 train_time:1573624ms step_avg:1195.76ms | |
step:1327/1390 loss:3.4795 train_time:1574847ms step_avg:1195.78ms | |
step:1328/1390 loss:3.3933 train_time:1576102ms step_avg:1195.83ms | |
step:1329/1390 loss:3.4888 train_time:1577337ms step_avg:1195.86ms | |
step:1330/1390 loss:3.4990 train_time:1578598ms step_avg:1195.91ms | |
step:1331/1390 loss:3.4854 train_time:1579872ms step_avg:1195.97ms | |
step:1332/1390 loss:3.5443 train_time:1581118ms step_avg:1196.00ms | |
step:1333/1390 loss:3.4875 train_time:1582353ms step_avg:1196.03ms | |
step:1334/1390 loss:3.4976 train_time:1583600ms step_avg:1196.07ms | |
step:1335/1390 loss:3.5210 train_time:1584855ms step_avg:1196.12ms | |
step:1336/1390 loss:3.4911 train_time:1586102ms step_avg:1196.16ms | |
step:1337/1390 loss:3.4871 train_time:1587350ms step_avg:1196.19ms | |
step:1338/1390 loss:3.4748 train_time:1588587ms step_avg:1196.23ms | |
step:1339/1390 loss:3.4709 train_time:1589830ms step_avg:1196.26ms | |
step:1340/1390 loss:3.4911 train_time:1591060ms step_avg:1196.29ms | |
step:1341/1390 loss:3.4939 train_time:1592307ms step_avg:1196.32ms | |
step:1342/1390 loss:3.5543 train_time:1593544ms step_avg:1196.35ms | |
step:1343/1390 loss:3.5131 train_time:1594788ms step_avg:1196.39ms | |
step:1344/1390 loss:3.4499 train_time:1596047ms step_avg:1196.44ms | |
step:1345/1390 loss:3.3821 train_time:1597306ms step_avg:1196.48ms | |
step:1346/1390 loss:3.4596 train_time:1598577ms step_avg:1196.54ms | |
step:1347/1390 loss:3.4939 train_time:1599834ms step_avg:1196.58ms | |
step:1348/1390 loss:3.4845 train_time:1601097ms step_avg:1196.63ms | |
step:1349/1390 loss:3.4819 train_time:1602342ms step_avg:1196.67ms | |
step:1350/1390 loss:3.4933 train_time:1603600ms step_avg:1196.72ms | |
step:1350/1390 val_loss:3.4379 train_time:1603630ms step_avg:1196.74ms | |
step:1351/1390 loss:3.5056 train_time:1604888ms step_avg:1196.78ms | |
step:1352/1390 loss:3.4224 train_time:1606142ms step_avg:1196.83ms | |
step:1353/1390 loss:3.4774 train_time:1607390ms step_avg:1196.87ms | |
step:1354/1390 loss:3.4597 train_time:1608629ms step_avg:1196.90ms | |
step:1355/1390 loss:3.4120 train_time:1609874ms step_avg:1196.93ms | |
step:1356/1390 loss:3.4890 train_time:1611155ms step_avg:1196.99ms | |
step:1357/1390 loss:3.6476 train_time:1612413ms step_avg:1197.04ms | |
step:1358/1390 loss:3.4887 train_time:1613652ms step_avg:1197.07ms | |
step:1359/1390 loss:3.4156 train_time:1614891ms step_avg:1197.10ms | |
step:1360/1390 loss:3.4472 train_time:1616144ms step_avg:1197.14ms | |
step:1361/1390 loss:3.4116 train_time:1617388ms step_avg:1197.18ms | |
step:1362/1390 loss:3.4556 train_time:1618650ms step_avg:1197.23ms | |
step:1363/1390 loss:3.4071 train_time:1619910ms step_avg:1197.27ms | |
step:1364/1390 loss:3.4366 train_time:1621157ms step_avg:1197.31ms | |
step:1365/1390 loss:3.4732 train_time:1622407ms step_avg:1197.35ms | |
step:1366/1390 loss:3.4852 train_time:1623669ms step_avg:1197.40ms | |
step:1367/1390 loss:3.4683 train_time:1624926ms step_avg:1197.44ms | |
step:1368/1390 loss:3.5189 train_time:1626194ms step_avg:1197.49ms | |
step:1369/1390 loss:3.4775 train_time:1627455ms step_avg:1197.54ms | |
step:1370/1390 loss:3.5424 train_time:1628708ms step_avg:1197.58ms | |
step:1371/1390 loss:3.4396 train_time:1629961ms step_avg:1197.62ms | |
step:1372/1390 loss:3.5064 train_time:1631200ms step_avg:1197.65ms | |
step:1373/1390 loss:3.4757 train_time:1632471ms step_avg:1197.70ms | |
step:1374/1390 loss:3.4373 train_time:1633707ms step_avg:1197.73ms | |
step:1375/1390 loss:3.4671 train_time:1634947ms step_avg:1197.76ms | |
step:1375/1390 val_loss:3.4376 train_time:1634978ms step_avg:1197.79ms | |
step:1376/1390 loss:3.4905 train_time:1636252ms step_avg:1197.84ms | |
step:1377/1390 loss:3.4521 train_time:1637506ms step_avg:1197.88ms | |
step:1378/1390 loss:3.4786 train_time:1638752ms step_avg:1197.92ms | |
step:1379/1390 loss:3.4103 train_time:1639992ms step_avg:1197.95ms | |
step:1380/1390 loss:3.3891 train_time:1641255ms step_avg:1198.00ms | |
step:1381/1390 loss:3.4961 train_time:1642522ms step_avg:1198.05ms | |
step:1382/1390 loss:3.4725 train_time:1643753ms step_avg:1198.07ms | |
step:1383/1390 loss:3.4434 train_time:1645029ms step_avg:1198.13ms | |
step:1384/1390 loss:3.4480 train_time:1646267ms step_avg:1198.16ms | |
step:1385/1390 loss:3.4244 train_time:1647501ms step_avg:1198.18ms | |
step:1386/1390 loss:3.4816 train_time:1648747ms step_avg:1198.22ms | |
step:1387/1390 loss:3.4753 train_time:1649981ms step_avg:1198.24ms | |
step:1388/1390 loss:3.4685 train_time:1651213ms step_avg:1198.27ms | |
step:1389/1390 loss:3.4864 train_time:1652455ms step_avg:1198.30ms | |
step:1390/1390 loss:3.4794 train_time:1653705ms step_avg:1198.34ms | |
step:1390/1390 val_loss:3.4376 train_time:1653736ms step_avg:1198.36ms | |
peak memory consumption: 56227 MiB |
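
For reference, the validation-loss trajectory recorded above can be extracted from this log programmatically. A minimal sketch, assuming the log has been saved to a file named train.log (the filename is hypothetical; the line format is exactly as shown above):

import re

# matches lines like: step:1025/1390 val_loss:3.5196 train_time:1199899ms
val_pattern = re.compile(r"step:(\d+)/\d+ val_loss:([\d.]+)")

vals = []
with open("train.log") as f:  # hypothetical path to the saved log
    for line in f:
        m = val_pattern.search(line)
        if m:
            vals.append((int(m.group(1)), float(m.group(2))))

# print the val_loss curve, e.g. (1025, 3.5196) ... (1390, 3.4376)
for step, vl in vals:
    print(f"step {step}: val_loss {vl:.4f}")

The same pattern, with the capture groups adjusted, works for pulling out per-step train loss or step_avg timings if you want to plot the full curves.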