Created January 16, 2025 00:29
import os
import sys
with open(sys.argv[0]) as f:
    code = f.read() # read the code of this file ASAP, for logging
import uuid
import time
import glob
import subprocess
import contextlib
from dataclasses import dataclass
from pathlib import Path

import torch
import torch._inductor.config as config
torch.empty(1, device='cuda', requires_grad=True).backward() # eagerly initializes CUDA and autograd; works around a startup bug on some systems
from torch import Tensor, nn
import torch.nn.functional as F
import torch.distributed as dist
from torch.distributed.algorithms.ddp_comm_hooks import default_hooks
from torch.nn.parallel import DistributedDataParallel as DDP
# use of FlexAttention contributed by @KoszarskyB
from torch.nn.attention.flex_attention import BlockMask, flex_attention
config.coordinate_descent_tuning = True
# -----------------------------------------------------------------------------
# Muon optimizer

@torch.compile
def zeropower_via_newtonschulz5(G, steps):
    """
    Newton-Schulz iteration to compute the zeroth power / orthogonalization of G. We opt to use a
    quintic iteration whose coefficients are selected to maximize the slope at zero. For the purpose
    of minimizing steps, it turns out to be empirically effective to keep increasing the slope at
    zero even beyond the point where the iteration no longer converges all the way to one everywhere
    on the interval. This iteration therefore does not produce UV^T but rather something like US'V^T
    where S' is diagonal with S_{ii}' ~ Uniform(0.5, 1.5), which turns out not to hurt model
    performance at all relative to UV^T, where USV^T = G is the SVD.
    """
    assert len(G.shape) == 2
    a, b, c = (3.4445, -4.7750, 2.0315)
    X = G.bfloat16()
    if G.size(0) > G.size(1):
        X = X.T
    # Ensure spectral norm is at most 1
    X = X / (X.norm() + 1e-7)
    # Perform the NS iterations
    for _ in range(steps):
        A = X @ X.T
        B = b * A + c * A @ A # adapted from suggestion by @jxbz, @leloykun, and @YouJiacheng
        X = a * X + B @ X
    if G.size(0) > G.size(1):
        X = X.T
    return X
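
# Illustrative sketch (not called during training; assumes a CUDA device): a quick
# check that the quintic iteration above drives the singular values of a random
# matrix into roughly the Uniform(0.5, 1.5) band described in the docstring.
def _check_newtonschulz_orthogonalization():
    G = torch.randn(256, 512, device='cuda')
    X = zeropower_via_newtonschulz5(G, steps=5)
    s = torch.linalg.svdvals(X.float())
    # all singular values should land near 1 (roughly within [0.5, 1.5])
    print(f'singular values: min={s.min():.3f} max={s.max():.3f}')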
class Muon(torch.optim.Optimizer):
    """
    Muon - MomentUm Orthogonalized by Newton-schulz

    Muon internally runs standard SGD-momentum, and then performs an orthogonalization post-
    processing step, in which each 2D parameter's update is replaced with the nearest orthogonal
    matrix. To efficiently orthogonalize each update, we use a Newton-Schulz iteration, which has
    the advantage that it can be stably run in bfloat16 on the GPU.

    Some warnings:
    - This optimizer assumes that all parameters passed in are 2D.
    - It should not be used for the embedding layer, the final fully connected layer, or any {0,1}-D
      parameters; those should all be optimized by a standard method (e.g., AdamW).
    - To use it with 4D convolutional filters, it works well to just flatten their last 3 dimensions.
    - We believe it is unlikely to work well for training with small batch size.
    - We believe it may not work well for finetuning pretrained models, but we haven't tested this.
    - We have not yet tried this optimizer for training scenarios larger than NanoGPT (124M).

    Arguments:
        lr: The learning rate used by the internal SGD.
        momentum: The momentum used by the internal SGD.
        nesterov: Whether to use Nesterov-style momentum in the internal SGD. (recommended)
        ns_steps: The number of Newton-Schulz iteration steps to use.
    """
    def __init__(self, params, lr=0.02, momentum=0.95, nesterov=True, ns_steps=5):
        self.world_size = int(os.environ['WORLD_SIZE'])
        self.rank = int(os.environ['RANK'])
        defaults = dict(lr=lr, momentum=momentum, nesterov=nesterov, ns_steps=ns_steps)
        assert all(isinstance(p, torch.Tensor) for p in params)
        sizes = {p.numel() for p in params}
        param_groups = [dict(params=[p for p in params if p.numel() == size],
                             update_buffer=[torch.empty(size, device='cuda', dtype=torch.bfloat16) for _ in range(self.world_size)])
                        for size in sizes]
        super().__init__(param_groups, defaults)
        # pre-init momentum buffers
        for group in self.param_groups:
            for p in group['params']:
                self.state[p]['momentum_buffer'] = torch.zeros(p.shape, device=p.device, dtype=torch.float)

    def step(self):
        for group in self.param_groups:
            lr = group['lr']
            momentum = group['momentum']
            nesterov = group['nesterov']
            ns_steps = group['ns_steps']
            update_buffers = group['update_buffer']
            # generate weight updates in distributed fashion
            params = group['params']
            handle = None
            params_world = None
            def update_prev():
                if params_world is None:
                    return
                assert handle is not None
                handle.wait()
                for p_world, g_world in zip(params_world, update_buffers):
                    p_world.data.add_(
                        g_world.view_as(p_world),
                        alpha=-lr * max(1, p_world.size(0) / p_world.size(1)) ** 0.5,
                    )
            for base_i in range(len(params))[::self.world_size]:
                if base_i + self.rank < len(params):
                    p = params[base_i + self.rank]
                    g = p.grad
                    assert g is not None
                    state = self.state[p]
                    buf = state['momentum_buffer']
                    buf.lerp_(g, 1 - momentum)
                    g = g.lerp_(buf, momentum) if nesterov else buf
                    g = zeropower_via_newtonschulz5(g, steps=ns_steps).flatten()
                else:
                    g = update_buffers[self.rank]
                update_prev() # async all_gather instead of sync all_reduce by @YouJiacheng
                handle = dist.all_gather(update_buffers, g, async_op=True)
                params_world = params[base_i : base_i + self.world_size]
            update_prev()
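
# Illustrative usage sketch (not called here): Muon only sees the 2D hidden matrices;
# embeddings, the head, and scalar parameters go to an elementwise optimizer instead,
# mirroring the split performed further below. Requires torchrun's RANK/WORLD_SIZE env
# vars and an initialized process group.
def _example_muon_setup(model):
    matrix_params = [p for p in model.blocks.parameters() if p.ndim == 2]
    other_params = [p for p in model.parameters() if p.ndim != 2]
    muon = Muon(matrix_params, lr=0.05, momentum=0.95, nesterov=True, ns_steps=5)
    adam = torch.optim.Adam(other_params, lr=0.04, betas=(0.8, 0.95), fused=True)
    return muon, adam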
# -----------------------------------------------------------------------------
# PyTorch nn.Module definitions for the GPT-2 model

# semi-orthogonal initialization @fernbear.bsky.social
def semi_orthogonal_init(dim_in, dim_out, steps=5):
    # default pytorch linear layer init: Uniform(-1/sqrt(dim_in), 1/sqrt(dim_in))
    weight = (1 / dim_in) ** .5 * (2 * torch.rand(dim_out, dim_in, device='cuda') - 1.)
    w_std = weight.std(dim=0).mean()
    weight = zeropower_via_newtonschulz5(weight, steps=steps)
    weight *= w_std / weight.std(dim=0).mean().add_(1e-8)
    return weight.float()
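
# Illustrative sketch (not called during training; assumes a CUDA device): the init
# above should yield mutually near-orthogonal rows while matching the per-column std
# of the default init, so W W^T should be close to a multiple of the identity.
def _check_semi_orthogonal_init():
    w = semi_orthogonal_init(768, 768)
    gram = w @ w.T
    off_diag = gram - torch.diag(gram.diagonal())
    print(f'mean |off-diagonal| of W W^T: {off_diag.abs().mean():.4f}')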
# fused scalar <-> RMSNorm @fernbear.bsky.social
class RMSNormScalar(nn.Module):
    def __init__(self, init_val=.5413):
        super().__init__()
        self.scale = torch.nn.Parameter(torch.tensor(init_val)) # softplus of the default value is ~1
    def forward(self, x):
        norm = x.float().norm(dim=-1, keepdim=True).type_as(x)
        norm = norm.add(torch.finfo(norm.dtype).eps)
        rms_inv = (F.softplus(self.scale.type_as(x)) * x.shape[-1] ** .5) / norm
        return x * rms_inv
class CastedLinear(nn.Linear):
    def __init__(self, in_features, out_features):
        super().__init__(in_features, out_features, bias=False)
    def forward(self, x):
        return F.linear(x, self.weight.type_as(x))

class Rotary(nn.Module):
    def __init__(self, dim, max_seq_len=65536):
        super().__init__()
        # half-truncate RoPE by @YouJiacheng
        angular_freq = (1 / 1024) ** torch.linspace(0, 1, steps=dim//4, dtype=torch.float32)
        angular_freq = torch.cat([angular_freq, angular_freq.new_zeros(dim//4)])
        t = torch.arange(max_seq_len, dtype=torch.float32)
        theta = torch.einsum('i,j -> ij', t, angular_freq)
        self.cos = nn.Buffer(theta.cos(), persistent=False)
        self.sin = nn.Buffer(theta.sin(), persistent=False)
    def forward(self, x):
        cos, sin = self.cos[None, :x.size(-3), None, :], self.sin[None, :x.size(-3), None, :]
        x1, x2 = x.float().chunk(2, dim=-1)
        y1 = x1 * cos + x2 * sin
        y2 = x1 * (-sin) + x2 * cos
        return torch.cat((y1, y2), 3).type_as(x)
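
# Note on the half-truncate trick above: the second half of angular_freq is zeroed,
# so theta=0 (cos=1, sin=0) for those dimensions, and the corresponding feature
# pairs pass through unrotated; only the first half of each head's pair-dimensions
# carries positional information.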
class CausalSelfAttention(nn.Module):
    def __init__(self, dim, num_heads, v_res=True):
        super().__init__()
        assert dim % num_heads == 0
        self.num_heads = num_heads
        self.c_q = nn.Parameter(semi_orthogonal_init(dim, dim))
        self.c_k = nn.Parameter(semi_orthogonal_init(dim, dim))
        self.c_v = nn.Parameter(semi_orthogonal_init(dim, dim))
        self.qkv_scale = nn.Parameter(torch.ones(3*dim))
        self.q_norm = RMSNormScalar()
        self.k_norm = RMSNormScalar()
        if v_res:
            self.v_lambda = nn.Parameter(torch.tensor([-.4328])) # softplus(-0.4328) ~= 0.5
        self.rotary = Rotary(dim // num_heads)
        self.c_proj = nn.Parameter(torch.zeros(dim, dim))
        self.c_proj_scale = nn.Parameter(torch.ones(dim))
    def forward(self, x, ve, block_mask):
        B, T = x.size(0), x.size(1) # batch size, sequence length
        assert B == 1, 'Must use batch size = 1 for FlexAttention'
        qkv_weight = (self.qkv_scale.unsqueeze(1) * torch.cat([self.c_q, self.c_k, self.c_v], dim=0)).type_as(x) # fuse weights @fernbear.bsky.social
        q, k, v = F.linear(x, qkv_weight).view(B, T, 3*self.num_heads, -1).chunk(3, dim=-2)
        if ve is not None: # skip mid-layers' token value embeddings by @YouJiacheng
            v = v + F.softplus(self.v_lambda.type_as(v)) * ve.view_as(v) # @KoszarskyB & @Grad62304977 & @fernbear.bsky.social
        q, k = self.q_norm(q), self.k_norm(k) # QK norm @Grad62304977, learnable scalar @brendanh0gan
        q, k = self.rotary(q), self.rotary(k)
        y = flex_attention(q.transpose(1, 2), k.transpose(1, 2), v.transpose(1, 2), block_mask=block_mask)
        y = y.transpose(1, 2).contiguous().view_as(x) # re-assemble all head outputs side by side
        y = F.linear(y, (self.c_proj_scale * self.c_proj).type_as(x))
        return y
class MLP(nn.Module):
    def __init__(self, dim, expand=4):
        super().__init__()
        expand_dim = expand * dim
        self.c_fc_scale = nn.Parameter(torch.ones(dim))
        self.c_fc = nn.Parameter(semi_orthogonal_init(dim, expand_dim))
        self.c_proj = nn.Parameter(torch.zeros(dim, expand_dim))
        self.c_proj_scale = nn.Parameter(torch.ones(dim))
    def forward(self, x):
        x = F.linear(x, (self.c_fc * self.c_fc_scale.unsqueeze(0)).type_as(x)) # fuse weight & weight_scale mults
        x = F.relu(x).square() # https://arxiv.org/abs/2109.08668v2; ~1-2% better than GELU; suggested by @SKYLINEZ007 and @Grad62304977
        x = F.linear(x, (self.c_proj_scale.unsqueeze(1) * self.c_proj).type_as(x))
        return x
class Block(nn.Module):
    def __init__(self, model_dim, num_heads, block_num, use_attn=True):
        super().__init__()
        self.attn = CausalSelfAttention(model_dim, num_heads, v_res=(block_num in [0, 1, 2, 9, 10, 11])) if use_attn else None
        self.attn_norm = RMSNormScalar(-.4328) if self.attn is not None else None
        self.mlp = MLP(model_dim)
        self.mlp_norm = RMSNormScalar()
        self.lambdas = nn.Parameter(torch.tensor([1., 0.]))
    def forward(self, x, ve, x0, block_mask):
        x = self.lambdas[0] * x + self.lambdas[1] * x0
        if self.attn is not None:
            x = x + self.attn(self.attn_norm(x), ve, block_mask)
        x = x + self.mlp(self.mlp_norm(x))
        return x

class ValueEmbedding(nn.Module):
    def __init__(self, vocab_size, model_dim):
        super().__init__()
        self.embed = nn.ModuleList([nn.Embedding(vocab_size, model_dim).bfloat16() for _ in range(3)])
    def forward(self, inputs):
        ve = [emb(inputs) for emb in self.embed]
        # 012 ... 012 structure on token value embeddings by @YouJiacheng, improved on @leloykun's U-net structure
        ve = [ve[0], ve[1], ve[2], None, None, None, None, None, None, ve[0], ve[1], ve[2]]
        return ve
# -----------------------------------------------------------------------------
# The main GPT-2 model

class GPT(nn.Module):
    def __init__(self, vocab_size, num_layers, num_heads, model_dim):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, model_dim)
        self.embed_norm = RMSNormScalar()
        # skip attention of blocks.7 (the 8th layer) by @YouJiacheng
        self.blocks = nn.ModuleList([Block(model_dim, num_heads, i, use_attn=(i != 7))
                                     for i in range(num_layers)])
        # token value embeddings by @KoszarskyB - inspired by @Grad62304977's value residual learning
        # U-net structure on token value embeddings by @leloykun
        self.value_embeds = ValueEmbedding(vocab_size, model_dim)
        self.lm_head = nn.Parameter(torch.zeros(vocab_size, model_dim)) # zero init - @Grad62304977
        self.lm_head_scale = nn.Parameter(torch.ones(model_dim))
        self.out_norm = RMSNormScalar()
        self.out_bias = nn.Embedding(vocab_size, model_dim)
        # U-net design by @brendanh0gan
        self.num_encoder_layers = num_layers // 2 # half of the layers for the encoder
        self.num_decoder_layers = num_layers - self.num_encoder_layers # remaining for the decoder
        # add learnable skip connection weights for decoder layers
        self.skip_weights = nn.Parameter(torch.ones(self.num_decoder_layers))
        self.register_buffer('noise_scale', torch.tensor(0.1))

    def forward(self, inputs, targets, sliding_window_num_blocks):
        BLOCK_SIZE = 128
        seq_len = len(inputs)
        assert seq_len % BLOCK_SIZE == 0
        total_num_blocks = seq_len // BLOCK_SIZE
        assert inputs.ndim == 1
        docs = (inputs == 50256).cumsum(0)
        docs_low = docs.view(-1, BLOCK_SIZE)[:, 0].contiguous()
        docs_high = docs.view(-1, BLOCK_SIZE)[:, -1].contiguous()

        def document_causal(b, h, q_idx, kv_idx):
            causal_mask = q_idx >= kv_idx
            document_mask = docs[q_idx] == docs[kv_idx]
            return causal_mask & document_mask

        def dense_to_ordered(dense_mask):
            num_blocks = dense_mask.sum(dim=-1, dtype=torch.int32)
            indices = dense_mask.argsort(dim=-1, descending=True, stable=True).to(torch.int32)
            return num_blocks[None, None].contiguous(), indices[None, None].contiguous()

        def create_doc_swc_block_mask(sliding_window_num_blocks):
            kv_idx = block_idx = torch.arange(total_num_blocks, dtype=torch.int32, device='cuda')
            q_idx = block_idx[:, None]
            causal_bm = q_idx >= kv_idx
            causal_full_bm = q_idx > kv_idx
            window_bm = q_idx - kv_idx < sliding_window_num_blocks
            window_full_bm = window_bm # block-wise sliding window by @YouJiacheng
            # document_bm = (docs_low[q_idx] <= docs_high[kv_idx]) & (docs_low[kv_idx] <= docs_high[q_idx])
            document_bm = (docs_low[:, None] <= docs_high) & (docs_low <= docs_high[:, None])
            document_full_bm = (docs_low[:, None] == docs_high) & (docs_low == docs_high[:, None])
            nonzero_bm = causal_bm & window_bm & document_bm
            full_bm = causal_full_bm & window_full_bm & document_full_bm
            kv_num_blocks, kv_indices = dense_to_ordered(nonzero_bm & ~full_bm)
            full_kv_num_blocks, full_kv_indices = dense_to_ordered(full_bm)
            return BlockMask.from_kv_blocks(
                kv_num_blocks,
                kv_indices,
                full_kv_num_blocks,
                full_kv_indices,
                BLOCK_SIZE=BLOCK_SIZE,
                mask_mod=document_causal,
            )

        block_mask = create_doc_swc_block_mask(sliding_window_num_blocks)
        embeds = self.embed(inputs[None])
        noise = self.noise_scale * torch.fmod(torch.randn_like(embeds), 2.) # input noising by schedule by @brendanh0gan
        x0 = self.embed_norm(embeds + noise).bfloat16() # use of norm here by @Grad62304977, scalar norm by @fernbear.bsky.social
        x = x0
        ve = self.value_embeds(inputs)
        assert len(ve) == len(self.blocks)
        ve_enc, ve_dec = ve[:self.num_encoder_layers], ve[self.num_encoder_layers:]
        # store outputs for U-Net skip connections
        skip_connections = []
        # encoder pass - process only the first half of the blocks
        for i in range(self.num_encoder_layers):
            x = self.blocks[i](x, ve_enc[i], x0, block_mask)
            skip_connections.append(x)
        # decoder pass - process the remaining blocks with weighted skip connections
        for i in range(self.num_decoder_layers):
            x = x + self.skip_weights[i] * skip_connections.pop()
            # U-net structure on token value embeddings by @leloykun
            x = self.blocks[self.num_encoder_layers + i](x, ve_dec[i], x0, block_mask)
        x = self.out_norm(x - x0 + self.out_bias(inputs).bfloat16()) # input subtract, per-token output bias by @fernbear.bsky.social
        logits = F.linear(x, ((self.lm_head_scale / 7.5) * self.lm_head).type_as(x)) # fuse sigmoid scale & weight scale @fernbear.bsky.social
        logits = 30 * torch.sigmoid(logits) # @Grad62304977 added tanh softcapping, @KoszarskyB reduced it from 30 to 15, @YouJiacheng tanh -> sigmoid + different scaling
        logits = logits.float()
        loss = F.cross_entropy(logits.view(-1, logits.size(-1)), targets)
        return loss
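
# Illustrative sketch (not called during training): the document-causal rule above,
# materialized as a dense boolean mask on a toy token sequence. Token 50256 ends a
# document; positions may only attend backwards within their own document. The toy
# token ids here are hypothetical.
def _example_document_causal_mask():
    inputs = torch.tensor([5, 6, 50256, 7, 8, 9]) # two documents
    docs = (inputs == 50256).cumsum(0)
    T = len(inputs)
    q = torch.arange(T)[:, None]
    kv = torch.arange(T)[None, :]
    mask = (q >= kv) & (docs[:, None] == docs[None, :])
    print(mask.int())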
# -----------------------------------------------------------------------------
# Our own simple Distributed Data Loader

def _load_data_shard(path):
    # the header is 256 int32 values; the uint16 tokens follow it
    header = torch.from_file(path, False, 256, dtype=torch.int32)
    assert header[0] == 20240520, 'magic number mismatch in the data .bin file'
    assert header[1] == 1, 'unsupported version'
    num_tokens = int(header[2]) # number of tokens (claimed)
    with open(path, 'rb', buffering=0) as f:
        tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) # avoid pin_memory copy by @YouJiacheng
        f.seek(256 * 4) # skip the header
        nbytes = f.readinto(tokens.numpy()) # avoid bytes->array copy by @YouJiacheng
        assert nbytes == 2 * num_tokens, 'number of tokens read does not match header'
    return tokens
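
# Illustrative sketch (not used by this script): writing a shard in the format read
# above - a 256-int32 header (magic 20240520, version 1, token count) followed by
# uint16 token ids. The path and tokens passed in are hypothetical.
def _write_data_shard(path, tokens):
    import numpy as np
    header = np.zeros(256, dtype=np.int32)
    header[0] = 20240520 # magic
    header[1] = 1        # version
    header[2] = len(tokens)
    with open(path, 'wb') as f:
        f.write(header.tobytes())
        f.write(np.asarray(tokens, dtype=np.uint16).tobytes())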
class DistributedDataLoader:
    def __init__(self, filename_pattern):
        self.rank = int(os.environ['RANK'])
        self.world_size = int(os.environ['WORLD_SIZE'])
        self.files = sorted(glob.glob(filename_pattern))
        self.reset()

    def reset(self):
        self.current_shard = -1
        self.advance()

    def advance(self):
        self.current_shard = (self.current_shard + 1) % len(self.files)
        self.current_position = 0
        self.tokens = _load_data_shard(self.files[self.current_shard])

    def next_batch(self, batch_size):
        assert batch_size % self.world_size == 0
        device_batch_size = batch_size // self.world_size
        # load next shard if necessary
        if self.current_position + batch_size + 1 >= len(self.tokens):
            self.advance()
        pos = self.current_position + self.rank * device_batch_size
        device_batch_tokens = self.tokens[pos:pos + device_batch_size + 1]
        # advance current position
        self.current_position += batch_size
        inputs = device_batch_tokens[:-1].to(device='cuda', dtype=torch.int32, non_blocking=True)
        targets = device_batch_tokens[1:].to(device='cuda', dtype=torch.int64, non_blocking=True)
        return inputs, targets
# -----------------------------------------------------------------------------
# int main

@dataclass
class Hyperparameters:
    # data
    train_bin = 'data/fineweb10B/fineweb_train_*.bin' # input .bin to train on
    val_bin = 'data/fineweb10B/fineweb_val_*.bin' # input .bin to eval validation loss on
    # optimization
    batch_size = 8*64*1024 # batch size in tokens
    max_device_batch_size = 64*1024 # batch size per device in tokens
    num_iterations = 1330 # number of iterations to run
    cooldown_frac = 0.4 # fraction of training spent cooling down the learning rate
    bf16_embeds = True
    # evaluation and logging
    val_loss_every = 125 # every how many steps to evaluate val loss? 0 for only at the end
    val_tokens = 10485760 # how many tokens of validation data? it's important to keep this fixed for consistent comparisons
    # implementation
    save_checkpoint = False
args = Hyperparameters()
micro_bs = args.max_device_batch_size

# set up DDP (distributed data parallel). torchrun sets these env variables
rank = int(os.environ['RANK'])
local_rank = int(os.environ['LOCAL_RANK'])
world_size = int(os.environ['WORLD_SIZE'])
assert torch.cuda.is_available()
torch.cuda.set_device(local_rank)
dist.init_process_group(backend='nccl', device_id=torch.device(local_rank))
dist.barrier()
master_process = (rank == 0) # this process will do logging, checkpointing etc.

# begin logging
logfile = None
if master_process:
    run_id = uuid.uuid4()
    os.makedirs('logs', exist_ok=True)
    logfile = f'logs/{run_id}.txt'
    print(logfile)
def print0(s, console=False):
    if master_process:
        with open(logfile, 'a') as f:
            if console:
                print(s)
            print(s, file=f)

# begin by printing this file (the Python code)
print0(code)
print0('='*100)
# log information about the hardware/software environment this is running on
print0(f'Running Python {sys.version}')
print0(f'Running PyTorch {torch.version.__version__} compiled for CUDA {torch.version.cuda}')
print0(subprocess.run(['nvidia-smi'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True).stdout)
print0('='*100)
# load data
train_loader = DistributedDataLoader(args.train_bin)
val_loader = DistributedDataLoader(args.val_bin)
print0(f'Training dataloader files: {train_loader.files}')
print0(f'Validation dataloader files: {val_loader.files}')
print0('='*100)

# there are only 50257 unique GPT-2 tokens; we extend to the nearest multiple of 128 for efficiency.
# suggested to me by @Grad62304977; this originates from Karpathy's experiments.
model = GPT(vocab_size=50304, num_layers=12, num_heads=6, model_dim=768)
model = model.cuda()
if args.bf16_embeds:
    for m in model.modules():
        if isinstance(m, nn.Embedding):
            m.bfloat16()
model = torch.compile(model)
ddp_model = DDP(model, device_ids=[local_rank], broadcast_buffers=False, gradient_as_bucket_view=True)

# collect the parameters to optimize
hidden_matrix_params = [p for p in model.blocks.parameters() if p.ndim == 2]
embed_params = [model.out_bias.weight, model.embed.weight, *model.value_embeds.parameters()]
scalar_params = [p for p in model.parameters() if p.ndim < 2]
head_params = [model.lm_head]
# init the optimizer(s)
optimizer1 = torch.optim.Adam([dict(params=embed_params, lr=0.6),
                               dict(params=head_params, lr=0.008),
                               dict(params=scalar_params, lr=0.04)],
                              betas=(0.8, 0.95), fused=True)
optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95)
optimizers = [optimizer1, optimizer2]
# learning rate schedule: stable then decay
def get_lr(it):
    t = 1 - it / args.num_iterations # time remaining in training
    assert 1 >= t >= 0
    w = min(t / args.cooldown_frac, 1.0) # 1 -> 0
    return w * 1.0 + (1 - w) * 0.1
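# Worked example of the schedule: with num_iterations=1330 and cooldown_frac=0.4, the
# multiplier stays at 1.0 through step 798, then decays linearly to 0.1 at the end:
#   get_lr(0) == 1.0, get_lr(798) == 1.0, get_lr(1064) == 0.55, get_lr(1330) == 0.1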
schedulers = [torch.optim.lr_scheduler.LambdaLR(opt, get_lr) for opt in optimizers]

# sliding window size schedule: linear increase over training in chunks of 128 from 128 -> 1792. By @fernbear.bsky.social
def get_sliding_window_blocks(it):
    x = it / args.num_iterations # training progress
    x = max(min(x, 1.), 0.)
    return int(((1 - x) * 128 + x * 1856) // 128)
sliding_window_num_blocks = torch.tensor(1, dtype=torch.int32, device='cuda')
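# Endpoints of the window schedule above: it=0 gives 1 block (128 tokens);
# it=num_iterations gives floor(1856/128) = 14 blocks (1792 tokens), matching the
# "128 -> 1792" comment.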
# Start training loop
training_time_ms = 0
# start the clock
torch.cuda.synchronize()
t0 = time.perf_counter()
# begin training
train_steps = args.num_iterations
for step in range(train_steps + 1):
    last_step = (step == train_steps)
    # This effectively ignores the timing of the first 10 steps, which are slower for weird reasons.
    # Alternately, and slightly more correctly in terms of benchmarking, we could do 10
    # steps with dummy data first, and then re-initialize the model and reset the loader.
    if step == 10:
        training_time_ms = 0
        t0 = time.perf_counter()
    timed_steps = float('nan') if step <= 11 else (step - 10) + 1 # <= 11 to avoid bug in val
    sliding_window_num_blocks.copy_(get_sliding_window_blocks(step), non_blocking=True)
    # Update the noise scale: cosine schedule from 0.01 down through 0 to -0.01 over training;
    # since the noise is symmetric, the effective noise magnitude follows |cos|, reaching 0 mid-training.
    progress = step / args.num_iterations
    noise_scale = torch.cos(torch.tensor(progress * torch.pi)).mul_(.01)
    model.noise_scale.copy_(noise_scale, non_blocking=True) # update model with new noise scale
    # --------------- VALIDATION SECTION -----------------
    if last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0):
        # stop the clock
        torch.cuda.synchronize()
        training_time_ms += 1000 * (time.perf_counter() - t0)
        # run validation batches
        model.eval()
        val_loader.reset()
        val_loss = 0.0
        # temporarily zero out the noise scale, caching the training value
        noise_scale_cache = model.noise_scale.clone()
        model.noise_scale.zero_()
        # calculate the number of steps to take in the val loop
        val_batch_size = world_size * micro_bs
        assert args.val_tokens % val_batch_size == 0
        val_steps = args.val_tokens // val_batch_size
        for _ in range(val_steps):
            with torch.no_grad():
                inputs_val, targets_val = val_loader.next_batch(val_batch_size)
                val_loss += ddp_model(inputs_val, targets_val, sliding_window_num_blocks)
        dist.all_reduce(val_loss, op=dist.ReduceOp.AVG)
        val_loss /= val_steps
        # logging
        print0(f'step:{step}/{train_steps} val_loss:{val_loss:.4f} train_time:{training_time_ms:.0f}ms step_avg:{training_time_ms/(timed_steps-1):.2f}ms', console=True)
        # restore the cached noise scale
        model.noise_scale.copy_(noise_scale_cache, non_blocking=True)
        # start the clock again
        torch.cuda.synchronize()
        t0 = time.perf_counter()
    if last_step:
        if master_process and args.save_checkpoint:
            log = dict(step=step, code=code, model=model.state_dict(), optimizers=[opt.state_dict() for opt in optimizers])
            os.makedirs(f'logs/{run_id}', exist_ok=True)
            torch.save(log, f'logs/{run_id}/state_step{step:06d}.pt')
        # the last step only has the validation loop, so break to avoid training
        break
    # --------------- TRAINING SECTION -----------------
    model.train()
    batch_size = args.batch_size
    assert batch_size % world_size == 0
    inputs_train, targets_train = train_loader.next_batch(batch_size)
    assert len(inputs_train) <= micro_bs or len(inputs_train) % micro_bs == 0
    for micro_inputs_train, micro_targets_train in zip(inputs_train.split(micro_bs), targets_train.split(micro_bs)):
        ddp_model(micro_inputs_train, micro_targets_train, sliding_window_num_blocks).mul(world_size).backward()
    # momentum warmup for Muon
    frac = min(step / 300, 1)
    for group in optimizer2.param_groups:
        group['momentum'] = (1 - frac) * 0.85 + frac * 0.95
    # step the optimizers and schedulers
    for opt, sched in zip(optimizers, schedulers):
        opt.step()
        if step != train_steps - 1:
            sched.step()
    # null the gradients
    model.zero_grad(set_to_none=True)
    # logging
    approx_time = training_time_ms + 1000 * (time.perf_counter() - t0)
    print0(f'step:{step+1}/{train_steps} train_time:{approx_time:.0f}ms step_avg:{approx_time/timed_steps:.2f}ms', console=True)

print0(f'peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB')
dist.destroy_process_group()
====================================================================================================
Running Python 3.12.7 (main, Jan 15 2025, 18:11:24) [GCC 13.2.0]
Running PyTorch 2.7.0.dev20250107+cu126 compiled for CUDA 12.6
Wed Jan 15 21:07:51 2025
+-----------------------------------------------------------------------------------------+
| NVIDIA-SMI 550.127.05 Driver Version: 550.127.05 CUDA Version: 12.6 |
|-----------------------------------------+------------------------+----------------------+
| GPU Name Persistence-M | Bus-Id Disp.A | Volatile Uncorr. ECC |
| Fan Temp Perf Pwr:Usage/Cap | Memory-Usage | GPU-Util Compute M. |
| | | MIG M. |
|=========================================+========================+======================|
| 0 NVIDIA H100 80GB HBM3 On | 00000000:61:00.0 Off | 0 |
| N/A 28C P0 115W / 700W | 7746MiB / 81559MiB | 0% Default |
| | | Disabled |
+-----------------------------------------+------------------------+----------------------+
| 1 NVIDIA H100 80GB HBM3 On | 00000000:62:00.0 Off | 0 |
| N/A 36C P0 122W / 700W | 3456MiB / 81559MiB | 0% Default |
| | | Disabled |
+-----------------------------------------+------------------------+----------------------+
| 2 NVIDIA H100 80GB HBM3 On | 00000000:63:00.0 Off | 0 |
| N/A 37C P0 125W / 700W | 3456MiB / 81559MiB | 0% Default |
| | | Disabled |
+-----------------------------------------+------------------------+----------------------+
| 3 NVIDIA H100 80GB HBM3 On | 00000000:64:00.0 Off | 0 |
| N/A 28C P0 117W / 700W | 3456MiB / 81559MiB | 0% Default |
| | | Disabled |
+-----------------------------------------+------------------------+----------------------+
| 4 NVIDIA H100 80GB HBM3 On | 00000000:6A:00.0 Off | 0 |
| N/A 30C P0 119W / 700W | 3456MiB / 81559MiB | 0% Default |
| | | Disabled |
+-----------------------------------------+------------------------+----------------------+
| 5 NVIDIA H100 80GB HBM3 On | 00000000:6B:00.0 Off | 0 |
| N/A 40C P0 128W / 700W | 3456MiB / 81559MiB | 0% Default |
| | | Disabled |
+-----------------------------------------+------------------------+----------------------+
| 6 NVIDIA H100 80GB HBM3 On | 00000000:6C:00.0 Off | 0 |
| N/A 35C P0 114W / 700W | 3456MiB / 81559MiB | 0% Default |
| | | Disabled |
+-----------------------------------------+------------------------+----------------------+
| 7 NVIDIA H100 80GB HBM3 On | 00000000:6D:00.0 Off | 0 |
| N/A 28C P0 115W / 700W | 3216MiB / 81559MiB | 0% Default |
| | | Disabled |
+-----------------------------------------+------------------------+----------------------+
+-----------------------------------------------------------------------------------------+
| Processes: |
| GPU GI CI PID Type Process name GPU Memory |
| ID ID Usage |
|=========================================================================================|
+-----------------------------------------------------------------------------------------+
====================================================================================================
Training dataloader files: ['data/fineweb10B/fineweb_train_000001.bin', 'data/fineweb10B/fineweb_train_000002.bin', 'data/fineweb10B/fineweb_train_000003.bin', 'data/fineweb10B/fineweb_train_000004.bin', 'data/fineweb10B/fineweb_train_000005.bin', 'data/fineweb10B/fineweb_train_000006.bin', 'data/fineweb10B/fineweb_train_000007.bin', 'data/fineweb10B/fineweb_train_000008.bin', 'data/fineweb10B/fineweb_train_000009.bin', 'data/fineweb10B/fineweb_train_000010.bin']
Validation dataloader files: ['data/fineweb10B/fineweb_val_000000.bin']
====================================================================================================
step:0/1330 val_loss:10.8258 train_time:2ms step_avg:nanms
step:1/1330 train_time:27929ms step_avg:nanms
step:2/1330 train_time:28275ms step_avg:nanms
step:3/1330 train_time:28400ms step_avg:nanms
step:4/1330 train_time:28530ms step_avg:nanms
step:5/1330 train_time:28659ms step_avg:nanms
step:6/1330 train_time:28789ms step_avg:nanms
step:7/1330 train_time:28920ms step_avg:nanms
step:8/1330 train_time:29049ms step_avg:nanms
step:9/1330 train_time:29181ms step_avg:nanms
step:10/1330 train_time:29316ms step_avg:nanms
step:11/1330 train_time:132ms step_avg:nanms
step:12/1330 train_time:263ms step_avg:nanms
step:13/1330 train_time:393ms step_avg:131.11ms
step:14/1330 train_time:523ms step_avg:130.83ms
step:15/1330 train_time:654ms step_avg:130.79ms
step:16/1330 train_time:786ms step_avg:130.95ms
step:17/1330 train_time:919ms step_avg:131.27ms
step:18/1330 train_time:1052ms step_avg:131.48ms
step:19/1330 train_time:1183ms step_avg:131.46ms
step:20/1330 train_time:1315ms step_avg:131.53ms
step:21/1330 train_time:1447ms step_avg:131.50ms
step:22/1330 train_time:1577ms step_avg:131.38ms
step:23/1330 train_time:1708ms step_avg:131.38ms
step:24/1330 train_time:1839ms step_avg:131.36ms
step:25/1330 train_time:1972ms step_avg:131.46ms
step:26/1330 train_time:2103ms step_avg:131.42ms
step:27/1330 train_time:2235ms step_avg:131.45ms
step:28/1330 train_time:2369ms step_avg:131.62ms
step:29/1330 train_time:2501ms step_avg:131.62ms
step:30/1330 train_time:2633ms step_avg:131.63ms
step:31/1330 train_time:2764ms step_avg:131.62ms
step:32/1330 train_time:2896ms step_avg:131.62ms
step:33/1330 train_time:3029ms step_avg:131.68ms
step:34/1330 train_time:3161ms step_avg:131.71ms
step:35/1330 train_time:3294ms step_avg:131.75ms
step:36/1330 train_time:3425ms step_avg:131.74ms
step:37/1330 train_time:3557ms step_avg:131.75ms
step:38/1330 train_time:3690ms step_avg:131.79ms
step:39/1330 train_time:3820ms step_avg:131.74ms
step:40/1330 train_time:3954ms step_avg:131.80ms
step:41/1330 train_time:4088ms step_avg:131.88ms
step:42/1330 train_time:4221ms step_avg:131.91ms
step:43/1330 train_time:4354ms step_avg:131.93ms
step:44/1330 train_time:4486ms step_avg:131.93ms
step:45/1330 train_time:4618ms step_avg:131.95ms
step:46/1330 train_time:4749ms step_avg:131.92ms
step:47/1330 train_time:4880ms step_avg:131.90ms
step:48/1330 train_time:5012ms step_avg:131.90ms
step:49/1330 train_time:5142ms step_avg:131.85ms
step:50/1330 train_time:5276ms step_avg:131.90ms
step:51/1330 train_time:5409ms step_avg:131.94ms
step:52/1330 train_time:5540ms step_avg:131.91ms
step:53/1330 train_time:5671ms step_avg:131.89ms
step:54/1330 train_time:5802ms step_avg:131.87ms
step:55/1330 train_time:5933ms step_avg:131.84ms
step:56/1330 train_time:6066ms step_avg:131.87ms
step:57/1330 train_time:6198ms step_avg:131.88ms
step:58/1330 train_time:6331ms step_avg:131.89ms
step:59/1330 train_time:6462ms step_avg:131.89ms
step:60/1330 train_time:6594ms step_avg:131.88ms
step:61/1330 train_time:6726ms step_avg:131.88ms
step:62/1330 train_time:6859ms step_avg:131.89ms
step:63/1330 train_time:6991ms step_avg:131.90ms
step:64/1330 train_time:7122ms step_avg:131.90ms
step:65/1330 train_time:7255ms step_avg:131.90ms
step:66/1330 train_time:7388ms step_avg:131.92ms
step:67/1330 train_time:7520ms step_avg:131.93ms
step:68/1330 train_time:7652ms step_avg:131.94ms
step:69/1330 train_time:7783ms step_avg:131.91ms
step:70/1330 train_time:7914ms step_avg:131.90ms
step:71/1330 train_time:8045ms step_avg:131.88ms
step:72/1330 train_time:8177ms step_avg:131.88ms
step:73/1330 train_time:8309ms step_avg:131.89ms
step:74/1330 train_time:8441ms step_avg:131.89ms
step:75/1330 train_time:8574ms step_avg:131.91ms
step:76/1330 train_time:8706ms step_avg:131.91ms
step:77/1330 train_time:8838ms step_avg:131.91ms
step:78/1330 train_time:8971ms step_avg:131.92ms
step:79/1330 train_time:9103ms step_avg:131.92ms
step:80/1330 train_time:9235ms step_avg:131.93ms
step:81/1330 train_time:9367ms step_avg:131.93ms
step:82/1330 train_time:9500ms step_avg:131.94ms
step:83/1330 train_time:9632ms step_avg:131.94ms
step:84/1330 train_time:9764ms step_avg:131.94ms
step:85/1330 train_time:9896ms step_avg:131.95ms
step:86/1330 train_time:10030ms step_avg:131.97ms
step:87/1330 train_time:10162ms step_avg:131.97ms
step:88/1330 train_time:10294ms step_avg:131.98ms
step:89/1330 train_time:10428ms step_avg:132.00ms
step:90/1330 train_time:10560ms step_avg:132.00ms
step:91/1330 train_time:10691ms step_avg:131.99ms
step:92/1330 train_time:10823ms step_avg:131.99ms
step:93/1330 train_time:10955ms step_avg:131.99ms
step:94/1330 train_time:11088ms step_avg:132.00ms
step:95/1330 train_time:11221ms step_avg:132.02ms
step:96/1330 train_time:11354ms step_avg:132.03ms
step:97/1330 train_time:11488ms step_avg:132.05ms
step:98/1330 train_time:11620ms step_avg:132.04ms
step:99/1330 train_time:11751ms step_avg:132.04ms
step:100/1330 train_time:11884ms step_avg:132.04ms
step:101/1330 train_time:12020ms step_avg:132.09ms
step:102/1330 train_time:12156ms step_avg:132.13ms
step:103/1330 train_time:12293ms step_avg:132.18ms
step:104/1330 train_time:12427ms step_avg:132.20ms
step:105/1330 train_time:12561ms step_avg:132.22ms
step:106/1330 train_time:12697ms step_avg:132.26ms
step:107/1330 train_time:12832ms step_avg:132.29ms
step:108/1330 train_time:12967ms step_avg:132.32ms
step:109/1330 train_time:13100ms step_avg:132.32ms
step:110/1330 train_time:13236ms step_avg:132.36ms
step:111/1330 train_time:13374ms step_avg:132.41ms
step:112/1330 train_time:13508ms step_avg:132.43ms
step:113/1330 train_time:13643ms step_avg:132.45ms
step:114/1330 train_time:13779ms step_avg:132.49ms
step:115/1330 train_time:13915ms step_avg:132.52ms
step:116/1330 train_time:14050ms step_avg:132.55ms
step:117/1330 train_time:14183ms step_avg:132.55ms
step:118/1330 train_time:14320ms step_avg:132.59ms
step:119/1330 train_time:14457ms step_avg:132.63ms
step:120/1330 train_time:14593ms step_avg:132.66ms
step:121/1330 train_time:14727ms step_avg:132.67ms
step:122/1330 train_time:14862ms step_avg:132.69ms
step:123/1330 train_time:14996ms step_avg:132.71ms
step:124/1330 train_time:15132ms step_avg:132.74ms
step:125/1330 train_time:15266ms step_avg:132.75ms
step:125/1330 val_loss:4.3119 train_time:15325ms step_avg:133.26ms
step:126/1330 train_time:15404ms step_avg:132.79ms
step:127/1330 train_time:15548ms step_avg:132.89ms
step:128/1330 train_time:15684ms step_avg:132.92ms
step:129/1330 train_time:15818ms step_avg:132.92ms
step:130/1330 train_time:15952ms step_avg:132.94ms
step:131/1330 train_time:16085ms step_avg:132.94ms
step:132/1330 train_time:16218ms step_avg:132.94ms
step:133/1330 train_time:16355ms step_avg:132.97ms
step:134/1330 train_time:16495ms step_avg:133.02ms
step:135/1330 train_time:16632ms step_avg:133.06ms
step:136/1330 train_time:16770ms step_avg:133.10ms
step:137/1330 train_time:16904ms step_avg:133.10ms
step:138/1330 train_time:17038ms step_avg:133.11ms
step:139/1330 train_time:17170ms step_avg:133.10ms
step:140/1330 train_time:17304ms step_avg:133.11ms
step:141/1330 train_time:17440ms step_avg:133.13ms
step:142/1330 train_time:17576ms step_avg:133.15ms
step:143/1330 train_time:17713ms step_avg:133.18ms
step:144/1330 train_time:17850ms step_avg:133.21ms
step:145/1330 train_time:17984ms step_avg:133.21ms
step:146/1330 train_time:18116ms step_avg:133.21ms
step:147/1330 train_time:18250ms step_avg:133.21ms
step:148/1330 train_time:18384ms step_avg:133.22ms
step:149/1330 train_time:18518ms step_avg:133.23ms
step:150/1330 train_time:18655ms step_avg:133.25ms
step:151/1330 train_time:18793ms step_avg:133.28ms
step:152/1330 train_time:18927ms step_avg:133.29ms
step:153/1330 train_time:19062ms step_avg:133.30ms
step:154/1330 train_time:19197ms step_avg:133.32ms
step:155/1330 train_time:19333ms step_avg:133.33ms
step:156/1330 train_time:19470ms step_avg:133.35ms
step:157/1330 train_time:19603ms step_avg:133.35ms
step:158/1330 train_time:19739ms step_avg:133.37ms
step:159/1330 train_time:19876ms step_avg:133.39ms
step:160/1330 train_time:20012ms step_avg:133.41ms
step:161/1330 train_time:20146ms step_avg:133.42ms
step:162/1330 train_time:20280ms step_avg:133.42ms
step:163/1330 train_time:20414ms step_avg:133.42ms
step:164/1330 train_time:20551ms step_avg:133.45ms
step:165/1330 train_time:20685ms step_avg:133.45ms
step:166/1330 train_time:20819ms step_avg:133.45ms
step:167/1330 train_time:20955ms step_avg:133.47ms
step:168/1330 train_time:21092ms step_avg:133.49ms
step:169/1330 train_time:21226ms step_avg:133.49ms
step:170/1330 train_time:21360ms step_avg:133.50ms
step:171/1330 train_time:21496ms step_avg:133.51ms
step:172/1330 train_time:21632ms step_avg:133.53ms
step:173/1330 train_time:21766ms step_avg:133.54ms
step:174/1330 train_time:21901ms step_avg:133.55ms
step:175/1330 train_time:22036ms step_avg:133.55ms
step:176/1330 train_time:22172ms step_avg:133.56ms
step:177/1330 train_time:22305ms step_avg:133.56ms
step:178/1330 train_time:22439ms step_avg:133.57ms
step:179/1330 train_time:22575ms step_avg:133.58ms
step:180/1330 train_time:22711ms step_avg:133.60ms
step:181/1330 train_time:22846ms step_avg:133.60ms
step:182/1330 train_time:22980ms step_avg:133.60ms
step:183/1330 train_time:23115ms step_avg:133.61ms
step:184/1330 train_time:23251ms step_avg:133.63ms
step:185/1330 train_time:23385ms step_avg:133.63ms
step:186/1330 train_time:23519ms step_avg:133.63ms
step:187/1330 train_time:23656ms step_avg:133.65ms
step:188/1330 train_time:23791ms step_avg:133.66ms
step:189/1330 train_time:23925ms step_avg:133.66ms
step:190/1330 train_time:24059ms step_avg:133.66ms
step:191/1330 train_time:24195ms step_avg:133.67ms
step:192/1330 train_time:24332ms step_avg:133.69ms
step:193/1330 train_time:24466ms step_avg:133.69ms
step:194/1330 train_time:24600ms step_avg:133.70ms
step:195/1330 train_time:24736ms step_avg:133.71ms
step:196/1330 train_time:24873ms step_avg:133.73ms
step:197/1330 train_time:25009ms step_avg:133.74ms
step:198/1330 train_time:25144ms step_avg:133.74ms
step:199/1330 train_time:25280ms step_avg:133.76ms
step:200/1330 train_time:25417ms step_avg:133.78ms
step:201/1330 train_time:25555ms step_avg:133.80ms
step:202/1330 train_time:25693ms step_avg:133.82ms
step:203/1330 train_time:25831ms step_avg:133.84ms
step:204/1330 train_time:25966ms step_avg:133.85ms
step:205/1330 train_time:26102ms step_avg:133.86ms
step:206/1330 train_time:26240ms step_avg:133.88ms
step:207/1330 train_time:26379ms step_avg:133.90ms
step:208/1330 train_time:26516ms step_avg:133.92ms
step:209/1330 train_time:26655ms step_avg:133.94ms
step:210/1330 train_time:26794ms step_avg:133.97ms
step:211/1330 train_time:26931ms step_avg:133.98ms
step:212/1330 train_time:27066ms step_avg:133.99ms
step:213/1330 train_time:27202ms step_avg:134.00ms
step:214/1330 train_time:27339ms step_avg:134.01ms
step:215/1330 train_time:27477ms step_avg:134.04ms
step:216/1330 train_time:27615ms step_avg:134.05ms
step:217/1330 train_time:27753ms step_avg:134.07ms
step:218/1330 train_time:27892ms step_avg:134.09ms
step:219/1330 train_time:28030ms step_avg:134.11ms
step:220/1330 train_time:28166ms step_avg:134.12ms
step:221/1330 train_time:28302ms step_avg:134.13ms
step:222/1330 train_time:28440ms step_avg:134.15ms
step:223/1330 train_time:28578ms step_avg:134.17ms
step:224/1330 train_time:28716ms step_avg:134.19ms
step:225/1330 train_time:28855ms step_avg:134.21ms
step:226/1330 train_time:28994ms step_avg:134.23ms
step:227/1330 train_time:29132ms step_avg:134.25ms
step:228/1330 train_time:29268ms step_avg:134.26ms
step:229/1330 train_time:29404ms step_avg:134.27ms
step:230/1330 train_time:29542ms step_avg:134.28ms
step:231/1330 train_time:29680ms step_avg:134.30ms
step:232/1330 train_time:29817ms step_avg:134.31ms
step:233/1330 train_time:29956ms step_avg:134.33ms
step:234/1330 train_time:30094ms step_avg:134.35ms
step:235/1330 train_time:30233ms step_avg:134.37ms
step:236/1330 train_time:30369ms step_avg:134.38ms
step:237/1330 train_time:30506ms step_avg:134.39ms
step:238/1330 train_time:30643ms step_avg:134.40ms
step:239/1330 train_time:30781ms step_avg:134.41ms
step:240/1330 train_time:30920ms step_avg:134.43ms
step:241/1330 train_time:31059ms step_avg:134.45ms
step:242/1330 train_time:31197ms step_avg:134.47ms
step:243/1330 train_time:31335ms step_avg:134.49ms
step:244/1330 train_time:31474ms step_avg:134.50ms
step:245/1330 train_time:31611ms step_avg:134.52ms
step:246/1330 train_time:31747ms step_avg:134.52ms
step:247/1330 train_time:31882ms step_avg:134.52ms
step:248/1330 train_time:32020ms step_avg:134.54ms
step:249/1330 train_time:32159ms step_avg:134.56ms
step:250/1330 train_time:32298ms step_avg:134.58ms
step:250/1330 val_loss:3.9266 train_time:32360ms step_avg:134.83ms
step:251/1330 train_time:32439ms step_avg:134.60ms
step:252/1330 train_time:32582ms step_avg:134.64ms
step:253/1330 train_time:32720ms step_avg:134.65ms
step:254/1330 train_time:32856ms step_avg:134.65ms
step:255/1330 train_time:32991ms step_avg:134.66ms
step:256/1330 train_time:33126ms step_avg:134.66ms
step:257/1330 train_time:33262ms step_avg:134.67ms
step:258/1330 train_time:33401ms step_avg:134.68ms
step:259/1330 train_time:33542ms step_avg:134.71ms
step:260/1330 train_time:33680ms step_avg:134.72ms
step:261/1330 train_time:33817ms step_avg:134.73ms
step:262/1330 train_time:33956ms step_avg:134.75ms
step:263/1330 train_time:34093ms step_avg:134.76ms
step:264/1330 train_time:34230ms step_avg:134.76ms
step:265/1330 train_time:34366ms step_avg:134.77ms
step:266/1330 train_time:34504ms step_avg:134.78ms
step:267/1330 train_time:34641ms step_avg:134.79ms
step:268/1330 train_time:34778ms step_avg:134.80ms
step:269/1330 train_time:34917ms step_avg:134.81ms
step:270/1330 train_time:35056ms step_avg:134.83ms
step:271/1330 train_time:35193ms step_avg:134.84ms
step:272/1330 train_time:35332ms step_avg:134.85ms
step:273/1330 train_time:35469ms step_avg:134.86ms
step:274/1330 train_time:35606ms step_avg:134.87ms
step:275/1330 train_time:35741ms step_avg:134.87ms
step:276/1330 train_time:35879ms step_avg:134.88ms
step:277/1330 train_time:36017ms step_avg:134.90ms
step:278/1330 train_time:36155ms step_avg:134.91ms
step:279/1330 train_time:36294ms step_avg:134.92ms
step:280/1330 train_time:36433ms step_avg:134.94ms
step:281/1330 train_time:36571ms step_avg:134.95ms
step:282/1330 train_time:36709ms step_avg:134.96ms
step:283/1330 train_time:36847ms step_avg:134.97ms
step:284/1330 train_time:36984ms step_avg:134.98ms
step:285/1330 train_time:37122ms step_avg:134.99ms
step:286/1330 train_time:37260ms step_avg:135.00ms
step:287/1330 train_time:37400ms step_avg:135.02ms
step:288/1330 train_time:37539ms step_avg:135.03ms
step:289/1330 train_time:37677ms step_avg:135.04ms
step:290/1330 train_time:37815ms step_avg:135.05ms
step:291/1330 train_time:37952ms step_avg:135.06ms
step:292/1330 train_time:38090ms step_avg:135.07ms
step:293/1330 train_time:38227ms step_avg:135.08ms
step:294/1330 train_time:38365ms step_avg:135.09ms
step:295/1330 train_time:38503ms step_avg:135.10ms
step:296/1330 train_time:38639ms step_avg:135.10ms
step:297/1330 train_time:38778ms step_avg:135.12ms
step:298/1330 train_time:38918ms step_avg:135.13ms
step:299/1330 train_time:39058ms step_avg:135.15ms
step:300/1330 train_time:39198ms step_avg:135.17ms
step:301/1330 train_time:39339ms step_avg:135.19ms
step:302/1330 train_time:39479ms step_avg:135.20ms
step:303/1330 train_time:39619ms step_avg:135.22ms
step:304/1330 train_time:39758ms step_avg:135.23ms
step:305/1330 train_time:39897ms step_avg:135.25ms
step:306/1330 train_time:40037ms step_avg:135.26ms
step:307/1330 train_time:40176ms step_avg:135.27ms
step:308/1330 train_time:40317ms step_avg:135.29ms
step:309/1330 train_time:40457ms step_avg:135.31ms
step:310/1330 train_time:40597ms step_avg:135.32ms
step:311/1330 train_time:40737ms step_avg:135.34ms
step:312/1330 train_time:40877ms step_avg:135.35ms
step:313/1330 train_time:41018ms step_avg:135.37ms
step:314/1330 train_time:41157ms step_avg:135.39ms
step:315/1330 train_time:41297ms step_avg:135.40ms
step:316/1330 train_time:41437ms step_avg:135.41ms
step:317/1330 train_time:41577ms step_avg:135.43ms
step:318/1330 train_time:41717ms step_avg:135.44ms
step:319/1330 train_time:41856ms step_avg:135.46ms
step:320/1330 train_time:41996ms step_avg:135.47ms
step:321/1330 train_time:42137ms step_avg:135.49ms
step:322/1330 train_time:42277ms step_avg:135.50ms
step:323/1330 train_time:42418ms step_avg:135.52ms
step:324/1330 train_time:42558ms step_avg:135.53ms
step:325/1330 train_time:42698ms step_avg:135.55ms
step:326/1330 train_time:42838ms step_avg:135.56ms
step:327/1330 train_time:42978ms step_avg:135.58ms
step:328/1330 train_time:43119ms step_avg:135.59ms
step:329/1330 train_time:43259ms step_avg:135.61ms
step:330/1330 train_time:43399ms step_avg:135.62ms
step:331/1330 train_time:43540ms step_avg:135.64ms
step:332/1330 train_time:43679ms step_avg:135.65ms
step:333/1330 train_time:43819ms step_avg:135.66ms
step:334/1330 train_time:43960ms step_avg:135.68ms
step:335/1330 train_time:44100ms step_avg:135.69ms
step:336/1330 train_time:44239ms step_avg:135.70ms
step:337/1330 train_time:44377ms step_avg:135.71ms
step:338/1330 train_time:44517ms step_avg:135.72ms
step:339/1330 train_time:44657ms step_avg:135.74ms
step:340/1330 train_time:44797ms step_avg:135.75ms
step:341/1330 train_time:44938ms step_avg:135.76ms
step:342/1330 train_time:45077ms step_avg:135.78ms
step:343/1330 train_time:45218ms step_avg:135.79ms
step:344/1330 train_time:45359ms step_avg:135.81ms
step:345/1330 train_time:45500ms step_avg:135.82ms
step:346/1330 train_time:45640ms step_avg:135.83ms
step:347/1330 train_time:45780ms step_avg:135.84ms
step:348/1330 train_time:45920ms step_avg:135.86ms
step:349/1330 train_time:46061ms step_avg:135.87ms
step:350/1330 train_time:46201ms step_avg:135.89ms
step:351/1330 train_time:46341ms step_avg:135.90ms
step:352/1330 train_time:46481ms step_avg:135.91ms
step:353/1330 train_time:46622ms step_avg:135.92ms
step:354/1330 train_time:46761ms step_avg:135.93ms
step:355/1330 train_time:46902ms step_avg:135.95ms
step:356/1330 train_time:47042ms step_avg:135.96ms
step:357/1330 train_time:47181ms step_avg:135.97ms
step:358/1330 train_time:47321ms step_avg:135.98ms
step:359/1330 train_time:47462ms step_avg:135.99ms
step:360/1330 train_time:47601ms step_avg:136.00ms
step:361/1330 train_time:47740ms step_avg:136.01ms
step:362/1330 train_time:47880ms step_avg:136.02ms
step:363/1330 train_time:48018ms step_avg:136.03ms
step:364/1330 train_time:48159ms step_avg:136.04ms
step:365/1330 train_time:48299ms step_avg:136.05ms
step:366/1330 train_time:48440ms step_avg:136.07ms
step:367/1330 train_time:48579ms step_avg:136.08ms
step:368/1330 train_time:48719ms step_avg:136.09ms
step:369/1330 train_time:48859ms step_avg:136.10ms
step:370/1330 train_time:48998ms step_avg:136.10ms
step:371/1330 train_time:49138ms step_avg:136.12ms
step:372/1330 train_time:49277ms step_avg:136.12ms
step:373/1330 train_time:49417ms step_avg:136.13ms
step:374/1330 train_time:49558ms step_avg:136.15ms
step:375/1330 train_time:49698ms step_avg:136.16ms
step:375/1330 val_loss:3.7501 train_time:49761ms step_avg:136.33ms
step:376/1330 train_time:49842ms step_avg:136.18ms
step:377/1330 train_time:49986ms step_avg:136.20ms
step:378/1330 train_time:50125ms step_avg:136.21ms
step:379/1330 train_time:50263ms step_avg:136.22ms
step:380/1330 train_time:50402ms step_avg:136.22ms
step:381/1330 train_time:50541ms step_avg:136.23ms
step:382/1330 train_time:50681ms step_avg:136.24ms
step:383/1330 train_time:50822ms step_avg:136.25ms
step:384/1330 train_time:50964ms step_avg:136.27ms
step:385/1330 train_time:51105ms step_avg:136.28ms
step:386/1330 train_time:51244ms step_avg:136.29ms
step:387/1330 train_time:51382ms step_avg:136.29ms
step:388/1330 train_time:51522ms step_avg:136.30ms
step:389/1330 train_time:51661ms step_avg:136.31ms
step:390/1330 train_time:51803ms step_avg:136.32ms
step:391/1330 train_time:51944ms step_avg:136.33ms
step:392/1330 train_time:52085ms step_avg:136.35ms
step:393/1330 train_time:52224ms step_avg:136.36ms
step:394/1330 train_time:52363ms step_avg:136.36ms
step:395/1330 train_time:52502ms step_avg:136.37ms
step:396/1330 train_time:52643ms step_avg:136.38ms
step:397/1330 train_time:52786ms step_avg:136.40ms
step:398/1330 train_time:52929ms step_avg:136.41ms
step:399/1330 train_time:53071ms step_avg:136.43ms
step:400/1330 train_time:53212ms step_avg:136.44ms
step:401/1330 train_time:53353ms step_avg:136.45ms
step:402/1330 train_time:53493ms step_avg:136.46ms
step:403/1330 train_time:53634ms step_avg:136.47ms
step:404/1330 train_time:53774ms step_avg:136.48ms
step:405/1330 train_time:53915ms step_avg:136.49ms
step:406/1330 train_time:54058ms step_avg:136.51ms
step:407/1330 train_time:54201ms step_avg:136.53ms
step:408/1330 train_time:54343ms step_avg:136.54ms
step:409/1330 train_time:54484ms step_avg:136.55ms
step:410/1330 train_time:54625ms step_avg:136.56ms
step:411/1330 train_time:54765ms step_avg:136.57ms
step:412/1330 train_time:54907ms step_avg:136.58ms
step:413/1330 train_time:55049ms step_avg:136.60ms
step:414/1330 train_time:55190ms step_avg:136.61ms
step:415/1330 train_time:55332ms step_avg:136.62ms
step:416/1330 train_time:55471ms step_avg:136.63ms
step:417/1330 train_time:55612ms step_avg:136.64ms
step:418/1330 train_time:55752ms step_avg:136.65ms
step:419/1330 train_time:55894ms step_avg:136.66ms
step:420/1330 train_time:56036ms step_avg:136.67ms
step:421/1330 train_time:56179ms step_avg:136.69ms
step:422/1330 train_time:56322ms step_avg:136.70ms
step:423/1330 train_time:56464ms step_avg:136.72ms
step:424/1330 train_time:56605ms step_avg:136.73ms
step:425/1330 train_time:56747ms step_avg:136.74ms
step:426/1330 train_time:56889ms step_avg:136.75ms
step:427/1330 train_time:57031ms step_avg:136.76ms
step:428/1330 train_time:57172ms step_avg:136.78ms
step:429/1330 train_time:57315ms step_avg:136.79ms
step:430/1330 train_time:57458ms step_avg:136.80ms
step:431/1330 train_time:57600ms step_avg:136.82ms
step:432/1330 train_time:57741ms step_avg:136.83ms
step:433/1330 train_time:57883ms step_avg:136.84ms
step:434/1330 train_time:58024ms step_avg:136.85ms
step:435/1330 train_time:58166ms step_avg:136.86ms
step:436/1330 train_time:58308ms step_avg:136.87ms
step:437/1330 train_time:58450ms step_avg:136.88ms
step:438/1330 train_time:58590ms step_avg:136.89ms
step:439/1330 train_time:58731ms step_avg:136.90ms
step:440/1330 train_time:58872ms step_avg:136.91ms
step:441/1330 train_time:59012ms step_avg:136.92ms
step:442/1330 train_time:59153ms step_avg:136.93ms
step:443/1330 train_time:59295ms step_avg:136.94ms
step:444/1330 train_time:59437ms step_avg:136.95ms
step:445/1330 train_time:59580ms step_avg:136.96ms
step:446/1330 train_time:59722ms step_avg:136.98ms
step:447/1330 train_time:59864ms step_avg:136.99ms
step:448/1330 train_time:60005ms step_avg:137.00ms
step:449/1330 train_time:60146ms step_avg:137.01ms
step:450/1330 train_time:60287ms step_avg:137.02ms
step:451/1330 train_time:60430ms step_avg:137.03ms
step:452/1330 train_time:60574ms step_avg:137.05ms
step:453/1330 train_time:60716ms step_avg:137.06ms
step:454/1330 train_time:60859ms step_avg:137.07ms
step:455/1330 train_time:61002ms step_avg:137.08ms
step:456/1330 train_time:61143ms step_avg:137.09ms
step:457/1330 train_time:61285ms step_avg:137.10ms
step:458/1330 train_time:61426ms step_avg:137.11ms
step:459/1330 train_time:61570ms step_avg:137.13ms
step:460/1330 train_time:61713ms step_avg:137.14ms
step:461/1330 train_time:61855ms step_avg:137.15ms
step:462/1330 train_time:61996ms step_avg:137.16ms
step:463/1330 train_time:62138ms step_avg:137.17ms
step:464/1330 train_time:62280ms step_avg:137.18ms
step:465/1330 train_time:62421ms step_avg:137.19ms
step:466/1330 train_time:62564ms step_avg:137.20ms
step:467/1330 train_time:62707ms step_avg:137.21ms
step:468/1330 train_time:62848ms step_avg:137.22ms
step:469/1330 train_time:62989ms step_avg:137.23ms
step:470/1330 train_time:63130ms step_avg:137.24ms
step:471/1330 train_time:63271ms step_avg:137.25ms
step:472/1330 train_time:63414ms step_avg:137.26ms
step:473/1330 train_time:63555ms step_avg:137.27ms
step:474/1330 train_time:63697ms step_avg:137.28ms
step:475/1330 train_time:63838ms step_avg:137.29ms
step:476/1330 train_time:63978ms step_avg:137.29ms
step:477/1330 train_time:64119ms step_avg:137.30ms
step:478/1330 train_time:64261ms step_avg:137.31ms
step:479/1330 train_time:64402ms step_avg:137.32ms
step:480/1330 train_time:64544ms step_avg:137.33ms
step:481/1330 train_time:64685ms step_avg:137.34ms
step:482/1330 train_time:64827ms step_avg:137.35ms
step:483/1330 train_time:64970ms step_avg:137.36ms
step:484/1330 train_time:65112ms step_avg:137.37ms
step:485/1330 train_time:65253ms step_avg:137.38ms
step:486/1330 train_time:65393ms step_avg:137.38ms
step:487/1330 train_time:65534ms step_avg:137.39ms
step:488/1330 train_time:65676ms step_avg:137.40ms
step:489/1330 train_time:65818ms step_avg:137.41ms
step:490/1330 train_time:65959ms step_avg:137.42ms
step:491/1330 train_time:66101ms step_avg:137.42ms
step:492/1330 train_time:66242ms step_avg:137.43ms
step:493/1330 train_time:66383ms step_avg:137.44ms
step:494/1330 train_time:66524ms step_avg:137.45ms
step:495/1330 train_time:66668ms step_avg:137.46ms
step:496/1330 train_time:66812ms step_avg:137.47ms
step:497/1330 train_time:66956ms step_avg:137.49ms
step:498/1330 train_time:67099ms step_avg:137.50ms
step:499/1330 train_time:67242ms step_avg:137.51ms
step:500/1330 train_time:67384ms step_avg:137.52ms
step:500/1330 val_loss:3.6338 train_time:67449ms step_avg:137.65ms
step:501/1330 train_time:67529ms step_avg:137.53ms | |
step:502/1330 train_time:67675ms step_avg:137.55ms | |
step:503/1330 train_time:67819ms step_avg:137.56ms | |
step:504/1330 train_time:67961ms step_avg:137.57ms | |
step:505/1330 train_time:68104ms step_avg:137.58ms | |
step:506/1330 train_time:68246ms step_avg:137.59ms | |
step:507/1330 train_time:68387ms step_avg:137.60ms | |
step:508/1330 train_time:68533ms step_avg:137.62ms | |
step:509/1330 train_time:68680ms step_avg:137.63ms | |
step:510/1330 train_time:68824ms step_avg:137.65ms | |
step:511/1330 train_time:68966ms step_avg:137.66ms | |
step:512/1330 train_time:69109ms step_avg:137.67ms | |
step:513/1330 train_time:69250ms step_avg:137.67ms | |
step:514/1330 train_time:69394ms step_avg:137.69ms | |
step:515/1330 train_time:69537ms step_avg:137.70ms | |
step:516/1330 train_time:69683ms step_avg:137.71ms | |
step:517/1330 train_time:69826ms step_avg:137.72ms | |
step:518/1330 train_time:69967ms step_avg:137.73ms | |
step:519/1330 train_time:70109ms step_avg:137.74ms | |
step:520/1330 train_time:70252ms step_avg:137.75ms | |
step:521/1330 train_time:70394ms step_avg:137.76ms | |
step:522/1330 train_time:70539ms step_avg:137.77ms | |
step:523/1330 train_time:70684ms step_avg:137.78ms | |
step:524/1330 train_time:70827ms step_avg:137.80ms | |
step:525/1330 train_time:70970ms step_avg:137.81ms | |
step:526/1330 train_time:71112ms step_avg:137.81ms | |
step:527/1330 train_time:71255ms step_avg:137.82ms | |
step:528/1330 train_time:71397ms step_avg:137.83ms | |
step:529/1330 train_time:71541ms step_avg:137.84ms | |
step:530/1330 train_time:71686ms step_avg:137.86ms | |
step:531/1330 train_time:71830ms step_avg:137.87ms | |
step:532/1330 train_time:71973ms step_avg:137.88ms | |
step:533/1330 train_time:72114ms step_avg:137.89ms | |
step:534/1330 train_time:72256ms step_avg:137.89ms | |
step:535/1330 train_time:72398ms step_avg:137.90ms | |
step:536/1330 train_time:72542ms step_avg:137.91ms | |
step:537/1330 train_time:72686ms step_avg:137.92ms | |
step:538/1330 train_time:72828ms step_avg:137.93ms | |
step:539/1330 train_time:72971ms step_avg:137.94ms | |
step:540/1330 train_time:73112ms step_avg:137.95ms | |
step:541/1330 train_time:73254ms step_avg:137.95ms | |
step:542/1330 train_time:73399ms step_avg:137.97ms | |
step:543/1330 train_time:73543ms step_avg:137.98ms | |
step:544/1330 train_time:73686ms step_avg:137.99ms | |
step:545/1330 train_time:73830ms step_avg:138.00ms | |
step:546/1330 train_time:73973ms step_avg:138.01ms | |
step:547/1330 train_time:74116ms step_avg:138.02ms | |
step:548/1330 train_time:74259ms step_avg:138.03ms | |
step:549/1330 train_time:74403ms step_avg:138.04ms | |
step:550/1330 train_time:74548ms step_avg:138.05ms | |
step:551/1330 train_time:74689ms step_avg:138.06ms | |
step:552/1330 train_time:74835ms step_avg:138.07ms | |
step:553/1330 train_time:74982ms step_avg:138.09ms | |
step:554/1330 train_time:75124ms step_avg:138.10ms | |
step:555/1330 train_time:75267ms step_avg:138.10ms | |
step:556/1330 train_time:75410ms step_avg:138.11ms | |
step:557/1330 train_time:75555ms step_avg:138.13ms | |
step:558/1330 train_time:75698ms step_avg:138.13ms | |
step:559/1330 train_time:75839ms step_avg:138.14ms | |
step:560/1330 train_time:75983ms step_avg:138.15ms | |
step:561/1330 train_time:76126ms step_avg:138.16ms | |
step:562/1330 train_time:76269ms step_avg:138.17ms | |
step:563/1330 train_time:76411ms step_avg:138.18ms | |
step:564/1330 train_time:76553ms step_avg:138.18ms | |
step:565/1330 train_time:76697ms step_avg:138.19ms | |
step:566/1330 train_time:76842ms step_avg:138.20ms | |
step:567/1330 train_time:76986ms step_avg:138.21ms | |
step:568/1330 train_time:77129ms step_avg:138.22ms | |
step:569/1330 train_time:77272ms step_avg:138.23ms | |
step:570/1330 train_time:77414ms step_avg:138.24ms | |
step:571/1330 train_time:77557ms step_avg:138.25ms | |
step:572/1330 train_time:77701ms step_avg:138.26ms | |
step:573/1330 train_time:77844ms step_avg:138.27ms | |
step:574/1330 train_time:77987ms step_avg:138.28ms | |
step:575/1330 train_time:78132ms step_avg:138.29ms | |
step:576/1330 train_time:78275ms step_avg:138.29ms | |
step:577/1330 train_time:78417ms step_avg:138.30ms | |
step:578/1330 train_time:78559ms step_avg:138.31ms | |
step:579/1330 train_time:78703ms step_avg:138.32ms | |
step:580/1330 train_time:78845ms step_avg:138.32ms | |
step:581/1330 train_time:78988ms step_avg:138.33ms | |
step:582/1330 train_time:79130ms step_avg:138.34ms | |
step:583/1330 train_time:79273ms step_avg:138.35ms | |
step:584/1330 train_time:79416ms step_avg:138.36ms | |
step:585/1330 train_time:79559ms step_avg:138.36ms | |
step:586/1330 train_time:79702ms step_avg:138.37ms | |
step:587/1330 train_time:79844ms step_avg:138.38ms | |
step:588/1330 train_time:79987ms step_avg:138.39ms | |
step:589/1330 train_time:80131ms step_avg:138.40ms | |
step:590/1330 train_time:80274ms step_avg:138.40ms | |
step:591/1330 train_time:80418ms step_avg:138.41ms | |
step:592/1330 train_time:80561ms step_avg:138.42ms | |
step:593/1330 train_time:80706ms step_avg:138.43ms | |
step:594/1330 train_time:80850ms step_avg:138.44ms | |
step:595/1330 train_time:80995ms step_avg:138.45ms | |
step:596/1330 train_time:81140ms step_avg:138.46ms | |
step:597/1330 train_time:81285ms step_avg:138.47ms | |
step:598/1330 train_time:81428ms step_avg:138.48ms | |
step:599/1330 train_time:81575ms step_avg:138.50ms | |
step:600/1330 train_time:81721ms step_avg:138.51ms | |
step:601/1330 train_time:81865ms step_avg:138.52ms | |
step:602/1330 train_time:82008ms step_avg:138.53ms | |
step:603/1330 train_time:82154ms step_avg:138.54ms | |
step:604/1330 train_time:82298ms step_avg:138.55ms | |
step:605/1330 train_time:82445ms step_avg:138.56ms | |
step:606/1330 train_time:82590ms step_avg:138.57ms | |
step:607/1330 train_time:82736ms step_avg:138.59ms | |
step:608/1330 train_time:82880ms step_avg:138.60ms | |
step:609/1330 train_time:83023ms step_avg:138.60ms | |
step:610/1330 train_time:83167ms step_avg:138.61ms | |
step:611/1330 train_time:83313ms step_avg:138.62ms | |
step:612/1330 train_time:83459ms step_avg:138.64ms | |
step:613/1330 train_time:83604ms step_avg:138.65ms | |
step:614/1330 train_time:83748ms step_avg:138.66ms | |
step:615/1330 train_time:83891ms step_avg:138.66ms | |
step:616/1330 train_time:84036ms step_avg:138.67ms | |
step:617/1330 train_time:84181ms step_avg:138.68ms | |
step:618/1330 train_time:84325ms step_avg:138.69ms | |
step:619/1330 train_time:84472ms step_avg:138.71ms | |
step:620/1330 train_time:84619ms step_avg:138.72ms | |
step:621/1330 train_time:84763ms step_avg:138.73ms | |
step:622/1330 train_time:84907ms step_avg:138.74ms | |
step:623/1330 train_time:85051ms step_avg:138.75ms | |
step:624/1330 train_time:85194ms step_avg:138.75ms | |
step:625/1330 train_time:85339ms step_avg:138.76ms | |
step:625/1330 val_loss:3.5604 train_time:85406ms step_avg:138.87ms | |
step:626/1330 train_time:85488ms step_avg:138.78ms | |
step:627/1330 train_time:85638ms step_avg:138.80ms | |
step:628/1330 train_time:85782ms step_avg:138.81ms | |
step:629/1330 train_time:85925ms step_avg:138.81ms | |
step:630/1330 train_time:86067ms step_avg:138.82ms | |
step:631/1330 train_time:86212ms step_avg:138.83ms | |
step:632/1330 train_time:86356ms step_avg:138.84ms | |
step:633/1330 train_time:86503ms step_avg:138.85ms | |
step:634/1330 train_time:86649ms step_avg:138.86ms | |
step:635/1330 train_time:86793ms step_avg:138.87ms | |
step:636/1330 train_time:86937ms step_avg:138.88ms | |
step:637/1330 train_time:87078ms step_avg:138.88ms | |
step:638/1330 train_time:87221ms step_avg:138.89ms | |
step:639/1330 train_time:87365ms step_avg:138.89ms | |
step:640/1330 train_time:87509ms step_avg:138.90ms | |
step:641/1330 train_time:87654ms step_avg:138.91ms | |
step:642/1330 train_time:87799ms step_avg:138.92ms | |
step:643/1330 train_time:87945ms step_avg:138.93ms | |
step:644/1330 train_time:88090ms step_avg:138.94ms | |
step:645/1330 train_time:88235ms step_avg:138.95ms | |
step:646/1330 train_time:88379ms step_avg:138.96ms | |
step:647/1330 train_time:88523ms step_avg:138.97ms | |
step:648/1330 train_time:88669ms step_avg:138.98ms | |
step:649/1330 train_time:88814ms step_avg:138.99ms | |
step:650/1330 train_time:88962ms step_avg:139.00ms | |
step:651/1330 train_time:89106ms step_avg:139.01ms | |
step:652/1330 train_time:89249ms step_avg:139.02ms | |
step:653/1330 train_time:89393ms step_avg:139.03ms | |
step:654/1330 train_time:89538ms step_avg:139.03ms | |
step:655/1330 train_time:89681ms step_avg:139.04ms | |
step:656/1330 train_time:89825ms step_avg:139.05ms | |
step:657/1330 train_time:89971ms step_avg:139.06ms | |
step:658/1330 train_time:90117ms step_avg:139.07ms | |
step:659/1330 train_time:90261ms step_avg:139.08ms | |
step:660/1330 train_time:90405ms step_avg:139.09ms | |
step:661/1330 train_time:90553ms step_avg:139.10ms | |
step:662/1330 train_time:90696ms step_avg:139.10ms | |
step:663/1330 train_time:90840ms step_avg:139.11ms | |
step:664/1330 train_time:90983ms step_avg:139.12ms | |
step:665/1330 train_time:91128ms step_avg:139.13ms | |
step:666/1330 train_time:91273ms step_avg:139.14ms | |
step:667/1330 train_time:91419ms step_avg:139.15ms | |
step:668/1330 train_time:91563ms step_avg:139.15ms | |
step:669/1330 train_time:91711ms step_avg:139.17ms | |
step:670/1330 train_time:91856ms step_avg:139.18ms | |
step:671/1330 train_time:92000ms step_avg:139.18ms | |
step:672/1330 train_time:92144ms step_avg:139.19ms | |
step:673/1330 train_time:92290ms step_avg:139.20ms | |
step:674/1330 train_time:92436ms step_avg:139.21ms | |
step:675/1330 train_time:92579ms step_avg:139.22ms | |
step:676/1330 train_time:92722ms step_avg:139.22ms | |
step:677/1330 train_time:92866ms step_avg:139.23ms | |
step:678/1330 train_time:93011ms step_avg:139.24ms | |
step:679/1330 train_time:93154ms step_avg:139.24ms | |
step:680/1330 train_time:93302ms step_avg:139.26ms | |
step:681/1330 train_time:93446ms step_avg:139.26ms | |
step:682/1330 train_time:93591ms step_avg:139.27ms | |
step:683/1330 train_time:93736ms step_avg:139.28ms | |
step:684/1330 train_time:93882ms step_avg:139.29ms | |
step:685/1330 train_time:94028ms step_avg:139.30ms | |
step:686/1330 train_time:94174ms step_avg:139.31ms | |
step:687/1330 train_time:94317ms step_avg:139.32ms | |
step:688/1330 train_time:94462ms step_avg:139.32ms | |
step:689/1330 train_time:94606ms step_avg:139.33ms | |
step:690/1330 train_time:94752ms step_avg:139.34ms | |
step:691/1330 train_time:94898ms step_avg:139.35ms | |
step:692/1330 train_time:95043ms step_avg:139.36ms | |
step:693/1330 train_time:95188ms step_avg:139.37ms | |
step:694/1330 train_time:95332ms step_avg:139.37ms | |
step:695/1330 train_time:95476ms step_avg:139.38ms | |
step:696/1330 train_time:95622ms step_avg:139.39ms | |
step:697/1330 train_time:95768ms step_avg:139.40ms | |
step:698/1330 train_time:95914ms step_avg:139.41ms | |
step:699/1330 train_time:96061ms step_avg:139.42ms | |
step:700/1330 train_time:96204ms step_avg:139.43ms | |
step:701/1330 train_time:96350ms step_avg:139.44ms | |
step:702/1330 train_time:96497ms step_avg:139.45ms | |
step:703/1330 train_time:96644ms step_avg:139.46ms | |
step:704/1330 train_time:96789ms step_avg:139.47ms | |
step:705/1330 train_time:96937ms step_avg:139.48ms | |
step:706/1330 train_time:97083ms step_avg:139.49ms | |
step:707/1330 train_time:97227ms step_avg:139.49ms | |
step:708/1330 train_time:97374ms step_avg:139.50ms | |
step:709/1330 train_time:97520ms step_avg:139.51ms | |
step:710/1330 train_time:97664ms step_avg:139.52ms | |
step:711/1330 train_time:97810ms step_avg:139.53ms | |
step:712/1330 train_time:97959ms step_avg:139.54ms | |
step:713/1330 train_time:98105ms step_avg:139.55ms | |
step:714/1330 train_time:98249ms step_avg:139.56ms | |
step:715/1330 train_time:98394ms step_avg:139.57ms | |
step:716/1330 train_time:98539ms step_avg:139.57ms | |
step:717/1330 train_time:98684ms step_avg:139.58ms | |
step:718/1330 train_time:98830ms step_avg:139.59ms | |
step:719/1330 train_time:98977ms step_avg:139.60ms | |
step:720/1330 train_time:99126ms step_avg:139.61ms | |
step:721/1330 train_time:99270ms step_avg:139.62ms | |
step:722/1330 train_time:99414ms step_avg:139.63ms | |
step:723/1330 train_time:99559ms step_avg:139.63ms | |
step:724/1330 train_time:99705ms step_avg:139.64ms | |
step:725/1330 train_time:99852ms step_avg:139.65ms | |
step:726/1330 train_time:99998ms step_avg:139.66ms | |
step:727/1330 train_time:100144ms step_avg:139.67ms | |
step:728/1330 train_time:100291ms step_avg:139.68ms | |
step:729/1330 train_time:100439ms step_avg:139.69ms | |
step:730/1330 train_time:100588ms step_avg:139.70ms | |
step:731/1330 train_time:100731ms step_avg:139.71ms | |
step:732/1330 train_time:100875ms step_avg:139.72ms | |
step:733/1330 train_time:101020ms step_avg:139.72ms | |
step:734/1330 train_time:101165ms step_avg:139.73ms | |
step:735/1330 train_time:101312ms step_avg:139.74ms | |
step:736/1330 train_time:101460ms step_avg:139.75ms | |
step:737/1330 train_time:101604ms step_avg:139.76ms | |
step:738/1330 train_time:101748ms step_avg:139.76ms | |
step:739/1330 train_time:101894ms step_avg:139.77ms | |
step:740/1330 train_time:102041ms step_avg:139.78ms | |
step:741/1330 train_time:102183ms step_avg:139.79ms | |
step:742/1330 train_time:102332ms step_avg:139.80ms | |
step:743/1330 train_time:102478ms step_avg:139.81ms | |
step:744/1330 train_time:102624ms step_avg:139.81ms | |
step:745/1330 train_time:102771ms step_avg:139.83ms | |
step:746/1330 train_time:102915ms step_avg:139.83ms | |
step:747/1330 train_time:103062ms step_avg:139.84ms | |
step:748/1330 train_time:103208ms step_avg:139.85ms | |
step:749/1330 train_time:103354ms step_avg:139.86ms | |
step:750/1330 train_time:103501ms step_avg:139.87ms | |
step:750/1330 val_loss:3.5084 train_time:103568ms step_avg:139.96ms | |
step:751/1330 train_time:103649ms step_avg:139.88ms | |
step:752/1330 train_time:103797ms step_avg:139.89ms | |
step:753/1330 train_time:103943ms step_avg:139.90ms | |
step:754/1330 train_time:104087ms step_avg:139.90ms | |
step:755/1330 train_time:104232ms step_avg:139.91ms | |
step:756/1330 train_time:104377ms step_avg:139.92ms | |
step:757/1330 train_time:104527ms step_avg:139.93ms | |
step:758/1330 train_time:104675ms step_avg:139.94ms | |
step:759/1330 train_time:104823ms step_avg:139.95ms | |
step:760/1330 train_time:104967ms step_avg:139.96ms | |
step:761/1330 train_time:105112ms step_avg:139.96ms | |
step:762/1330 train_time:105256ms step_avg:139.97ms | |
step:763/1330 train_time:105400ms step_avg:139.97ms | |
step:764/1330 train_time:105544ms step_avg:139.98ms | |
step:765/1330 train_time:105689ms step_avg:139.99ms | |
step:766/1330 train_time:105838ms step_avg:140.00ms | |
step:767/1330 train_time:105982ms step_avg:140.00ms | |
step:768/1330 train_time:106127ms step_avg:140.01ms | |
step:769/1330 train_time:106272ms step_avg:140.02ms | |
step:770/1330 train_time:106418ms step_avg:140.02ms | |
step:771/1330 train_time:106564ms step_avg:140.03ms | |
step:772/1330 train_time:106710ms step_avg:140.04ms | |
step:773/1330 train_time:106857ms step_avg:140.05ms | |
step:774/1330 train_time:107002ms step_avg:140.06ms | |
step:775/1330 train_time:107146ms step_avg:140.06ms | |
step:776/1330 train_time:107293ms step_avg:140.07ms | |
step:777/1330 train_time:107438ms step_avg:140.08ms | |
step:778/1330 train_time:107581ms step_avg:140.08ms | |
step:779/1330 train_time:107728ms step_avg:140.09ms | |
step:780/1330 train_time:107875ms step_avg:140.10ms | |
step:781/1330 train_time:108021ms step_avg:140.11ms | |
step:782/1330 train_time:108167ms step_avg:140.11ms | |
step:783/1330 train_time:108312ms step_avg:140.12ms | |
step:784/1330 train_time:108456ms step_avg:140.12ms | |
step:785/1330 train_time:108601ms step_avg:140.13ms | |
step:786/1330 train_time:108748ms step_avg:140.14ms | |
step:787/1330 train_time:108892ms step_avg:140.14ms | |
step:788/1330 train_time:109040ms step_avg:140.15ms | |
step:789/1330 train_time:109185ms step_avg:140.16ms | |
step:790/1330 train_time:109331ms step_avg:140.17ms | |
step:791/1330 train_time:109478ms step_avg:140.18ms | |
step:792/1330 train_time:109624ms step_avg:140.18ms | |
step:793/1330 train_time:109770ms step_avg:140.19ms | |
step:794/1330 train_time:109919ms step_avg:140.20ms | |
step:795/1330 train_time:110073ms step_avg:140.22ms | |
step:796/1330 train_time:110220ms step_avg:140.23ms | |
step:797/1330 train_time:110366ms step_avg:140.24ms | |
step:798/1330 train_time:110516ms step_avg:140.25ms | |
step:799/1330 train_time:110664ms step_avg:140.26ms | |
step:800/1330 train_time:110809ms step_avg:140.26ms | |
step:801/1330 train_time:110957ms step_avg:140.27ms | |
step:802/1330 train_time:111104ms step_avg:140.28ms | |
step:803/1330 train_time:111250ms step_avg:140.29ms | |
step:804/1330 train_time:111396ms step_avg:140.30ms | |
step:805/1330 train_time:111544ms step_avg:140.31ms | |
step:806/1330 train_time:111690ms step_avg:140.31ms | |
step:807/1330 train_time:111836ms step_avg:140.32ms | |
step:808/1330 train_time:111985ms step_avg:140.33ms | |
step:809/1330 train_time:112133ms step_avg:140.34ms | |
step:810/1330 train_time:112282ms step_avg:140.35ms | |
step:811/1330 train_time:112429ms step_avg:140.36ms | |
step:812/1330 train_time:112576ms step_avg:140.37ms | |
step:813/1330 train_time:112723ms step_avg:140.38ms | |
step:814/1330 train_time:112869ms step_avg:140.38ms | |
step:815/1330 train_time:113015ms step_avg:140.39ms | |
step:816/1330 train_time:113164ms step_avg:140.40ms | |
step:817/1330 train_time:113309ms step_avg:140.41ms | |
step:818/1330 train_time:113456ms step_avg:140.42ms | |
step:819/1330 train_time:113603ms step_avg:140.42ms | |
step:820/1330 train_time:113752ms step_avg:140.43ms | |
step:821/1330 train_time:113898ms step_avg:140.44ms | |
step:822/1330 train_time:114044ms step_avg:140.45ms | |
step:823/1330 train_time:114189ms step_avg:140.45ms | |
step:824/1330 train_time:114337ms step_avg:140.46ms | |
step:825/1330 train_time:114484ms step_avg:140.47ms | |
step:826/1330 train_time:114637ms step_avg:140.49ms | |
step:827/1330 train_time:114784ms step_avg:140.49ms | |
step:828/1330 train_time:114931ms step_avg:140.50ms | |
step:829/1330 train_time:115077ms step_avg:140.51ms | |
step:830/1330 train_time:115222ms step_avg:140.51ms | |
step:831/1330 train_time:115368ms step_avg:140.52ms | |
step:832/1330 train_time:115517ms step_avg:140.53ms | |
step:833/1330 train_time:115663ms step_avg:140.54ms | |
step:834/1330 train_time:115814ms step_avg:140.55ms | |
step:835/1330 train_time:115961ms step_avg:140.56ms | |
step:836/1330 train_time:116109ms step_avg:140.57ms | |
step:837/1330 train_time:116254ms step_avg:140.57ms | |
step:838/1330 train_time:116400ms step_avg:140.58ms | |
step:839/1330 train_time:116546ms step_avg:140.59ms | |
step:840/1330 train_time:116690ms step_avg:140.59ms | |
step:841/1330 train_time:116836ms step_avg:140.60ms | |
step:842/1330 train_time:116984ms step_avg:140.61ms | |
step:843/1330 train_time:117130ms step_avg:140.61ms | |
step:844/1330 train_time:117277ms step_avg:140.62ms | |
step:845/1330 train_time:117424ms step_avg:140.63ms | |
step:846/1330 train_time:117575ms step_avg:140.64ms | |
step:847/1330 train_time:117721ms step_avg:140.65ms | |
step:848/1330 train_time:117867ms step_avg:140.65ms | |
step:849/1330 train_time:118015ms step_avg:140.66ms | |
step:850/1330 train_time:118163ms step_avg:140.67ms | |
step:851/1330 train_time:118315ms step_avg:140.68ms | |
step:852/1330 train_time:118463ms step_avg:140.69ms | |
step:853/1330 train_time:118611ms step_avg:140.70ms | |
step:854/1330 train_time:118757ms step_avg:140.71ms | |
step:855/1330 train_time:118903ms step_avg:140.71ms | |
step:856/1330 train_time:119051ms step_avg:140.72ms | |
step:857/1330 train_time:119199ms step_avg:140.73ms | |
step:858/1330 train_time:119347ms step_avg:140.74ms | |
step:859/1330 train_time:119493ms step_avg:140.75ms | |
step:860/1330 train_time:119638ms step_avg:140.75ms | |
step:861/1330 train_time:119787ms step_avg:140.76ms | |
step:862/1330 train_time:119931ms step_avg:140.76ms | |
step:863/1330 train_time:120080ms step_avg:140.77ms | |
step:864/1330 train_time:120226ms step_avg:140.78ms | |
step:865/1330 train_time:120372ms step_avg:140.79ms | |
step:866/1330 train_time:120522ms step_avg:140.80ms | |
step:867/1330 train_time:120666ms step_avg:140.80ms | |
step:868/1330 train_time:120811ms step_avg:140.80ms | |
step:869/1330 train_time:120959ms step_avg:140.81ms | |
step:870/1330 train_time:121105ms step_avg:140.82ms | |
step:871/1330 train_time:121251ms step_avg:140.83ms | |
step:872/1330 train_time:121401ms step_avg:140.84ms | |
step:873/1330 train_time:121548ms step_avg:140.84ms | |
step:874/1330 train_time:121700ms step_avg:140.86ms | |
step:875/1330 train_time:121845ms step_avg:140.86ms | |
step:875/1330 val_loss:3.4522 train_time:121912ms step_avg:140.94ms | |
step:876/1330 train_time:121995ms step_avg:140.87ms | |
step:877/1330 train_time:122148ms step_avg:140.89ms | |
step:878/1330 train_time:122294ms step_avg:140.89ms | |
step:879/1330 train_time:122442ms step_avg:140.90ms | |
step:880/1330 train_time:122586ms step_avg:140.90ms | |
step:881/1330 train_time:122730ms step_avg:140.91ms | |
step:882/1330 train_time:122878ms step_avg:140.92ms | |
step:883/1330 train_time:123028ms step_avg:140.93ms | |
step:884/1330 train_time:123177ms step_avg:140.94ms | |
step:885/1330 train_time:123327ms step_avg:140.95ms | |
step:886/1330 train_time:123474ms step_avg:140.95ms | |
step:887/1330 train_time:123621ms step_avg:140.96ms | |
step:888/1330 train_time:123769ms step_avg:140.97ms | |
step:889/1330 train_time:123916ms step_avg:140.97ms | |
step:890/1330 train_time:124064ms step_avg:140.98ms | |
step:891/1330 train_time:124212ms step_avg:140.99ms | |
step:892/1330 train_time:124360ms step_avg:141.00ms | |
step:893/1330 train_time:124506ms step_avg:141.00ms | |
step:894/1330 train_time:124655ms step_avg:141.01ms | |
step:895/1330 train_time:124802ms step_avg:141.02ms | |
step:896/1330 train_time:124947ms step_avg:141.02ms | |
step:897/1330 train_time:125099ms step_avg:141.04ms | |
step:898/1330 train_time:125249ms step_avg:141.05ms | |
step:899/1330 train_time:125397ms step_avg:141.05ms | |
step:900/1330 train_time:125546ms step_avg:141.06ms | |
step:901/1330 train_time:125692ms step_avg:141.07ms | |
step:902/1330 train_time:125837ms step_avg:141.07ms | |
step:903/1330 train_time:125987ms step_avg:141.08ms | |
step:904/1330 train_time:126135ms step_avg:141.09ms | |
step:905/1330 train_time:126283ms step_avg:141.10ms | |
step:906/1330 train_time:126434ms step_avg:141.11ms | |
step:907/1330 train_time:126587ms step_avg:141.12ms | |
step:908/1330 train_time:126732ms step_avg:141.13ms | |
step:909/1330 train_time:126879ms step_avg:141.13ms | |
step:910/1330 train_time:127029ms step_avg:141.14ms | |
step:911/1330 train_time:127177ms step_avg:141.15ms | |
step:912/1330 train_time:127324ms step_avg:141.16ms | |
step:913/1330 train_time:127474ms step_avg:141.17ms | |
step:914/1330 train_time:127620ms step_avg:141.17ms | |
step:915/1330 train_time:127771ms step_avg:141.18ms | |
step:916/1330 train_time:127921ms step_avg:141.19ms | |
step:917/1330 train_time:128070ms step_avg:141.20ms | |
step:918/1330 train_time:128216ms step_avg:141.21ms | |
step:919/1330 train_time:128365ms step_avg:141.22ms | |
step:920/1330 train_time:128510ms step_avg:141.22ms | |
step:921/1330 train_time:128660ms step_avg:141.23ms | |
step:922/1330 train_time:128810ms step_avg:141.24ms | |
step:923/1330 train_time:128956ms step_avg:141.24ms | |
step:924/1330 train_time:129105ms step_avg:141.25ms | |
step:925/1330 train_time:129255ms step_avg:141.26ms | |
step:926/1330 train_time:129402ms step_avg:141.27ms | |
step:927/1330 train_time:129553ms step_avg:141.28ms | |
step:928/1330 train_time:129701ms step_avg:141.29ms | |
step:929/1330 train_time:129849ms step_avg:141.29ms | |
step:930/1330 train_time:129997ms step_avg:141.30ms | |
step:931/1330 train_time:130144ms step_avg:141.31ms | |
step:932/1330 train_time:130292ms step_avg:141.31ms | |
step:933/1330 train_time:130442ms step_avg:141.32ms | |
step:934/1330 train_time:130588ms step_avg:141.33ms | |
step:935/1330 train_time:130739ms step_avg:141.34ms | |
step:936/1330 train_time:130887ms step_avg:141.35ms | |
step:937/1330 train_time:131039ms step_avg:141.36ms | |
step:938/1330 train_time:131185ms step_avg:141.36ms | |
step:939/1330 train_time:131335ms step_avg:141.37ms | |
step:940/1330 train_time:131481ms step_avg:141.38ms | |
step:941/1330 train_time:131628ms step_avg:141.38ms | |
step:942/1330 train_time:131777ms step_avg:141.39ms | |
step:943/1330 train_time:131927ms step_avg:141.40ms | |
step:944/1330 train_time:132082ms step_avg:141.42ms | |
step:945/1330 train_time:132233ms step_avg:141.43ms | |
step:946/1330 train_time:132382ms step_avg:141.43ms | |
step:947/1330 train_time:132530ms step_avg:141.44ms | |
step:948/1330 train_time:132679ms step_avg:141.45ms | |
step:949/1330 train_time:132826ms step_avg:141.45ms | |
step:950/1330 train_time:132973ms step_avg:141.46ms | |
step:951/1330 train_time:133124ms step_avg:141.47ms | |
step:952/1330 train_time:133273ms step_avg:141.48ms | |
step:953/1330 train_time:133422ms step_avg:141.49ms | |
step:954/1330 train_time:133569ms step_avg:141.49ms | |
step:955/1330 train_time:133717ms step_avg:141.50ms | |
step:956/1330 train_time:133868ms step_avg:141.51ms | |
step:957/1330 train_time:134017ms step_avg:141.52ms | |
step:958/1330 train_time:134169ms step_avg:141.53ms | |
step:959/1330 train_time:134319ms step_avg:141.54ms | |
step:960/1330 train_time:134468ms step_avg:141.55ms | |
step:961/1330 train_time:134615ms step_avg:141.55ms | |
step:962/1330 train_time:134761ms step_avg:141.56ms | |
step:963/1330 train_time:134913ms step_avg:141.57ms | |
step:964/1330 train_time:135059ms step_avg:141.57ms | |
step:965/1330 train_time:135205ms step_avg:141.58ms | |
step:966/1330 train_time:135354ms step_avg:141.58ms | |
step:967/1330 train_time:135503ms step_avg:141.59ms | |
step:968/1330 train_time:135650ms step_avg:141.60ms | |
step:969/1330 train_time:135802ms step_avg:141.61ms | |
step:970/1330 train_time:135948ms step_avg:141.61ms | |
step:971/1330 train_time:136098ms step_avg:141.62ms | |
step:972/1330 train_time:136245ms step_avg:141.63ms | |
step:973/1330 train_time:136392ms step_avg:141.63ms | |
step:974/1330 train_time:136542ms step_avg:141.64ms | |
step:975/1330 train_time:136689ms step_avg:141.65ms | |
step:976/1330 train_time:136836ms step_avg:141.65ms | |
step:977/1330 train_time:136982ms step_avg:141.66ms | |
step:978/1330 train_time:137130ms step_avg:141.66ms | |
step:979/1330 train_time:137278ms step_avg:141.67ms | |
step:980/1330 train_time:137423ms step_avg:141.67ms | |
step:981/1330 train_time:137569ms step_avg:141.68ms | |
step:982/1330 train_time:137718ms step_avg:141.69ms | |
step:983/1330 train_time:137864ms step_avg:141.69ms | |
step:984/1330 train_time:138011ms step_avg:141.69ms | |
step:985/1330 train_time:138161ms step_avg:141.70ms | |
step:986/1330 train_time:138309ms step_avg:141.71ms | |
step:987/1330 train_time:138455ms step_avg:141.71ms | |
step:988/1330 train_time:138603ms step_avg:141.72ms | |
step:989/1330 train_time:138752ms step_avg:141.73ms | |
step:990/1330 train_time:138903ms step_avg:141.74ms | |
step:991/1330 train_time:139052ms step_avg:141.74ms | |
step:992/1330 train_time:139203ms step_avg:141.75ms | |
step:993/1330 train_time:139357ms step_avg:141.77ms | |
step:994/1330 train_time:139505ms step_avg:141.77ms | |
step:995/1330 train_time:139653ms step_avg:141.78ms | |
step:996/1330 train_time:139800ms step_avg:141.78ms | |
step:997/1330 train_time:139950ms step_avg:141.79ms | |
step:998/1330 train_time:140099ms step_avg:141.80ms | |
step:999/1330 train_time:140249ms step_avg:141.81ms | |
step:1000/1330 train_time:140398ms step_avg:141.82ms | |
step:1000/1330 val_loss:3.3879 train_time:140469ms step_avg:141.89ms | |
step:1001/1330 train_time:140551ms step_avg:141.83ms | |
step:1002/1330 train_time:140703ms step_avg:141.84ms | |
step:1003/1330 train_time:140858ms step_avg:141.85ms | |
step:1004/1330 train_time:141008ms step_avg:141.86ms | |
step:1005/1330 train_time:141154ms step_avg:141.86ms | |
step:1006/1330 train_time:141298ms step_avg:141.87ms | |
step:1007/1330 train_time:141447ms step_avg:141.87ms | |
step:1008/1330 train_time:141598ms step_avg:141.88ms | |
step:1009/1330 train_time:141752ms step_avg:141.89ms | |
step:1010/1330 train_time:141898ms step_avg:141.90ms | |
step:1011/1330 train_time:142046ms step_avg:141.90ms | |
step:1012/1330 train_time:142193ms step_avg:141.91ms | |
step:1013/1330 train_time:142345ms step_avg:141.92ms | |
step:1014/1330 train_time:142493ms step_avg:141.93ms | |
step:1015/1330 train_time:142644ms step_avg:141.93ms | |
step:1016/1330 train_time:142793ms step_avg:141.94ms | |
step:1017/1330 train_time:142944ms step_avg:141.95ms | |
step:1018/1330 train_time:143091ms step_avg:141.96ms | |
step:1019/1330 train_time:143240ms step_avg:141.96ms | |
step:1020/1330 train_time:143393ms step_avg:141.97ms | |
step:1021/1330 train_time:143540ms step_avg:141.98ms | |
step:1022/1330 train_time:143691ms step_avg:141.99ms | |
step:1023/1330 train_time:143843ms step_avg:142.00ms | |
step:1024/1330 train_time:143993ms step_avg:142.00ms | |
step:1025/1330 train_time:144145ms step_avg:142.01ms | |
step:1026/1330 train_time:144291ms step_avg:142.02ms | |
step:1027/1330 train_time:144439ms step_avg:142.02ms | |
step:1028/1330 train_time:144593ms step_avg:142.04ms | |
step:1029/1330 train_time:144746ms step_avg:142.05ms | |
step:1030/1330 train_time:144895ms step_avg:142.05ms | |
step:1031/1330 train_time:145043ms step_avg:142.06ms | |
step:1032/1330 train_time:145191ms step_avg:142.07ms | |
step:1033/1330 train_time:145339ms step_avg:142.07ms | |
step:1034/1330 train_time:145489ms step_avg:142.08ms | |
step:1035/1330 train_time:145638ms step_avg:142.09ms | |
step:1036/1330 train_time:145787ms step_avg:142.09ms | |
step:1037/1330 train_time:145939ms step_avg:142.10ms | |
step:1038/1330 train_time:146087ms step_avg:142.11ms | |
step:1039/1330 train_time:146235ms step_avg:142.11ms | |
step:1040/1330 train_time:146383ms step_avg:142.12ms | |
step:1041/1330 train_time:146533ms step_avg:142.13ms | |
step:1042/1330 train_time:146678ms step_avg:142.13ms | |
step:1043/1330 train_time:146826ms step_avg:142.14ms | |
step:1044/1330 train_time:146974ms step_avg:142.14ms | |
step:1045/1330 train_time:147123ms step_avg:142.15ms | |
step:1046/1330 train_time:147273ms step_avg:142.16ms | |
step:1047/1330 train_time:147422ms step_avg:142.16ms | |
step:1048/1330 train_time:147570ms step_avg:142.17ms | |
step:1049/1330 train_time:147716ms step_avg:142.17ms | |
step:1050/1330 train_time:147865ms step_avg:142.18ms | |
step:1051/1330 train_time:148016ms step_avg:142.19ms | |
step:1052/1330 train_time:148166ms step_avg:142.19ms | |
step:1053/1330 train_time:148313ms step_avg:142.20ms | |
step:1054/1330 train_time:148462ms step_avg:142.20ms | |
step:1055/1330 train_time:148607ms step_avg:142.21ms | |
step:1056/1330 train_time:148756ms step_avg:142.21ms | |
step:1057/1330 train_time:148905ms step_avg:142.22ms | |
step:1058/1330 train_time:149059ms step_avg:142.23ms | |
step:1059/1330 train_time:149208ms step_avg:142.24ms | |
step:1060/1330 train_time:149353ms step_avg:142.24ms | |
step:1061/1330 train_time:149499ms step_avg:142.24ms | |
step:1062/1330 train_time:149649ms step_avg:142.25ms | |
step:1063/1330 train_time:149798ms step_avg:142.26ms | |
step:1064/1330 train_time:149948ms step_avg:142.27ms | |
step:1065/1330 train_time:150099ms step_avg:142.27ms | |
step:1066/1330 train_time:150249ms step_avg:142.28ms | |
step:1067/1330 train_time:150401ms step_avg:142.29ms | |
step:1068/1330 train_time:150547ms step_avg:142.29ms | |
step:1069/1330 train_time:150696ms step_avg:142.30ms | |
step:1070/1330 train_time:150846ms step_avg:142.31ms | |
step:1071/1330 train_time:151003ms step_avg:142.32ms | |
step:1072/1330 train_time:151151ms step_avg:142.33ms | |
step:1073/1330 train_time:151298ms step_avg:142.33ms | |
step:1074/1330 train_time:151447ms step_avg:142.34ms | |
step:1075/1330 train_time:151596ms step_avg:142.34ms | |
step:1076/1330 train_time:151745ms step_avg:142.35ms | |
step:1077/1330 train_time:151893ms step_avg:142.35ms | |
step:1078/1330 train_time:152046ms step_avg:142.36ms | |
step:1079/1330 train_time:152194ms step_avg:142.37ms | |
step:1080/1330 train_time:152344ms step_avg:142.38ms | |
step:1081/1330 train_time:152492ms step_avg:142.38ms | |
step:1082/1330 train_time:152642ms step_avg:142.39ms | |
step:1083/1330 train_time:152789ms step_avg:142.39ms | |
step:1084/1330 train_time:152940ms step_avg:142.40ms | |
step:1085/1330 train_time:153091ms step_avg:142.41ms | |
step:1086/1330 train_time:153242ms step_avg:142.42ms | |
step:1087/1330 train_time:153391ms step_avg:142.42ms | |
step:1088/1330 train_time:153543ms step_avg:142.43ms | |
step:1089/1330 train_time:153698ms step_avg:142.44ms | |
step:1090/1330 train_time:153851ms step_avg:142.45ms | |
step:1091/1330 train_time:153999ms step_avg:142.46ms | |
step:1092/1330 train_time:154147ms step_avg:142.47ms | |
step:1093/1330 train_time:154299ms step_avg:142.47ms | |
step:1094/1330 train_time:154448ms step_avg:142.48ms | |
step:1095/1330 train_time:154597ms step_avg:142.49ms | |
step:1096/1330 train_time:154751ms step_avg:142.50ms | |
step:1097/1330 train_time:154902ms step_avg:142.50ms | |
step:1098/1330 train_time:155058ms step_avg:142.52ms | |
step:1099/1330 train_time:155208ms step_avg:142.52ms | |
step:1100/1330 train_time:155355ms step_avg:142.53ms | |
step:1101/1330 train_time:155506ms step_avg:142.54ms | |
step:1102/1330 train_time:155657ms step_avg:142.54ms | |
step:1103/1330 train_time:155806ms step_avg:142.55ms | |
step:1104/1330 train_time:155952ms step_avg:142.55ms | |
step:1105/1330 train_time:156104ms step_avg:142.56ms | |
step:1106/1330 train_time:156252ms step_avg:142.57ms | |
step:1107/1330 train_time:156402ms step_avg:142.57ms | |
step:1108/1330 train_time:156551ms step_avg:142.58ms | |
step:1109/1330 train_time:156698ms step_avg:142.58ms | |
step:1110/1330 train_time:156849ms step_avg:142.59ms | |
step:1111/1330 train_time:156999ms step_avg:142.60ms | |
step:1112/1330 train_time:157150ms step_avg:142.60ms | |
step:1113/1330 train_time:157298ms step_avg:142.61ms | |
step:1114/1330 train_time:157451ms step_avg:142.62ms | |
step:1115/1330 train_time:157605ms step_avg:142.63ms | |
step:1116/1330 train_time:157751ms step_avg:142.63ms | |
step:1117/1330 train_time:157901ms step_avg:142.64ms | |
step:1118/1330 train_time:158056ms step_avg:142.65ms | |
step:1119/1330 train_time:158205ms step_avg:142.66ms | |
step:1120/1330 train_time:158353ms step_avg:142.66ms | |
step:1121/1330 train_time:158501ms step_avg:142.67ms | |
step:1122/1330 train_time:158651ms step_avg:142.67ms | |
step:1123/1330 train_time:158798ms step_avg:142.68ms | |
step:1124/1330 train_time:158949ms step_avg:142.68ms | |
step:1125/1330 train_time:159099ms step_avg:142.69ms | |
step:1125/1330 val_loss:3.3396 train_time:159169ms step_avg:142.75ms | |
step:1126/1330 train_time:159252ms step_avg:142.70ms | |
step:1127/1330 train_time:159405ms step_avg:142.71ms | |
step:1128/1330 train_time:159557ms step_avg:142.72ms | |
step:1129/1330 train_time:159710ms step_avg:142.73ms | |
step:1130/1330 train_time:159858ms step_avg:142.73ms | |
step:1131/1330 train_time:160010ms step_avg:142.74ms | |
step:1132/1330 train_time:160157ms step_avg:142.74ms | |
step:1133/1330 train_time:160309ms step_avg:142.75ms | |
step:1134/1330 train_time:160461ms step_avg:142.76ms | |
step:1135/1330 train_time:160616ms step_avg:142.77ms | |
step:1136/1330 train_time:160770ms step_avg:142.78ms | |
step:1137/1330 train_time:160919ms step_avg:142.79ms | |
step:1138/1330 train_time:161074ms step_avg:142.80ms | |
step:1139/1330 train_time:161225ms step_avg:142.80ms | |
step:1140/1330 train_time:161372ms step_avg:142.81ms | |
step:1141/1330 train_time:161522ms step_avg:142.81ms | |
step:1142/1330 train_time:161669ms step_avg:142.82ms | |
step:1143/1330 train_time:161818ms step_avg:142.82ms | |
step:1144/1330 train_time:161970ms step_avg:142.83ms | |
step:1145/1330 train_time:162118ms step_avg:142.84ms | |
step:1146/1330 train_time:162272ms step_avg:142.84ms | |
step:1147/1330 train_time:162419ms step_avg:142.85ms | |
step:1148/1330 train_time:162572ms step_avg:142.86ms | |
step:1149/1330 train_time:162727ms step_avg:142.87ms | |
step:1150/1330 train_time:162875ms step_avg:142.87ms | |
step:1151/1330 train_time:163031ms step_avg:142.88ms | |
step:1152/1330 train_time:163184ms step_avg:142.89ms | |
step:1153/1330 train_time:163332ms step_avg:142.90ms | |
step:1154/1330 train_time:163480ms step_avg:142.90ms | |
step:1155/1330 train_time:163631ms step_avg:142.91ms | |
step:1156/1330 train_time:163785ms step_avg:142.92ms | |
step:1157/1330 train_time:163934ms step_avg:142.92ms | |
step:1158/1330 train_time:164084ms step_avg:142.93ms | |
step:1159/1330 train_time:164234ms step_avg:142.94ms | |
step:1160/1330 train_time:164386ms step_avg:142.94ms | |
step:1161/1330 train_time:164535ms step_avg:142.95ms | |
step:1162/1330 train_time:164684ms step_avg:142.95ms | |
step:1163/1330 train_time:164837ms step_avg:142.96ms | |
step:1164/1330 train_time:164988ms step_avg:142.97ms | |
step:1165/1330 train_time:165136ms step_avg:142.97ms | |
step:1166/1330 train_time:165285ms step_avg:142.98ms | |
step:1167/1330 train_time:165435ms step_avg:142.99ms | |
step:1168/1330 train_time:165589ms step_avg:143.00ms | |
step:1169/1330 train_time:165742ms step_avg:143.00ms | |
step:1170/1330 train_time:165891ms step_avg:143.01ms | |
step:1171/1330 train_time:166038ms step_avg:143.01ms | |
step:1172/1330 train_time:166186ms step_avg:143.02ms | |
step:1173/1330 train_time:166336ms step_avg:143.02ms | |
step:1174/1330 train_time:166498ms step_avg:143.04ms | |
step:1175/1330 train_time:166649ms step_avg:143.05ms | |
step:1176/1330 train_time:166800ms step_avg:143.05ms | |
step:1177/1330 train_time:166954ms step_avg:143.06ms | |
step:1178/1330 train_time:167108ms step_avg:143.07ms | |
step:1179/1330 train_time:167254ms step_avg:143.07ms | |
step:1180/1330 train_time:167410ms step_avg:143.09ms | |
step:1181/1330 train_time:167557ms step_avg:143.09ms | |
step:1182/1330 train_time:167708ms step_avg:143.10ms | |
step:1183/1330 train_time:167859ms step_avg:143.10ms | |
step:1184/1330 train_time:168012ms step_avg:143.11ms | |
step:1185/1330 train_time:168165ms step_avg:143.12ms | |
step:1186/1330 train_time:168320ms step_avg:143.13ms | |
step:1187/1330 train_time:168484ms step_avg:143.15ms | |
step:1188/1330 train_time:168631ms step_avg:143.15ms | |
step:1189/1330 train_time:168787ms step_avg:143.16ms | |
step:1190/1330 train_time:168936ms step_avg:143.17ms | |
step:1191/1330 train_time:169088ms step_avg:143.17ms | |
step:1192/1330 train_time:169240ms step_avg:143.18ms | |
step:1193/1330 train_time:169390ms step_avg:143.19ms | |
step:1194/1330 train_time:169541ms step_avg:143.19ms | |
step:1195/1330 train_time:169694ms step_avg:143.20ms | |
step:1196/1330 train_time:169845ms step_avg:143.21ms | |
step:1197/1330 train_time:169995ms step_avg:143.21ms | |
step:1198/1330 train_time:170154ms step_avg:143.23ms | |
step:1199/1330 train_time:170306ms step_avg:143.23ms | |
step:1200/1330 train_time:170455ms step_avg:143.24ms | |
step:1201/1330 train_time:170608ms step_avg:143.25ms | |
step:1202/1330 train_time:170771ms step_avg:143.26ms | |
step:1203/1330 train_time:170924ms step_avg:143.27ms | |
step:1204/1330 train_time:171077ms step_avg:143.28ms | |
step:1205/1330 train_time:171225ms step_avg:143.28ms | |
step:1206/1330 train_time:171377ms step_avg:143.29ms | |
step:1207/1330 train_time:171528ms step_avg:143.30ms | |
step:1208/1330 train_time:171678ms step_avg:143.30ms | |
step:1209/1330 train_time:171831ms step_avg:143.31ms | |
step:1210/1330 train_time:171984ms step_avg:143.32ms | |
step:1211/1330 train_time:172137ms step_avg:143.33ms | |
step:1212/1330 train_time:172290ms step_avg:143.34ms | |
step:1213/1330 train_time:172442ms step_avg:143.34ms | |
step:1214/1330 train_time:172592ms step_avg:143.35ms | |
step:1215/1330 train_time:172743ms step_avg:143.36ms | |
step:1216/1330 train_time:172891ms step_avg:143.36ms | |
step:1217/1330 train_time:173042ms step_avg:143.37ms | |
step:1218/1330 train_time:173191ms step_avg:143.37ms | |
step:1219/1330 train_time:173343ms step_avg:143.38ms | |
step:1220/1330 train_time:173494ms step_avg:143.38ms | |
step:1221/1330 train_time:173643ms step_avg:143.39ms | |
step:1222/1330 train_time:173791ms step_avg:143.39ms | |
step:1223/1330 train_time:173944ms step_avg:143.40ms | |
step:1224/1330 train_time:174097ms step_avg:143.41ms | |
step:1225/1330 train_time:174246ms step_avg:143.41ms | |
step:1226/1330 train_time:174397ms step_avg:143.42ms | |
step:1227/1330 train_time:174551ms step_avg:143.43ms | |
step:1228/1330 train_time:174698ms step_avg:143.43ms | |
step:1229/1330 train_time:174851ms step_avg:143.44ms | |
step:1230/1330 train_time:175010ms step_avg:143.45ms | |
step:1231/1330 train_time:175159ms step_avg:143.46ms | |
step:1232/1330 train_time:175311ms step_avg:143.46ms | |
step:1233/1330 train_time:175464ms step_avg:143.47ms | |
step:1234/1330 train_time:175616ms step_avg:143.48ms | |
step:1235/1330 train_time:175768ms step_avg:143.48ms | |
step:1236/1330 train_time:175919ms step_avg:143.49ms | |
step:1237/1330 train_time:176069ms step_avg:143.50ms | |
step:1238/1330 train_time:176235ms step_avg:143.51ms | |
step:1239/1330 train_time:176388ms step_avg:143.52ms | |
step:1240/1330 train_time:176544ms step_avg:143.53ms | |
step:1241/1330 train_time:176697ms step_avg:143.54ms | |
step:1242/1330 train_time:176846ms step_avg:143.54ms | |
step:1243/1330 train_time:176999ms step_avg:143.55ms | |
step:1244/1330 train_time:177151ms step_avg:143.56ms | |
step:1245/1330 train_time:177304ms step_avg:143.57ms | |
step:1246/1330 train_time:177452ms step_avg:143.57ms | |
step:1247/1330 train_time:177601ms step_avg:143.57ms | |
step:1248/1330 train_time:177751ms step_avg:143.58ms | |
step:1249/1330 train_time:177900ms step_avg:143.58ms | |
step:1250/1330 train_time:178053ms step_avg:143.59ms | |
step:1250/1330 val_loss:3.2970 train_time:178125ms step_avg:143.65ms | |
step:1251/1330 train_time:178208ms step_avg:143.60ms | |
step:1252/1330 train_time:178357ms step_avg:143.60ms | |
step:1253/1330 train_time:178505ms step_avg:143.61ms | |
step:1254/1330 train_time:178652ms step_avg:143.61ms | |
step:1255/1330 train_time:178813ms step_avg:143.63ms | |
step:1256/1330 train_time:178961ms step_avg:143.63ms | |
step:1257/1330 train_time:179111ms step_avg:143.63ms | |
step:1258/1330 train_time:179269ms step_avg:143.65ms | |
step:1259/1330 train_time:179420ms step_avg:143.65ms | |
step:1260/1330 train_time:179565ms step_avg:143.65ms | |
step:1261/1330 train_time:179717ms step_avg:143.66ms | |
step:1262/1330 train_time:179870ms step_avg:143.67ms | |
step:1263/1330 train_time:180022ms step_avg:143.67ms | |
step:1264/1330 train_time:180171ms step_avg:143.68ms | |
step:1265/1330 train_time:180320ms step_avg:143.68ms | |
step:1266/1330 train_time:180474ms step_avg:143.69ms | |
step:1267/1330 train_time:180626ms step_avg:143.70ms | |
step:1268/1330 train_time:180778ms step_avg:143.70ms | |
step:1269/1330 train_time:180934ms step_avg:143.71ms | |
step:1270/1330 train_time:181081ms step_avg:143.71ms | |
step:1271/1330 train_time:181231ms step_avg:143.72ms | |
step:1272/1330 train_time:181377ms step_avg:143.72ms | |
step:1273/1330 train_time:181526ms step_avg:143.73ms | |
step:1274/1330 train_time:181677ms step_avg:143.73ms | |
step:1275/1330 train_time:181830ms step_avg:143.74ms | |
step:1276/1330 train_time:181980ms step_avg:143.74ms | |
step:1277/1330 train_time:182132ms step_avg:143.75ms | |
step:1278/1330 train_time:182279ms step_avg:143.75ms | |
step:1279/1330 train_time:182431ms step_avg:143.76ms | |
step:1280/1330 train_time:182590ms step_avg:143.77ms | |
step:1281/1330 train_time:182738ms step_avg:143.78ms | |
step:1282/1330 train_time:182886ms step_avg:143.78ms | |
step:1283/1330 train_time:183037ms step_avg:143.78ms | |
step:1284/1330 train_time:183191ms step_avg:143.79ms | |
step:1285/1330 train_time:183340ms step_avg:143.80ms | |
step:1286/1330 train_time:183490ms step_avg:143.80ms | |
step:1287/1330 train_time:183641ms step_avg:143.81ms | |
step:1288/1330 train_time:183793ms step_avg:143.81ms | |
step:1289/1330 train_time:183952ms step_avg:143.82ms | |
step:1290/1330 train_time:184114ms step_avg:143.84ms | |
step:1291/1330 train_time:184269ms step_avg:143.85ms | |
step:1292/1330 train_time:184424ms step_avg:143.86ms | |
step:1293/1330 train_time:184585ms step_avg:143.87ms | |
step:1294/1330 train_time:184737ms step_avg:143.88ms | |
step:1295/1330 train_time:184887ms step_avg:143.88ms | |
step:1296/1330 train_time:185042ms step_avg:143.89ms | |
step:1297/1330 train_time:185196ms step_avg:143.90ms | |
step:1298/1330 train_time:185346ms step_avg:143.90ms | |
step:1299/1330 train_time:185497ms step_avg:143.91ms | |
step:1300/1330 train_time:185646ms step_avg:143.91ms | |
step:1301/1330 train_time:185795ms step_avg:143.92ms | |
step:1302/1330 train_time:185950ms step_avg:143.92ms | |
step:1303/1330 train_time:186110ms step_avg:143.94ms | |
step:1304/1330 train_time:186265ms step_avg:143.95ms | |
step:1305/1330 train_time:186413ms step_avg:143.95ms | |
step:1306/1330 train_time:186565ms step_avg:143.95ms | |
step:1307/1330 train_time:186717ms step_avg:143.96ms | |
step:1308/1330 train_time:186871ms step_avg:143.97ms | |
step:1309/1330 train_time:187031ms step_avg:143.98ms | |
step:1310/1330 train_time:187180ms step_avg:143.98ms | |
step:1311/1330 train_time:187331ms step_avg:143.99ms | |
step:1312/1330 train_time:187485ms step_avg:144.00ms | |
step:1313/1330 train_time:187639ms step_avg:144.01ms | |
step:1314/1330 train_time:187791ms step_avg:144.01ms | |
step:1315/1330 train_time:187938ms step_avg:144.01ms | |
step:1316/1330 train_time:188088ms step_avg:144.02ms | |
step:1317/1330 train_time:188239ms step_avg:144.02ms | |
step:1318/1330 train_time:188397ms step_avg:144.03ms | |
step:1319/1330 train_time:188546ms step_avg:144.04ms | |
step:1320/1330 train_time:188696ms step_avg:144.04ms | |
step:1321/1330 train_time:188848ms step_avg:144.05ms | |
step:1322/1330 train_time:189012ms step_avg:144.06ms | |
step:1323/1330 train_time:189167ms step_avg:144.07ms | |
step:1324/1330 train_time:189317ms step_avg:144.08ms | |
step:1325/1330 train_time:189468ms step_avg:144.08ms | |
step:1326/1330 train_time:189622ms step_avg:144.09ms | |
step:1327/1330 train_time:189771ms step_avg:144.09ms | |
step:1328/1330 train_time:189920ms step_avg:144.10ms | |
step:1329/1330 train_time:190086ms step_avg:144.11ms | |
step:1330/1330 train_time:190238ms step_avg:144.12ms | |
step:1330/1330 val_loss:3.2774 train_time:190311ms step_avg:144.17ms | |
peak memory consumption: 37649 MiB |
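The log above ends at step 1330 with val_loss 3.2774 after roughly 190 seconds of training time. A minimal parsing sketch (not part of the gist; the filename train.log is hypothetical) for pulling the periodic val_loss checkpoints out of a saved copy of this log, e.g. to plot validation loss against wall-clock time:

import re

# Matches the periodic validation lines, e.g.
#   step:500/1330 val_loss:3.6338 train_time:67449ms step_avg:137.65ms
val_re = re.compile(r"step:(\d+)/\d+ val_loss:([\d.]+) train_time:(\d+)ms")

with open("train.log") as f:  # hypothetical path to a saved copy of the log
    points = [(int(m[1]), float(m[2]), int(m[3]) / 1e3)
              for m in map(val_re.search, f) if m]

for step, loss, secs in points:
    print(f"step {step:>4}: val_loss {loss:.4f} at {secs:.1f}s")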
One note not included in this gist: you must use the Dockerfile from the main repo as of today, but update the torch dependency to a more modern version: 2.7.0.dev20250107
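Since that pin is a specific nightly build, a quick runtime check (my own suggestion, not part of the gist) can catch a mismatched container before a long run is launched:

import torch

REQUIRED = "2.7.0.dev20250107"  # the nightly pin from the note above
if not torch.__version__.startswith(REQUIRED):
    raise RuntimeError(f"expected torch {REQUIRED}, found {torch.__version__}")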