Isaac Godfried (isaacmg)

View GitHub Profile
label = conll04_eval
model_type = spert
model_path = data/models/ade
tokenizer_path = data/models/ade
dataset_path = data/datasets/ade/ade_split_0_test.json
types_path = data/datasets/ade/ade_types.json
eval_batch_size = 1
rel_filter_threshold = 0.4
size_embedding = 25
prop_drop = 0.1
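Aside (not part of the original gist): a minimal sketch of reading an INI-style block like the one above with Python's standard configparser. The file name ade_eval.conf and the [eval] section header are assumptions added for illustration; this is not SpERT's own config loader.

import configparser

# Hypothetical file and section names; the block above would sit under [eval].
parser = configparser.ConfigParser()
parser.read("ade_eval.conf")
cfg = parser["eval"]

print(cfg["model_path"])                      # data/models/ade
print(cfg.getfloat("rel_filter_threshold"))   # 0.4
print(cfg.getint("eval_batch_size"))          # 1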
_column_definition = [
    ('id', DataTypes.REAL_VALUED, InputTypes.ID),
    ('hours_from_start', DataTypes.REAL_VALUED, InputTypes.TIME),
    ('power_usage', DataTypes.REAL_VALUED, InputTypes.TARGET),
    ('hour', DataTypes.REAL_VALUED, InputTypes.KNOWN_INPUT),
    ('day_of_week', DataTypes.REAL_VALUED, InputTypes.KNOWN_INPUT),
    ('hours_from_start', DataTypes.REAL_VALUED, InputTypes.KNOWN_INPUT),
    ('categorical_id', DataTypes.CATEGORICAL, InputTypes.STATIC_INPUT),
]
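Illustration only (not from the gist): the DataTypes and InputTypes enums below mirror the ones used in the TFT data formatters, and the helper columns_of is a hypothetical name showing the common pattern of filtering the column definition by input type.

import enum


class DataTypes(enum.IntEnum):
    REAL_VALUED = 0
    CATEGORICAL = 1
    DATE = 2


class InputTypes(enum.IntEnum):
    TARGET = 0
    OBSERVED_INPUT = 1
    KNOWN_INPUT = 2
    STATIC_INPUT = 3
    ID = 4
    TIME = 5


def columns_of(definition, input_type):
    """Return the names of all columns registered under the given InputTypes value."""
    return [name for name, _, itype in definition if itype == input_type]


# e.g. columns_of(_column_definition, InputTypes.KNOWN_INPUT)
# -> ['hour', 'day_of_week', 'hours_from_start']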
model = model_class.from_pretrained(self.args.model_path,
                                    config=config,
                                    # SpERT model parameters
                                    cls_token=self._tokenizer.convert_tokens_to_ids('[CLS]'),
                                    relation_types=input_reader.relation_type_count - 1,
                                    entity_types=input_reader.entity_type_count,
                                    max_pairs=self.args.max_pairs,
                                    prop_drop=self.args.prop_drop,
                                    size_embedding=self.args.size_embedding,
                                    freeze_transformer=self.args.freeze_transformer)
import torch


class TransformerXCBasic(torch.nn.Module):
    """ Transformer model """

    def __init__(self, n_time_series, out_seq_len, device, d_model=128, dropout=.5, n_head=8):
        super(TransformerXCBasic, self).__init__()
        self.input_dim = n_time_series
        self.n_head = n_head
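The preview cuts the class off here. Below is only a hypothetical sketch of how such a torch.nn.TransformerEncoder wrapper is typically finished; the class name, layer names, and forward signature are assumptions, not the author's code.

import torch


class SimpleTransformerSketch(torch.nn.Module):
    def __init__(self, n_time_series, out_seq_len, d_model=128, dropout=0.5, n_head=8, n_layers=2):
        super().__init__()
        self.embed = torch.nn.Linear(n_time_series, d_model)   # project features to model dim
        encoder_layer = torch.nn.TransformerEncoderLayer(d_model, n_head, dropout=dropout)
        self.encoder = torch.nn.TransformerEncoder(encoder_layer, num_layers=n_layers)
        self.head = torch.nn.Linear(d_model, 1)                 # per-step scalar forecast
        self.out_seq_len = out_seq_len

    def forward(self, x):
        # x: (batch, seq_len, n_time_series)
        h = self.embed(x).permute(1, 0, 2)    # TransformerEncoder expects (seq, batch, d_model)
        h = self.encoder(h)
        out = self.head(h).permute(1, 0, 2)   # back to (batch, seq_len, 1)
        return out[:, -self.out_seq_len:, :]  # last out_seq_len steps as the forecast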
wandb_version: 1
GCS:
  desc: null
  value: true
_wandb:
  desc: null
  value:
    cli_version: 0.10.17
    framework: torch
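Aside (not taken from that run): a config file in the shape above is what wandb writes out when a run is started with a config dict. A minimal example; the project name and the GCS flag value here are assumptions, and wandb fills in the _wandb/cli_version metadata itself.

import wandb

run = wandb.init(project="flow-forecast", config={"GCS": True})
print(run.config.GCS)   # True
run.finish()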
{
  "model_name": "Crossformer",
  "use_decoder": true,
  "model_type": "PyTorch",
  "model_params": {
    "n_time_series": 4,
    "forecast_history": 6,
    "forecast_length": 4,
    "seg_len": 6
  },
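The JSON above is cut off by the preview. As a generic illustration only (the file name and loading pattern are assumptions, not a specific library API), a config like this is usually read with the standard json module and its model_params unpacked into a model constructor:

import json

# Hypothetical path; the snippet above would be the beginning of this file.
with open("crossformer_config.json") as f:
    cfg = json.load(f)

print(cfg["model_name"])               # Crossformer
print(cfg["model_params"]["seg_len"])  # 6
# Typical pattern: model = SomeModelClass(**cfg["model_params"])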