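# Experiment configuration for multi-step traffic forecasting
# (307 sensor nodes, 12-step history, 12-step prediction horizon).
# Section comments are inferred from the key names and typical usage
# in spatio-temporal forecasting codebases, not from upstream docs.

# Dataset and windowing settings: node count, input lag and forecast
# horizon, train/val/test split ratios, normalization, and optional
# time-of-day / day-of-week input features.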
data:
  num_nodes: 307
  lag: 12
  horizon: 12
  val_ratio: 0.2
  test_ratio: 0.2
  tod: False
  normalizer: std
  column_wise: False
  default_graph: True
  add_time_in_day: True
  add_day_in_week: True
  steps_per_day: 288
  days_per_week: 7
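
# Model hyperparameters: embedding/skip dimensions, Laplacian positional
# encoding size (lape_dim), attention head counts for what appear to be
# geographic (geo_), semantic (sem_), and temporal (t_) branches, encoder
# depth, dropout/drop-path rates, distance/DTW masking thresholds
# (far_mask_delta, dtw_delta), and the curriculum-learning schedule.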
model:
  embed_dim: 64
  skip_dim: 256
  lape_dim: 8
  geo_num_heads: 4
  sem_num_heads: 2
  t_num_heads: 2
  mlp_ratio: 4
  qkv_bias: True
  drop: 0.
  attn_drop: 0.
  drop_path: 0.3
  s_attn_size: 3
  t_attn_size: 3
  enc_depth: 6
  type_ln: pre
  type_short_path: hop
  input_dim: 3
  output_dim: 1
  input_window: 12
  output_window: 12
  add_time_in_day: True
  add_day_in_week: True
  world_size: 1
  huber_delta: 1
  quan_delta: 0.25
  far_mask_delta: 5
  dtw_delta: 5
  use_curriculum_learning: True
  step_size: 2500
  max_epoch: 200
  task_level: 0
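
# Training settings: loss function, random seed, batch size, initial
# learning rate with optional multi-step decay, early stopping, and
# optional gradient-norm clipping. real_value presumably controls whether
# the loss is computed on de-normalized (real-scale) targets.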
train:
  loss_func: mae
  seed: 10
  batch_size: 64
  epochs: 300
  lr_init: 0.003
  weight_decay: 0
  lr_decay: False
  lr_decay_rate: 0.3
  lr_decay_step: "5,20,40,70"
  early_stop: True
  early_stop_patience: 15
  grad_norm: False
  max_grad_norm: 5
  real_value: True
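
# Evaluation settings: mae_thresh / mape_thresh are most likely thresholds
# for masking near-zero ground-truth values when computing test metrics.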
test:
  mae_thresh: null
  mape_thresh: 0.0
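
# Logging: report roughly every log_step training steps; plotting disabled.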
log:
  log_step: 200
  plot: False