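# STMLP training configuration for the PEMSD7 traffic-forecasting dataset.

# basic: run-level settings (dataset name, train/test mode, compute device, model).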
basic:
  dataset: "PEMSD7"
  mode: "train"
  device: "cuda:0"
  model: "STMLP"
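
# data: dataset shape and preprocessing. lag and horizon are the input and output
# lengths in time steps; with steps_per_day: 288 (5-minute samples), 12 steps
# correspond to one hour of history predicting one hour ahead.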
data:
  num_nodes: 883
  lag: 12
  horizon: 12
  val_ratio: 0.2
  test_ratio: 0.2
  tod: False
  normalizer: std
  column_wise: False
  default_graph: True
  add_time_in_day: True
  add_day_in_week: True
  steps_per_day: 288
  days_per_week: 7
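
# model: architecture hyperparameters. The graph-learning keys (buildA_true,
# subgraph_size, node_dim, propalpha, tanhalpha) appear to follow the MTGNN-style
# learned-adjacency setup; model_type selects the stmlp variant.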
model:
  input_dim: 1
  output_dim: 1
  input_window: 12
  output_window: 12
  gcn_true: true
  buildA_true: true
  gcn_depth: 2
  dropout: 0.3
  subgraph_size: 20
  node_dim: 40
  dilation_exponential: 1
  conv_channels: 32
  residual_channels: 32
  skip_channels: 64
  end_channels: 128
  layers: 3
  propalpha: 0.05
  tanhalpha: 3
  layer_norm_affline: true
  use_curriculum_learning: true
  step_size1: 2500
  task_level: 0
  num_split: 1
  step_size2: 100
  model_type: stmlp
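
# train: optimization settings. When lr_decay is True, the learning rate is
# multiplied by lr_decay_rate at the epochs listed in lr_decay_step;
# teacher_stu presumably toggles teacher-student (distillation) training.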
train:
  loss_func: mae
  seed: 10
  batch_size: 16
  epochs: 300
  lr_init: 0.003
  weight_decay: 0
  lr_decay: False
  lr_decay_rate: 0.3
  lr_decay_step: "5,20,40,70"
  early_stop: True
  early_stop_patience: 15
  grad_norm: False
  max_grad_norm: 5
  real_value: True
  teacher_stu: True
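
# test: evaluation thresholds, commonly used to mask small ground-truth values
# when computing MAE/MAPE; null disables the corresponding mask.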
test:
  mae_thresh: null
  mape_thresh: 0.0
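
# log: logging frequency (in training steps) and whether to plot results.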
log:
  log_step: 2000
  plot: False