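# STGODE-LLM-GPT2 experiment configuration for the PEMS08 traffic dataset
# (train mode, seed 2025).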
basic:
  device: cuda:0
  dataset: PEMS08
  model: STGODE-LLM-GPT2
  mode: train
  seed: 2025

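# Data loading and preprocessing. PEMS08 provides 170 sensors; the model is fed
# a 12-step history (lag) and predicts a 12-step horizon. val_ratio and
# test_ratio split the series roughly 60/20/20, and "std" presumably selects
# z-score (standard-scaler) normalization.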
data:
  dataset_dir: data/PEMS08
  val_batch_size: 16
  graph_pkl_filename: data/PEMS08/PEMS08_spatial_distance.npy
  num_nodes: 170
  batch_size: 32
  input_dim: 1
  lag: 12
  horizon: 12
  val_ratio: 0.2
  test_ratio: 0.2
  tod: False
  normalizer: std
  column_wise: False
  default_graph: True
  add_time_in_day: True
  add_day_in_week: True
  steps_per_day: 24
  days_per_week: 7

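# STGODE backbone plus a GPT-2 branch. sigma1/sigma2 and thres1/thres2 are
# presumably the Gaussian-kernel bandwidths and sparsity thresholds used when
# building the spatial and semantic adjacency matrices, as in the original
# STGODE. The gpt2_* keys configure the Hugging Face GPT-2 backbone: gradient
# checkpointing enabled, weights frozen, loaded from a local directory
# (inferred from the key names).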
model:
  input_dim: 1
  output_dim: 1
  history: 12
  horizon: 12
  num_features: 1
  rnn_units: 64
  sigma1: 0.1
  sigma2: 10
  thres1: 0.6
  thres2: 0.5
  # HF GPT-2 settings
  gpt2_name: gpt2
  gpt2_grad_ckpt: True
  gpt2_freeze: True
  gpt2_local_dir: ./models/gpt2

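# Optimization settings: MAE loss, initial learning rate lr_init, optional
# multi-step decay at the lr_decay_step epoch milestones (only applied when
# lr_decay is True), and early stopping after 15 stagnant epochs.
# mae_thresh/mape_thresh presumably mask near-zero targets when computing
# metrics, and real_value likely means the loss is computed on de-normalized
# (real-scale) predictions.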
train:
  loss: mae
  batch_size: 32
  epochs: 100
  lr_init: 0.0003
  mape_thresh: 0.001
  mae_thresh: None
  debug: False
  output_dim: 1
  weight_decay: 0
  lr_decay: False
  lr_decay_rate: 0.3
  lr_decay_step: "10,30,60,90"
  early_stop: True
  early_stop_patience: 15
  grad_norm: False
  max_grad_norm: 5
  real_value: True
  log_step: 3000