REPST #3

Merged
czzhangheng merged 42 commits from REPST into main 2025-12-20 16:03:22 +08:00
13 changed files with 453 additions and 15 deletions
Showing only changes of commit 5c2380ae21

4
.vscode/launch.json vendored
View File

@@ -2107,12 +2107,12 @@
             "args": "--config ./config/iTransformer/AirQuality.yaml"
         },
         {
-            "name": "iTransformer: SolarEnergy",
+            "name": "HI: PEMS-BAY",
             "type": "debugpy",
             "request": "launch",
             "program": "run.py",
             "console": "integratedTerminal",
-            "args": "--config ./config/iTransformer/SolarEnergy.yaml"
+            "args": "--config ./config/HI/PEMS-BAY.yaml"
         },
     ]
 }

48
config/HI/AirQuality.yaml Normal file
View File

@@ -0,0 +1,48 @@
basic:
dataset: AirQuality
device: cuda:0
mode: train
model: HI
seed: 2023
data:
batch_size: 16
column_wise: false
days_per_week: 7
horizon: 24
input_dim: 6
lag: 24
normalizer: std
num_nodes: 35
steps_per_day: 24
test_ratio: 0.2
val_ratio: 0.2
model:
input_len: 24
output_len: 24
reverse: False
train:
batch_size: 16
debug: false
early_stop: true
early_stop_patience: 15
epochs: 1
grad_norm: false
log_step: 1000
loss_func: mae
lr_decay: true
lr_decay_rate: 0.3
lr_decay_step: 5,20,40,70
lr_init: 0.0001
mae_thresh: None
mape_thresh: 0.001
max_grad_norm: 5
output_dim: 35
optimizer: null
plot: false
real_value: true
scheduler: null
weight_decay: 0
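
A minimal sketch of how a config like the one above could drive the new model (assuming PyYAML; the loading code below is illustrative, not the repo's actual run.py driver):

import yaml
import torch
from model.HI.HI import HI  # added later in this PR

# Load the config added above.
with open("config/HI/AirQuality.yaml") as f:
    config = yaml.safe_load(f)

# HI only reads the `model` section: input_len, output_len, reverse.
model = HI(config["model"])

# Shape check against the `data` section: [batch_size, lag, num_nodes].
x = torch.randn(config["data"]["batch_size"],
                config["data"]["lag"],
                config["data"]["num_nodes"])
print(model(x).shape)  # torch.Size([16, 24, 35])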

48
config/HI/BJTaxi-InFlow.yaml Normal file
View File

@@ -0,0 +1,48 @@
basic:
dataset: BJTaxi-InFlow
device: cuda:0
mode: train
model: HI
seed: 2023
data:
batch_size: 32
column_wise: false
days_per_week: 7
horizon: 24
input_dim: 1
lag: 24
normalizer: std
num_nodes: 1024
steps_per_day: 48
test_ratio: 0.2
val_ratio: 0.2
model:
input_len: 24
output_len: 24
reverse: False
train:
batch_size: 16
debug: false
early_stop: true
early_stop_patience: 15
epochs: 1
grad_norm: false
log_step: 1000
loss_func: mae
lr_decay: true
lr_decay_rate: 0.3
lr_decay_step: 5,20,40,70
lr_init: 0.0001
mae_thresh: None
mape_thresh: 0.001
max_grad_norm: 5
output_dim: 1024
optimizer: null
plot: false
real_value: true
scheduler: null
weight_decay: 0

48
config/HI/BJTaxi-OutFlow.yaml Normal file
View File

@@ -0,0 +1,48 @@
basic:
dataset: BJTaxi-OutFlow
device: cuda:0
mode: train
model: HI
seed: 2023
data:
batch_size: 32
column_wise: false
days_per_week: 7
horizon: 24
input_dim: 1
lag: 24
normalizer: std
num_nodes: 1024
steps_per_day: 48
test_ratio: 0.2
val_ratio: 0.2
model:
input_len: 24
output_len: 24
reverse: False
train:
batch_size: 16
debug: false
early_stop: true
early_stop_patience: 15
epochs: 1
grad_norm: false
log_step: 1000
loss_func: mae
lr_decay: true
lr_decay_rate: 0.3
lr_decay_step: 5,20,40,70
lr_init: 0.0001
mae_thresh: None
mape_thresh: 0.001
max_grad_norm: 5
output_dim: 1024
optimizer: null
plot: false
real_value: true
scheduler: null
weight_decay: 0

48
config/HI/METR-LA.yaml Normal file
View File

@@ -0,0 +1,48 @@
basic:
dataset: METR-LA
device: cuda:1
mode: train
model: HI
seed: 2023
data:
batch_size: 16
column_wise: false
days_per_week: 7
horizon: 24
input_dim: 1
lag: 24
normalizer: std
num_nodes: 207
steps_per_day: 288
test_ratio: 0.2
val_ratio: 0.2
model:
input_len: 24
output_len: 24
reverse: False
train:
batch_size: 16
debug: false
early_stop: true
early_stop_patience: 15
epochs: 1
grad_norm: false
log_step: 1000
loss_func: mae
lr_decay: true
lr_decay_rate: 0.3
lr_decay_step: 5,20,40,70
lr_init: 0.0001
mae_thresh: None
mape_thresh: 0.001
max_grad_norm: 5
output_dim: 207
optimizer: null
plot: false
real_value: true
scheduler: null
weight_decay: 0

48
config/HI/NYCBike-InFlow.yaml Normal file
View File

@@ -0,0 +1,48 @@
basic:
dataset: NYCBike-InFlow
device: cuda:0
mode: train
model: HI
seed: 2023
data:
batch_size: 32
column_wise: false
days_per_week: 7
horizon: 24
input_dim: 1
lag: 24
normalizer: std
num_nodes: 128
steps_per_day: 48
test_ratio: 0.2
val_ratio: 0.2
model:
input_len: 24
output_len: 24
reverse: False
train:
batch_size: 16
debug: false
early_stop: true
early_stop_patience: 15
epochs: 1
grad_norm: false
log_step: 1000
loss_func: mae
lr_decay: true
lr_decay_rate: 0.3
lr_decay_step: 5,20,40,70
lr_init: 0.0001
mae_thresh: None
mape_thresh: 0.001
max_grad_norm: 5
output_dim: 128
optimizer: null
plot: false
real_value: true
scheduler: null
weight_decay: 0

48
config/HI/NYCBike-OutFlow.yaml Normal file
View File

@@ -0,0 +1,48 @@
basic:
dataset: NYCBike-OutFlow
device: cuda:0
mode: train
model: HI
seed: 2023
data:
batch_size: 32
column_wise: false
days_per_week: 7
horizon: 24
input_dim: 1
lag: 24
normalizer: std
num_nodes: 128
steps_per_day: 48
test_ratio: 0.2
val_ratio: 0.2
model:
input_len: 24
output_len: 24
reverse: False
train:
batch_size: 16
debug: false
early_stop: true
early_stop_patience: 15
epochs: 1
grad_norm: false
log_step: 1000
loss_func: mae
lr_decay: true
lr_decay_rate: 0.3
lr_decay_step: 5,20,40,70
lr_init: 0.0001
mae_thresh: None
mape_thresh: 0.001
max_grad_norm: 5
output_dim: 128
optimizer: null
plot: false
real_value: true
scheduler: null
weight_decay: 0

48
config/HI/PEMS-BAY.yaml Normal file
View File

@@ -0,0 +1,48 @@
basic:
dataset: PEMS-BAY
device: cuda:0
mode: train
model: HI
seed: 2023
data:
batch_size: 16
column_wise: false
days_per_week: 7
horizon: 24
input_dim: 1
lag: 24
normalizer: std
num_nodes: 325
steps_per_day: 288
test_ratio: 0.2
val_ratio: 0.2
model:
input_len: 24
output_len: 24
reverse: False
train:
batch_size: 16
debug: false
early_stop: true
early_stop_patience: 15
epochs: 1
grad_norm: false
log_step: 1000
loss_func: mae
lr_decay: true
lr_decay_rate: 0.3
lr_decay_step: 5,20,40,70
lr_init: 0.0001
mae_thresh: None
mape_thresh: 0.001
max_grad_norm: 5
output_dim: 325
optimizer: null
plot: false
real_value: true
scheduler: null
weight_decay: 0

48
config/HI/SolarEnergy.yaml Normal file
View File

@@ -0,0 +1,48 @@
basic:
dataset: SolarEnergy
device: cuda:0
mode: train
model: HI
seed: 2023
data:
batch_size: 16
column_wise: false
days_per_week: 7
horizon: 24
input_dim: 6
lag: 24
normalizer: std
num_nodes: 137
steps_per_day: 24
test_ratio: 0.2
val_ratio: 0.2
model:
input_len: 24
output_len: 24
reverse: False
train:
batch_size: 16
debug: false
early_stop: true
early_stop_patience: 15
epochs: 1
grad_norm: false
log_step: 1000
loss_func: mae
lr_decay:
lr_decay_rate:
lr_decay_step:
lr_init: 0.0001
mae_thresh: None
mape_thresh: 0.001
max_grad_norm: 5
output_dim: 137
optimizer: null
plot: false
real_value: true
scheduler: null
weight_decay: 0

View File

@@ -7,7 +7,11 @@ from dataloader.TSloader import get_dataloader as TS_loader
 def get_dataloader(config, normalizer, single):
+    TS_model = ["iTransformer", "HI"]
     model_name = config["basic"]["model"]
+    if model_name in TS_model:
+        return TS_loader(config, normalizer, single)
+    else :
     match model_name:
         case "STGNCDE":
             return cde_loader(config, normalizer, single)
@@ -17,7 +21,5 @@ def get_dataloader(config, normalizer, single):
             return DCRNN_loader(config, normalizer, single)
         case "EXP":
             return EXP_loader(config, normalizer, single)
-        case "iTransformer":
-            return TS_loader(config, normalizer, single)
         case _:
             return normal_loader(config, normalizer, single)
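
To make the routing explicit, here is a condensed sketch of the dispatch after this patch (only two match cases are reproduced; the loader imports are assumed from the existing module, and the else wrapper from the diff is folded away since the if branch returns early):

def get_dataloader(config, normalizer, single):
    # Models that reuse the generic time-series pipeline go through TS_loader.
    TS_model = ["iTransformer", "HI"]
    model_name = config["basic"]["model"]
    if model_name in TS_model:
        return TS_loader(config, normalizer, single)
    # Every other model keeps its dedicated loader via the match statement.
    match model_name:
        case "STGNCDE":
            return cde_loader(config, normalizer, single)
        case _:
            return normal_loader(config, normalizer, single)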

45
model/HI/HI.py Normal file
View File

@@ -0,0 +1,45 @@
from typing import List
import torch
from torch import nn


class HI(nn.Module):
    """
    Paper: Historical Inertia: A Neglected but Powerful Baseline for Long Sequence Time-series Forecasting
    Link: https://arxiv.org/abs/2103.16349
    Official code: None
    Venue: CIKM 2021
    Task: Long-term Time Series Forecasting
    """

    def __init__(self, config):
        """
        Init HI.

        Args:
            config (HIConfig): model config.
        """
        super().__init__()
        self.input_len = config['input_len']
        self.output_len = config['output_len']
        assert self.input_len >= self.output_len, "HI model requires input length >= output length"
        self.reverse = config['reverse']
        # self.fake_param = nn.Linear(1, 1, bias=False)

    def forward(self, inputs: torch.Tensor) -> torch.Tensor:
        """Forward function of HI.

        Args:
            inputs (torch.Tensor): shape = [B, L_in, N]

        Returns:
            torch.Tensor: model prediction [B, L_out, N].
        """
        # historical inertia: reuse the most recent output_len steps as the prediction
        prediction = inputs[:, -self.output_len:, :]
        # last-point variant (repeat the final observation):
        # prediction = inputs[:, [-1], :].expand(-1, self.output_len, -1)
        if self.reverse:
            prediction = prediction.flip(dims=[1])
        return prediction
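
A quick usage check of the class above (the config dict mirrors the `model` section of the HI YAML files; shapes follow the forward docstring):

import torch
from model.HI.HI import HI

model = HI({"input_len": 24, "output_len": 24, "reverse": False})
x = torch.randn(8, 24, 207)   # [B, L_in, N], e.g. METR-LA's 207 nodes
y = model(x)                  # [B, L_out, N]
assert y.shape == (8, 24, 207)
# With input_len == output_len, the prediction is exactly the input window.
assert torch.equal(y, x[:, -24:, :])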

View File

@@ -1,4 +1,5 @@
 from model.DDGCRN.DDGCRN import DDGCRN
+from model.HI import HI
 from model.TWDGCN.TWDGCN import TWDGCN
 from model.AGCRN.AGCRN import AGCRN
 from model.NLT.HierAttnLstm import HierAttnLstm
@@ -27,6 +28,7 @@ from model.ASTRA.astra import ASTRA as ASTRA
 from model.ASTRA.astrav2 import ASTRA as ASTRAv2
 from model.ASTRA.astrav3 import ASTRA as ASTRAv3
 from model.iTransformer.iTransformer import iTransformer
+from model.HI.HI import HI
@@ -92,3 +94,5 @@ def model_selector(config):
             return ASTRAv3(model_config)
         case "iTransformer":
             return iTransformer(model_config)
+        case "HI":
+            return HI(model_config)

View File

@@ -23,6 +23,9 @@ def init_model(args):
 def init_optimizer(model, args):
+    optimizer = None
+    lr_scheduler = None
     optim = args.get("optimizer", "Adam")
     match optim :
         case "Adam":