# model/InformerOnlyX/embed.py
import torch
import torch.nn as nn
import math


class PositionalEmbedding(nn.Module):
    """Fixed sinusoidal positional encoding, precomputed for up to max_len positions."""

    def __init__(self, d_model, max_len=5000):
        super().__init__()
        pe = torch.zeros(max_len, d_model)
        position = torch.arange(0, max_len).unsqueeze(1).float()
        div_term = torch.exp(
            torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model)
        )
        pe[:, 0::2] = torch.sin(position * div_term)  # even dimensions
        pe[:, 1::2] = torch.cos(position * div_term)  # odd dimensions
        # Register as a buffer so it moves with the module (.to/.cuda) but is never trained.
        self.register_buffer("pe", pe.unsqueeze(0))  # [1, L, D]

    def forward(self, x):
        # Return the encoding for the first x.size(1) positions: [1, seq_len, d_model].
        return self.pe[:, :x.size(1)]


class DataEmbedding(nn.Module):
    """Informer-style embedding without time covariates."""

    def __init__(self, c_in, d_model, dropout):
        super().__init__()
        self.value_embedding = nn.Linear(c_in, d_model)
        self.position_embedding = PositionalEmbedding(d_model)
        self.dropout = nn.Dropout(dropout)

    def forward(self, x):
        # Project input features to d_model and add the (broadcast) positional encoding.
        x = self.value_embedding(x) + self.position_embedding(x)
        return self.dropout(x)
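

# --- Usage sketch (illustrative only; the batch/sequence/feature sizes below are
# --- arbitrary example values, not part of the original module) ---
if __name__ == "__main__":
    batch, seq_len, c_in, d_model = 4, 96, 7, 512
    embed = DataEmbedding(c_in=c_in, d_model=d_model, dropout=0.1)
    x = torch.randn(batch, seq_len, c_in)  # raw input series, no time covariates
    out = embed(x)                         # value projection + positional encoding, then dropout
    assert out.shape == (batch, seq_len, d_model)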