Skip to content

Commit 0e1ac53

Browse files
authored
[ENH] TimeXer model from thuml (#1797)
### Description

This PR addresses #1793 by implementing the `TimeXer` model and aligning it with PTF's design.
1 parent 61156fd commit 0e1ac53

File tree

6 files changed

+1477
-0
lines changed

6 files changed

+1477
-0
lines changed

pytorch_forecasting/models/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
TemporalFusionTransformer,
2020
)
2121
from pytorch_forecasting.models.tide import TiDEModel
22+
from pytorch_forecasting.models.timexer import TimeXer
2223

2324
__all__ = [
2425
"NBeats",
@@ -37,4 +38,5 @@
3738
"MultiEmbedding",
3839
"DecoderMLP",
3940
"TiDEModel",
41+
"TimeXer",
4042
]
Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
"""
2+
TimeXer model for forecasting time series.
3+
"""
4+
5+
from pytorch_forecasting.models.timexer._timexer import TimeXer
6+
from pytorch_forecasting.models.timexer._timexer_metadata import TimeXerMetadata
7+
from pytorch_forecasting.models.timexer.sub_modules import (
8+
AttentionLayer,
9+
DataEmbedding_inverted,
10+
Encoder,
11+
EncoderLayer,
12+
EnEmbedding,
13+
FlattenHead,
14+
FullAttention,
15+
PositionalEmbedding,
16+
TriangularCausalMask,
17+
)
18+
19+
__all__ = [
20+
"TimeXer",
21+
"TriangularCausalMask",
22+
"FullAttention",
23+
"AttentionLayer",
24+
"DataEmbedding_inverted",
25+
"PositionalEmbedding",
26+
"FlattenHead",
27+
"EnEmbedding",
28+
"Encoder",
29+
"EncoderLayer",
30+
"TimeXerMetadata",
31+
]

0 commit comments

Comments
 (0)