From 09751a74f68abb267746ad08769ceb9257d2d910 Mon Sep 17 00:00:00 2001 From: Wenjie Du Date: Tue, 3 Sep 2024 16:59:01 +0800 Subject: [PATCH] Update the docs for TimeMixer (#500) --- .idea/icon.png | Bin 0 -> 5373 bytes README.md | 87 +++++++++++++------------- README_zh.md | 87 +++++++++++++------------- docs/examples.rst | 16 +++-- docs/index.rst | 6 +- docs/pypots.imputation.rst | 9 +++ pypots/data/load_specific_datasets.py | 4 +- tests/imputation/timemixer.py | 4 +- 8 files changed, 114 insertions(+), 99 deletions(-) create mode 100644 .idea/icon.png diff --git a/.idea/icon.png b/.idea/icon.png new file mode 100644 index 0000000000000000000000000000000000000000..0a58e1cc5446786d166679f273486575e2b556ab GIT binary patch literal 5373 zcmZ`-XEYlC+l>(;LNzumLaf+Z%_4Th-qc=2sa@1eG-{^yC}LBqYIbOi5~ZZome{G; zmQvJe@%26D{m%LRd_SIZ@44sRd!ApWxxpH1_1y7Mk7OnB>+I7^KW$NsQxL@ z(#H({V9y+Bt#^5O`H!BTpYQJOzIyd4H8u6<=%}Wq#@E*u3WaWOZxe|`K0ZEeZS8@9 z0SpG?=H{lUsX0GC-__M+WMuT`&!5iDP9P9iP*5;6HO0WdproX9a&mI}_U+>0Vh98x zCMGsLJ-xrbPa=_=ot>AKmPSTK7#SInNaX$d_c=H?+S=N1I9yCj%(rjf$Yk=w!~_C? z=lphczE!5yrZL| zjg3unb8}f)nVFecR#sM0Qj)s5x~i(GgoK2Ffq}HN^y=!WmzS5TYf2pez(O!WXj@0m zZ7=xS&E4coB_Gp67zA$rU?zcSs7cCtxMVCt@8`?Q;F99XL^etwblBN-`&XxISQJEf z949ey<(j>}+IVOF)TpGwVFa?5o?1d7TmERO#)ejC#w^AJDNB zy1T-W9Cqb@k9fH=HKN^O!!&Ar z^LH`DN~r`@O`I@mfBwB5!Dv84)MlhfolP3wg{?aSInEo%mHv}+8}ZGWQxeA-op@fe z4|Ifd`?t}nz#MD5XZPF?+ZSC0k!{sW{tCY;JrJhVZAY6lROMSi7FCM%LW|zO-B0lQ zEdjt_k4ym)kYQnE@mXPiR>7N6RL-`Z8llqc_q1JFEb(}?X!}7>46#SowVEs0y$y9V z%nbs!W%R$12I@2zA4S}y8G40eD8!I|T_Md0fn%>a%4mV_=__#r>iQXVnj3)7Y8|f! zSomQKZ>fADz9KtcT?Sh!Ww6C z;l>8Au;bMIS|W26@g}!($wt8?lbw(hXPzl{j{H|A?)OGkmE*I2->EPsyO*XqOKotk zb1$rCET)itm>g%-_P&U+xx|nTGB_D=R>XULsCgpBtE8Ya|3%N98k4b#>xV7A3s$fs zcQ;ilBCHP(g|ceelyC1Z1yoyf(55DmwtQzEuU|r$M@r0jxHB7c{o*3QO_2AV31`o= z-CCCJn#b&Kp!EjvR~3y5cB7qPk*!$t zj5u?nN^vE$p4elQ>%G~Jibvlm6jrlzkG?Pp(D_MWwP=tvbq=^We0Lh54NQJ+J5`r8 zE_3Jyq4`;IQA#7FcF)z4`5n+v+>$Dls$|%=111|582_w`m>TjtewXWvt;h^VyLHQ7 z^=(Q*SxKuztsMGV3;x2$`P69j@y+9KnaS&n3~m`!q}30Ke}N9E6=GPOtK3gg((=AR zV*HySXoZ%>N0dMFMq(Zd;b$(hrBpaF1zlSHtbUL{t7aZhJ?FTz_wpEmkrzwUd0+Lm zL>P+vSrp;B{WxX5v@E1_q(mOyoNV$apRK7aNcLf7_(`tftzjyu_9`^(O5|8ajpZ4A zqGjvna(t|68@m>tXS5={1CQ$44wLWI)8NDhk6jD10dXI;H>v!S4%%Su?9Sy9M*Z;= zdCN9-9aQ`a*lcOW)~-ir$&i9FG_DM!Zs)!KfTH4sVQyJsSPMUX{JHO-F^}=iY;?kF zXIvfk`XUHoI}k+7TZ?|#&jzM3c`q33^|ztMLp^@@(y_xjk3`fucC8Cgjk@F#PCwYm zPYyQbNHP7CctE*)A4rB3KeFB`x69d!sH=C{W}_;frpzRd-rNdI!SQ>~>rSb|1R5V) zk(+bEQODI@tlOfh#6DG8&F-_DRhkg^it!(NOtC_8ln=JfCw}-AMOIH>W4=gfWpvzrtf*seLGX z4o@liR|Mg4b2b1LPr=LiG2% z7O5RBGsChx>%_1$L*9~Imz()%T^VfOgv<;N!$SC#ar^_y=aWK}^G-2?>fyX4H}ET# zNv-}mk5EbW!_ht7>0q{}Yz4GTwZ~{6FesCn@iw}b39eAf8(v&1q_Et5Pe(TGHsbAz zvewf4hGLe4eUl-2J!w7RrYnnG89d5Kg6~y%CUoFu>7O|C%6nnza}vP-xQW?p($DP# zmhwF`&X+T*6O7JLDv@!6At`glR<2JFvzpB;yM^X*P=kp<rzk<)SF+ zOObP18WbOHVe}!w6bK>NVPdQfaL){B_*;oL`!xF-f4JI^%8VesQ2MsiN&Jq%(gvwZ zbX8kl_?skjdaP8eTH*XGC&V*UXaA?}jqQZEC}$-T`!6#S?pvQKc3F0c)7m`x)6gRy z&!AK;c5;xqI+W;wSntfqRGdzoR~tGw&IyLK3H#8iXpZkeQ!(m4e>Xw@?d0lHN}T;U zbzl*3`oOYcjfNZ1ai!@<)_tpPEaRM0Gsitfigi}p!(-|hNNGDy#sQvx5gOI%ppx9N~WQeKdG={ZL3EC==!7kUh_LrGThlbo#3!>O_o5YNU}~BDE)1Ze24TiHv33Z zsIJ2a(r4%JTjxe^`gYW;^++eSdEQCqwcPxLYLc1FxGU*r*bNcuh-I zz38lks-jJFLnR2Lv^*PEcPm9fM(2iS&Nk_%rQ*LPy(H{lNAeM%SK2#5N!le1?Tye& zQ^@AEdqOr^($4((VQiHN`m4)=m0xTSj@ypOO`>1OHQN1^HA)AG-F=Va`>8_atkUAl zd$GujokO)a27COO^YrVFTxuh6=QkQMnsT^m-pq7M*+4Z7d2PH1dX~{BsV?L?uX3ivV;SI&tQno}DR^6)~*6m$` z=Gq~wM1!Et@pQ*JOlm_f1Z`J<+dz*NsypbS&6t9z)a32`n#Vf0@~T*fH3tMXP71 zzx6otx@S%7rguQS?KEx0mA0_a`dm8TZ0iL$ebBM{)kgl7xYt33!J6O+VX>Gs0yBNj 
z$8c#2wj!@);M2+0ac?B|jR^J4x+e&GeBj?vb-_($=B``eY>yrts84-5h&H)sk07ee ztC)0|VD?*SKfd*;e)$yl{4$S|Ecs`L0Ef*zi7xS-pkCTL3_$W}ll;!nDM*dypvGU; z{B{p%u5m6S_me^?nWR1w;D=2bsxu=hP;C8>o|}uQ68yP#@lZ*ZAaK_-r`Pl5lH8c}*E}MoA;lnJNR~;r<~2 z=4P{pp)F1Tr8NkAJ8fwPKZv2ovL>e&qvy(u$!_(k<55?hI~-V#E%ez0zF}_Ro;bCpV~p6P_tu4OF2jF)7$WM8B8E zFvjj_;cV?w-_y4w!Gt++w$aq3@u7^L-0%noa@5|N<)fbn@gRQi(B35w35GjL)(Fs5 zVRgK%#IW|P_@4`xi+S{O>mhSU_D97zo0R(Zvx`)WE)Km>4Z75J5D}y{-HkMsFHkf~ z8jCO#ojgphNH>t9LIw>p=PHwd!{&v`?o1wA?@B`$Jp2fyA#}2^b<=1{+0Hf7Xe!y^ zbyLw1DJo*2G8sM`!XG@SG;IDznJhMJ{x7UKTuCTx2SUNHC7&%Fx(XP|9?jehLm7Hw zSF|_K(#+b98)%QR&!5&x0?vEj)H%oAq6cJBnVK}DUmH-U2@6-XDAENE{5X}FnY*iV zLy4Oyu20fP^8E*Oqaytk_@m`@_?xlcO>6XxX_d9!X3^=1-W?qx){N9YO8=TAqnMi& zPg!^c&8OU>c~7~txiL_(24&t6P7(wP;9m~T`iPh2ci+tL+jK>}^8xMW)dsDYd|HthM zfqk9kf&{$QhfKN#H|IYUJ?+fp3)Rtv)!1=7>-Xfa$L?ddEsoxVrx--fxdZYRkauh~Z((3guY`|EX>oq{*8 z3oxY6Maykm&WKyT?-=m+3Hd9EdT5EnXWTNWDw4izI<_c zQq_mo`jJ6EVsUJin&A9e*}yaT$I)*mD&ga}(~u0VYQ4+aB48__&#pE7>06GM;(iVg zS5Iuzy5^FFQgn9^ctyD8W-*Gpz>X8 z-$BXZc(w9cR#qb=DG$CQ97W9%9Pa6rO(SgjGccKiuRWaj1(VvvUWhpAtGR`)ePKpE zDj@O}LHfFAHf#zQsjn=i^a&r|db2&6lFQBS!gBsFs#G(fwT^A;6eNp2uCY zLG({iutl__qMSH z9J1nA*o^BhJ_p7ZzRPU;qnh=!8aDVQd{^U7Oh(3z7S#D&pXO=H#j0h&iyR4!o+wLJ zI`630_p5Y$)?c;chi4=gJbcxabl6?%wBkPhFe{1nhci`X5X0S-JoHogYdN(lQ@^(; z!>@dC0D5?lWsEt6G9dc&)o6=BQTIFEm z=a5ZAiK_px?)^ag3lFrh{HvjPjr!NX`B(L}oP)?1U4gm2vC7F) Time-Series.AI [^36] | ✅ | ✅ | ✅ | ✅ | ✅ | `Later in 2024` | +| Neural Net | TimeMixer[^37] | ✅ | | | | | `2024 - ICLR` | +| Neural Net | iTransformer🧑‍🔧[^24] | ✅ | | | | | `2024 - ICLR` | +| Neural Net | ImputeFormer🧑‍🔧[^34] | ✅ | | | | | `2024 - KDD` | +| Neural Net | SAITS[^1] | ✅ | | | | | `2023 - ESWA` | +| Neural Net | FreTS🧑‍🔧[^23] | ✅ | | | | | `2023 - NeurIPS` | +| Neural Net | Koopa🧑‍🔧[^29] | ✅ | | | | | `2023 - NeurIPS` | +| Neural Net | Crossformer🧑‍🔧[^16] | ✅ | | | | | `2023 - ICLR` | +| Neural Net | TimesNet[^14] | ✅ | | | | | `2023 - ICLR` | +| Neural Net | PatchTST🧑‍🔧[^18] | ✅ | | | | | `2023 - ICLR` | +| Neural Net | ETSformer🧑‍🔧[^19] | ✅ | | | | | `2023 - ICLR` | +| Neural Net | MICN🧑‍🔧[^27] | ✅ | | | | | `2023 - ICLR` | +| Neural Net | DLinear🧑‍🔧[^17] | ✅ | | | | | `2023 - AAAI` | +| Neural Net | TiDE🧑‍🔧[^28] | ✅ | | | | | `2023 - TMLR` | +| Neural Net | SCINet🧑‍🔧[^30] | ✅ | | | | | `2022 - NeurIPS` | +| Neural Net | Nonstationary Tr.🧑‍🔧[^25] | ✅ | | | | | `2022 - NeurIPS` | +| Neural Net | FiLM🧑‍🔧[^22] | ✅ | | | | | `2022 - NeurIPS` | +| Neural Net | RevIN_SCINet🧑‍🔧[^31] | ✅ | | | | | `2022 - ICLR` | +| Neural Net | Pyraformer🧑‍🔧[^26] | ✅ | | | | | `2022 - ICLR` | +| Neural Net | Raindrop[^5] | | | ✅ | | | `2022 - ICLR` | +| Neural Net | FEDformer🧑‍🔧[^20] | ✅ | | | | | `2022 - ICML` | +| Neural Net | Autoformer🧑‍🔧[^15] | ✅ | | | | | `2021 - NeurIPS` | +| Neural Net | CSDI[^12] | ✅ | ✅ | | | | `2021 - NeurIPS` | +| Neural Net | Informer🧑‍🔧[^21] | ✅ | | | | | `2021 - AAAI` | +| Neural Net | US-GAN[^10] | ✅ | | | | | `2021 - AAAI` | +| Neural Net | CRLI[^6] | | | | ✅ | | `2021 - AAAI` | +| Probabilistic | BTTF[^8] | | ✅ | | | | `2021 - TPAMI` | +| Neural Net | StemGNN🧑‍🔧[^33] | ✅ | | | | | `2020 - NeurIPS` | +| Neural Net | Reformer🧑‍🔧[^32] | ✅ | | | | | `2020 - ICLR` | +| Neural Net | GP-VAE[^11] | ✅ | | | | | `2020 - AISTATS` | +| Neural Net | VaDER[^7] | | | | ✅ | | `2019 - GigaSci.` | +| Neural Net | M-RNN[^9] | ✅ | | | | | `2019 - TBME` | +| Neural Net | BRITS[^3] | ✅ | | ✅ | | | `2018 - NeurIPS` | +| Neural Net | GRU-D[^4] | ✅ | | ✅ | | | `2018 - Sci. 
Rep.` |
+| Neural Net | TCN🧑‍🔧[^35] | ✅ | | | | | `2018 - arXiv` |
+| Neural Net | Transformer🧑‍🔧[^2] | ✅ | | | | | `2017 - NeurIPS` |
+| Naive | Lerp | ✅ | | | | | |
+| Naive | LOCF/NOCB | ✅ | | | | | |
+| Naive | Mean | ✅ | | | | | |
+| Naive | Median | ✅ | | | | | |
 
 💯 Contribute your model right now to increase your research impact! PyPOTS downloads are increasing rapidly (**[300K+ in total and 1K+ daily on PyPI so far](https://www.pepy.tech/projects/pypots)**),
 and your work will be widely used and cited by the community.
@@ -320,7 +321,7 @@ By committing your code, you'll
    [pypots/imputation/template](https://github.com/WenjieDu/PyPOTS/tree/main/pypots/imputation/template)) to quickly start;
 2. become one of [PyPOTS contributors](https://github.com/WenjieDu/PyPOTS/graphs/contributors) and
    be listed as a volunteer developer [on the PyPOTS website](https://pypots.com/about/#volunteer-developers);
-3. get mentioned in our [release notes](https://github.com/WenjieDu/PyPOTS/releases);
+3. get mentioned in PyPOTS [release notes](https://github.com/WenjieDu/PyPOTS/releases);
 
 You can also contribute to PyPOTS by simply starring🌟 this repo to help more people notice it.
 Your star is your recognition of PyPOTS, and it matters!
@@ -394,3 +395,5 @@ PyPOTS community is open, transparent, and surely friendly. Let's work together
 [^35]: Bai, S., Kolter, J. Z., & Koltun, V. (2018). [An empirical evaluation of generic convolutional and recurrent networks for sequence modeling](https://arxiv.org/abs/1803.01271). *arXiv 2018*.
 [^36]: Project Gungnir, the world's first LLM for time-series multitask modeling, will meet you soon. 🚀 Missing values and variable lengths in your datasets? Hard to perform multitask learning with your time series? No longer a problem. We'll open applications for the public beta test soon ;-) Follow us, and stay tuned!
+ Time-Series.AI
+[^37]: Wang, S., Wu, H., Shi, X., Hu, T., Luo, H., Ma, L., ... & ZHOU, J. (2024). [TimeMixer: Decomposable Multiscale Mixing for Time Series Forecasting](https://openreview.net/forum?id=7oLshfEIC2).
*ICLR 2024* diff --git a/README_zh.md b/README_zh.md index b04dd28d..d3f760cc 100644 --- a/README_zh.md +++ b/README_zh.md @@ -99,47 +99,48 @@ PyPOTS当前支持多变量POTS数据的插补,预测,分类,聚类以及 所以这些模型的输入中不能带有缺失值,无法接受POTS数据作为输入,更加不是插补算法。 **为了使上述模型能够适用于POTS数据,我们采用了与[SAITS论文](https://arxiv.org/pdf/2202.08516)[^1]中相同的embedding策略和训练方法(ORT+MIT)对它们进行改进**。 -| **类型** | **算法** | **插补** | **预测** | **分类** | **聚类** | **异常检测** | **年份 - 刊物** | -|:--------------|:----------------------------|:------:|:------:|:------:|:------:|:--------:|:-------------------| -| LLM | Gungnir 🚀 [^36] | ✅ | ✅ | ✅ | ✅ | ✅ | `Later in 2024` | -| Neural Net | ImputeFormer🧑‍🔧[^34] | ✅ | | | | | `2024 - KDD` | -| Neural Net | iTransformer🧑‍🔧[^24] | ✅ | | | | | `2024 - ICLR` | -| Neural Net | SAITS[^1] | ✅ | | | | | `2023 - ESWA` | -| Neural Net | FreTS🧑‍🔧[^23] | ✅ | | | | | `2023 - NeurIPS` | -| Neural Net | Koopa🧑‍🔧[^29] | ✅ | | | | | `2023 - NeurIPS` | -| Neural Net | Crossformer🧑‍🔧[^16] | ✅ | | | | | `2023 - ICLR` | -| Neural Net | TimesNet[^14] | ✅ | | | | | `2023 - ICLR` | -| Neural Net | PatchTST🧑‍🔧[^18] | ✅ | | | | | `2023 - ICLR` | -| Neural Net | ETSformer🧑‍🔧[^19] | ✅ | | | | | `2023 - ICLR` | -| Neural Net | MICN🧑‍🔧[^27] | ✅ | | | | | `2023 - ICLR` | -| Neural Net | DLinear🧑‍🔧[^17] | ✅ | | | | | `2023 - AAAI` | -| Neural Net | TiDE🧑‍🔧[^28] | ✅ | | | | | `2023 - TMLR` | -| Neural Net | SCINet🧑‍🔧[^30] | ✅ | | | | | `2022 - NeurIPS` | -| Neural Net | Nonstationary Tr.🧑‍🔧[^25] | ✅ | | | | | `2022 - NeurIPS` | -| Neural Net | FiLM🧑‍🔧[^22] | ✅ | | | | | `2022 - NeurIPS` | -| Neural Net | RevIN_SCINet🧑‍🔧[^31] | ✅ | | | | | `2022 - ICLR` | -| Neural Net | Pyraformer🧑‍🔧[^26] | ✅ | | | | | `2022 - ICLR` | -| Neural Net | Raindrop[^5] | | | ✅ | | | `2022 - ICLR` | -| Neural Net | FEDformer🧑‍🔧[^20] | ✅ | | | | | `2022 - ICML` | -| Neural Net | Autoformer🧑‍🔧[^15] | ✅ | | | | | `2021 - NeurIPS` | -| Neural Net | CSDI[^12] | ✅ | ✅ | | | | `2021 - NeurIPS` | -| Neural Net | Informer🧑‍🔧[^21] | ✅ | | | | | `2021 - AAAI` | -| Neural Net | US-GAN[^10] | ✅ | | | | | `2021 - AAAI` | -| Neural Net | CRLI[^6] | | | | ✅ | | `2021 - AAAI` | -| Probabilistic | BTTF[^8] | | ✅ | | | | `2021 - TPAMI` | -| Neural Net | StemGNN🧑‍🔧[^33] | ✅ | | | | | `2020 - NeurIPS` | -| Neural Net | Reformer🧑‍🔧[^32] | ✅ | | | | | `2020 - ICLR` | -| Neural Net | GP-VAE[^11] | ✅ | | | | | `2020 - AISTATS` | -| Neural Net | VaDER[^7] | | | | ✅ | | `2019 - GigaSci.` | -| Neural Net | M-RNN[^9] | ✅ | | | | | `2019 - TBME` | -| Neural Net | BRITS[^3] | ✅ | | ✅ | | | `2018 - NeurIPS` | -| Neural Net | GRU-D[^4] | ✅ | | ✅ | | | `2018 - Sci. 
Rep.` | -| Neural Net | TCN🧑‍🔧[^35] | ✅ | | | | | `2018 - arXiv` | -| Neural Net | Transformer🧑‍🔧[^2] | ✅ | | | | | `2017 - NeurIPS` | -| Naive | Lerp | ✅ | | | | | | -| Naive | LOCF/NOCB | ✅ | | | | | | -| Naive | Mean | ✅ | | | | | | -| Naive | Median | ✅ | | | | | | +| **类型** | **算法** | **插补** | **预测** | **分类** | **聚类** | **异常检测** | **年份 - 刊物** | +|:--------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------|:------:|:------:|:------:|:------:|:--------:|:-------------------| +| LLM | Time-Series.AI [^36] | ✅ | ✅ | ✅ | ✅ | ✅ | `Later in 2024` | +| Neural Net | TimeMixer[^37] | ✅ | | | | | `2024 - ICLR` | +| Neural Net | iTransformer🧑‍🔧[^24] | ✅ | | | | | `2024 - ICLR` | +| Neural Net | ImputeFormer🧑‍🔧[^34] | ✅ | | | | | `2024 - KDD` | +| Neural Net | SAITS[^1] | ✅ | | | | | `2023 - ESWA` | +| Neural Net | FreTS🧑‍🔧[^23] | ✅ | | | | | `2023 - NeurIPS` | +| Neural Net | Koopa🧑‍🔧[^29] | ✅ | | | | | `2023 - NeurIPS` | +| Neural Net | Crossformer🧑‍🔧[^16] | ✅ | | | | | `2023 - ICLR` | +| Neural Net | TimesNet[^14] | ✅ | | | | | `2023 - ICLR` | +| Neural Net | PatchTST🧑‍🔧[^18] | ✅ | | | | | `2023 - ICLR` | +| Neural Net | ETSformer🧑‍🔧[^19] | ✅ | | | | | `2023 - ICLR` | +| Neural Net | MICN🧑‍🔧[^27] | ✅ | | | | | `2023 - ICLR` | +| Neural Net | DLinear🧑‍🔧[^17] | ✅ | | | | | `2023 - AAAI` | +| Neural Net | TiDE🧑‍🔧[^28] | ✅ | | | | | `2023 - TMLR` | +| Neural Net | SCINet🧑‍🔧[^30] | ✅ | | | | | `2022 - NeurIPS` | +| Neural Net | Nonstationary Tr.🧑‍🔧[^25] | ✅ | | | | | `2022 - NeurIPS` | +| Neural Net | FiLM🧑‍🔧[^22] | ✅ | | | | | `2022 - NeurIPS` | +| Neural Net | RevIN_SCINet🧑‍🔧[^31] | ✅ | | | | | `2022 - ICLR` | +| Neural Net | Pyraformer🧑‍🔧[^26] | ✅ | | | | | `2022 - ICLR` | +| Neural Net | Raindrop[^5] | | | ✅ | | | `2022 - ICLR` | +| Neural Net | FEDformer🧑‍🔧[^20] | ✅ | | | | | `2022 - ICML` | +| Neural Net | Autoformer🧑‍🔧[^15] | ✅ | | | | | `2021 - NeurIPS` | +| Neural Net | CSDI[^12] | ✅ | ✅ | | | | `2021 - NeurIPS` | +| Neural Net | Informer🧑‍🔧[^21] | ✅ | | | | | `2021 - AAAI` | +| Neural Net | US-GAN[^10] | ✅ | | | | | `2021 - AAAI` | +| Neural Net | CRLI[^6] | | | | ✅ | | `2021 - AAAI` | +| Probabilistic | BTTF[^8] | | ✅ | | | | `2021 - TPAMI` | +| Neural Net | StemGNN🧑‍🔧[^33] | ✅ | | | | | `2020 - NeurIPS` | +| Neural Net | Reformer🧑‍🔧[^32] | ✅ | | | | | `2020 - ICLR` | +| Neural Net | GP-VAE[^11] | ✅ | | | | | `2020 - AISTATS` | +| Neural Net | VaDER[^7] | | | | ✅ | | `2019 - GigaSci.` | +| Neural Net | M-RNN[^9] | ✅ | | | | | `2019 - TBME` | +| Neural Net | BRITS[^3] | ✅ | | ✅ | | | `2018 - NeurIPS` | +| Neural Net | GRU-D[^4] | ✅ | | ✅ | | | `2018 - Sci. Rep.` | +| Neural Net | TCN🧑‍🔧[^35] | ✅ | | | | | `2018 - arXiv` | +| Neural Net | Transformer🧑‍🔧[^2] | ✅ | | | | | `2017 - NeurIPS` | +| Naive | Lerp | ✅ | | | | | | +| Naive | LOCF/NOCB | ✅ | | | | | | +| Naive | Mean | ✅ | | | | | | +| Naive | Median | ✅ | | | | | | 💯 现在贡献你的模型来增加你的研究影响力!PyPOTS的下载量正在迅速增长(**[目前PyPI上总共超过30万次且每日超1000的下载](https://www.pepy.tech/projects/pypots)**), 你的工作将被社区广泛使用和引用。请参阅[贡献指南](https://github.com/WenjieDu/PyPOTS/blob/main/README_zh.md#-%E8%B4%A1%E7%8C%AE%E5%A3%B0%E6%98%8E),了解如何将模型包含在PyPOTS中。 @@ -292,7 +293,7 @@ year={2023}, [pypots/imputation/template](https://github.com/WenjieDu/PyPOTS/tree/main/pypots/imputation/template))快速启动你的开发; 2. 成为[PyPOTS贡献者](https://github.com/WenjieDu/PyPOTS/graphs/contributors)之一, 并在[PyPOTS网站](https://pypots.com/about/#volunteer-developers)上被列为志愿开发者; -3. 
在我们发布新版本的[更新日志](https://github.com/WenjieDu/PyPOTS/releases)中被提及;
+3. 在PyPOTS发布新版本的[更新日志](https://github.com/WenjieDu/PyPOTS/releases)中被提及;
 
 你也可以通过为该项目设置星标🌟,帮助更多人关注它。你的星标🌟既是对PyPOTS的认可,也是对PyPOTS发展所做出的重要贡献!
 
@@ -363,3 +364,5 @@ PyPOTS社区是一个开放、透明、友好的社区,让我们共同努力
 [^34]: Nie, T., Qin, G., Mei, Y., & Sun, J. (2024). [ImputeFormer: Low Rankness-Induced Transformers for Generalizable Spatiotemporal Imputation](https://arxiv.org/abs/2312.01728). *KDD 2024*.
 [^35]: Bai, S., Kolter, J. Z., & Koltun, V. (2018). [An empirical evaluation of generic convolutional and recurrent networks for sequence modeling](https://arxiv.org/abs/1803.01271). *arXiv 2018*.
 [^36]: Gungnir项目,世界上第一个时间序列多任务大模型,将很快与大家见面。🚀 数据集存在缺失值且样本长短不一?多任务建模场景困难?都不再是问题,让我们的大模型来帮你解决。我们将在近期开放公测申请 ;-) 关注我们,敬请期待!
+ Time-Series.AI
+[^37]: Wang, S., Wu, H., Shi, X., Hu, T., Luo, H., Ma, L., ... & ZHOU, J. (2024). [TimeMixer: Decomposable Multiscale Mixing for Time Series Forecasting](https://openreview.net/forum?id=7oLshfEIC2). *ICLR 2024*
diff --git a/docs/examples.rst b/docs/examples.rst
index d7d6a1e2..5101eba8 100644
--- a/docs/examples.rst
+++ b/docs/examples.rst
@@ -29,15 +29,13 @@ You can also find a simple and quick-start tutorial notebook on Google Colab
 
     # Data preprocessing. Tedious, but PyPOTS can help. 🤓
     data = load_specific_dataset('physionet_2012')  # PyPOTS will automatically download and extract it.
-    X = data['X']
-    num_samples = len(X['RecordID'].unique())
-    X = X.drop(['RecordID', 'Time'], axis = 1)
-    X = StandardScaler().fit_transform(X.to_numpy())
-    X = X.reshape(num_samples, 48, -1)
+    X = data['train_X']
+    num_samples = len(X)
+    X = StandardScaler().fit_transform(X.reshape(-1, X.shape[-1])).reshape(X.shape)
     X_ori = X  # keep X_ori for validation
     X = mcar(X, 0.1)  # randomly hold out 10% observed values as ground truth
     dataset = {"X": X}  # X for model input
-    print(X.shape)  # (11988, 48, 37), 11988 samples, 48 time steps, 37 features
+    print(X.shape)  # (7671, 48, 37), 7671 samples, 48 time steps, 37 features
 
     # initialize the model
     saits = SAITS(
@@ -55,7 +53,7 @@ You can also find a simple and quick-start tutorial notebook on Google Colab
         model_saving_strategy="best",  # only save the model with the best validation performance
     )
 
-    # train the model. Here I use the whole dataset as the training set, because ground truth is not visible to the model.
+    # train the model. Here I use only the train set and evaluate on it as well, because ground truth is not visible to the model.
     saits.fit(dataset)
     # impute the originally-missing values and artificially-missing values
     imputation = saits.impute(dataset)
@@ -64,6 +62,6 @@ You can also find a simple and quick-start tutorial notebook on Google Colab
     mae = calc_mae(imputation, np.nan_to_num(X_ori), indicating_mask)  # calculate mean absolute error on the ground truth (artificially-missing values)
 
-    # the best model has been already saved, but you can still manually save it with function save_model() as below
-    saits.save_model(saving_dir="examples/saits",file_name="manually_saved_saits_model")
+    # the best model has already been saved, but you can still manually save it with the save() function as below
+    saits.save(saving_path="examples/saits/manually_saved_saits_model")
     # you can load the saved model into a new initialized model
-    saits.load_model("examples/saits/manually_saved_saits_model")
+    saits.load("examples/saits/manually_saved_saits_model.pypots")
diff --git a/docs/index.rst b/docs/index.rst
index d6c76d57..c0204b42 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -133,10 +133,12 @@ The paper references are all listed at the bottom of this readme file.
+----------------+-----------------------------------------------------------+------+------+------+------+------+-----------------------+ | Type | Algorithm | IMPU | FORE | CLAS | CLUS | ANOD | Year - Venue | +================+===========================================================+======+======+======+======+======+=======================+ -| Neural Net | ImputeFormer :cite:`nie2024imputeformer` | ✅ | | | | | ``2024 - KDD`` | +| Neural Net | TimeMixer :cite:`wang2024timemixer` | ✅ | | | | | ``2024 - ICLR`` | +----------------+-----------------------------------------------------------+------+------+------+------+------+-----------------------+ | Neural Net | iTransformer🧑‍🔧 :cite:`liu2024itransformer` | ✅ | | | | | ``2024 - ICLR`` | +----------------+-----------------------------------------------------------+------+------+------+------+------+-----------------------+ +| Neural Net | ImputeFormer :cite:`nie2024imputeformer` | ✅ | | | | | ``2024 - KDD`` | ++----------------+-----------------------------------------------------------+------+------+------+------+------+-----------------------+ | Neural Net | SAITS :cite:`du2023SAITS` | ✅ | | | | | ``2023 - ESWA`` | +----------------+-----------------------------------------------------------+------+------+------+------+------+-----------------------+ | Neural Net | FreTS🧑‍🔧 :cite:`yi2023frets` | ✅ | | | | | ``2023 - NeurIPS`` | @@ -333,7 +335,7 @@ By committing your code, you'll `pypots/imputation/template `_) to quickly start; 2. become one of `PyPOTS contributors `_ and be listed as a volunteer developer `on the PyPOTS website `_; -3. get mentioned in our `release notes `_; +3. get mentioned in PyPOTS `release notes `_; You can also contribute to PyPOTS by simply staring🌟 this repo to help more people notice it. Your star is your recognition to PyPOTS, and it matters! diff --git a/docs/pypots.imputation.rst b/docs/pypots.imputation.rst index d994f96a..8af2d63f 100644 --- a/docs/pypots.imputation.rst +++ b/docs/pypots.imputation.rst @@ -19,6 +19,15 @@ pypots.imputation.transformer :show-inheritance: :inherited-members: +pypots.imputation.timemixer +------------------------------------ + +.. automodule:: pypots.imputation.timemixer + :members: + :undoc-members: + :show-inheritance: + :inherited-members: + pypots.imputation.imputeformer ------------------------------------ diff --git a/pypots/data/load_specific_datasets.py b/pypots/data/load_specific_datasets.py index 69e79615..50c6c297 100644 --- a/pypots/data/load_specific_datasets.py +++ b/pypots/data/load_specific_datasets.py @@ -35,7 +35,7 @@ def list_supported_datasets() -> list: def load_specific_dataset(dataset_name: str, use_cache: bool = True) -> dict: """Load specific datasets supported by PyPOTS. - Different from tsdb.load_dataset(), which only produces merely raw data, + Different from tsdb.load(), which only produces merely raw data, load_specific_dataset here does some preprocessing operations, like truncating time series to generate samples with the same length. @@ -45,7 +45,7 @@ def load_specific_dataset(dataset_name: str, use_cache: bool = True) -> dict: The name of the dataset to be loaded, which should be supported, i.e. in SUPPORTED_DATASETS. use_cache : - Whether to use cache. This is an argument of tsdb.load_dataset(). + Whether to use cache. This is an argument of tsdb.load(). 
Returns ------- diff --git a/tests/imputation/timemixer.py b/tests/imputation/timemixer.py index bf5c72cd..a0735663 100644 --- a/tests/imputation/timemixer.py +++ b/tests/imputation/timemixer.py @@ -47,8 +47,8 @@ class TestTimeMixer(unittest.TestCase): DATA["n_features"], n_layers=2, top_k=5, - d_model=512, - d_ffn=512, + d_model=32, + d_ffn=32, dropout=0.1, epochs=EPOCHS, saving_path=saving_path,
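
To see how the pieces touched by this patch fit together, here is a minimal end-to-end sketch for the newly documented TimeMixer imputer. It reuses the preprocessing flow from the updated docs/examples.rst and the hyperparameters from tests/imputation/timemixer.py. It assumes that `TimeMixer` is exported from `pypots.imputation` with the argument names used in the test, and that the helper imports (`load_specific_dataset`, `mcar`, `calc_mae`) follow the quick-start example; `epochs=10` is an arbitrary small value chosen for illustration, so adjust these details to your installed PyPOTS version.

```python
# Sketch only: TimeMixer's import path and keyword names are assumed from
# tests/imputation/timemixer.py and the pypots.imputation convention.
import numpy as np
from sklearn.preprocessing import StandardScaler
from pygrinder import mcar
from pypots.data import load_specific_dataset
from pypots.imputation import TimeMixer
from pypots.utils.metrics import calc_mae

data = load_specific_dataset("physionet_2012")  # downloads and preprocesses the dataset
X = data["train_X"]                             # (7671, 48, 37) as noted in docs/examples.rst
X = StandardScaler().fit_transform(X.reshape(-1, X.shape[-1])).reshape(X.shape)
X_ori = X                                       # keep the intact data for evaluation
X = mcar(X, 0.1)                                # artificially mask 10% of observed values
dataset = {"X": X}

timemixer = TimeMixer(
    n_steps=48,
    n_features=37,
    n_layers=2,
    top_k=5,
    d_model=32,   # the lighter sizes adopted in the updated test config
    d_ffn=32,
    dropout=0.1,
    epochs=10,    # assumption: small value for a quick demo run
)
timemixer.fit(dataset)                           # train on the partially observed data
imputation = timemixer.impute(dataset)           # impute original and artificial gaps

indicating_mask = np.isnan(X) ^ np.isnan(X_ori)  # positions of the artificially masked values
mae = calc_mae(imputation, np.nan_to_num(X_ori), indicating_mask)
print(f"Imputation MAE on artificially masked values: {mae:.4f}")
```

As in the SAITS quick-start, the error is computed only on the artificially masked positions, so the reported MAE measures imputation quality against held-out ground truth rather than the originally missing values.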