From d750265054084b2b41c717c9926d4ea9e4435665 Mon Sep 17 00:00:00 2001 From: Ming Jin Date: Mon, 18 Mar 2024 21:32:05 +1100 Subject: [PATCH 01/16] Update README.md --- README.md | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index a3922d25..38e54282 100644 --- a/README.md +++ b/README.md @@ -101,7 +101,7 @@ bash ./scripts/TimeLLM_ETTm2.sh ## Detailed usage -Please refer to ```run_main.py``` and ```run_m4.py``` for the detailed description of each hyperparameter. +Please refer to ```run_main.py```, ```run_m4.py``` and ```run_pretrain.py``` for the detailed description of each hyperparameter. ## Further Reading @@ -147,6 +147,19 @@ Please refer to ```run_main.py``` and ```run_m4.py``` for the detailed descripti } ``` +4, [**TimeMixer: Decomposable Multiscale Mixing for Time Series Forecasting**](https://openreview.net/pdf?id=7oLshfEIC2), in ICLR 2024. +[\[GitHub Repo\]](https://github.com/kwuking/TimeMixer) + +**Authors**: Shiyu Wang, Haixu Wu, Xiaoming Shi, Tengge Hu, Huakun Luo, Lintao Ma, James Y. Zhang, Jun Zhou + +```bibtex +@inproceedings{wang2023timemixer, + title={TimeMixer: Decomposable Multiscale Mixing for Time Series Forecasting}, + author={Wang, Shiyu and Wu, Haixu and Shi, Xiaoming and Hu, Tengge and Luo, Huakun and Ma, Lintao and Zhang, James Y and ZHOU, JUN}, + booktitle={International Conference on Learning Representations (ICLR)}, + year={2024} +} +``` ## Acknowledgement -Our implementation adapts [Time-Series-Library](https://github.com/thuml/Time-Series-Library) and [GPT4TS](https://github.com/DAMO-DI-ML/NeurIPS2023-One-Fits-All) as the code base and have extensively modified it to our purposes. We thank the authors for sharing their implementations and related resources. +Our implementation adapts [Time-Series-Library](https://github.com/thuml/Time-Series-Library) and [OFA (GPT4TS)](https://github.com/DAMO-DI-ML/NeurIPS2023-One-Fits-All) as the code base and have extensively modified it to our purposes. We thank the authors for sharing their implementations and related resources. From d3fa8694a5eaee72ade0e3c2b5d855f27a8d7f7d Mon Sep 17 00:00:00 2001 From: Ming Jin Date: Mon, 18 Mar 2024 23:28:54 +1100 Subject: [PATCH 02/16] Update README.md --- README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.md b/README.md index 38e54282..a3c897c2 100644 --- a/README.md +++ b/README.md @@ -46,6 +46,9 @@ } ``` +## Updates +🚩 **News** (March 2024): Time-LLM has been upgraded to serve as a general framework for repurposing a wide range of language models to time series forecasting. It now defaults to supporting Llama-7B and includes compatibility with two additional smaller PLMs (GPT-2 and BERT). Simply adjust `--llm_model` and `--llm_dim` to switch backbones. + ## Introduction Time-LLM is a reprogramming framework to repurpose LLMs for general time series forecasting with the backbone language models kept intact. Notably, we show that time series analysis (e.g., forecasting) can be cast as yet another "language task" that can be effectively tackled by an off-the-shelf LLM. 
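Patch 02's news entry names the two switches for repurposing the backbone. Below is a minimal launch sketch, assuming a single GPU and illustrative ETTh1 dataset paths — only `--llm_model` and `--llm_dim` come from the patch itself, and GPT-2 small's hidden size of 768 is the value `--llm_dim` must match:

```bash
# Hedged sketch: swap Time-LLM's default Llama-7B backbone for GPT-2.
# --llm_model / --llm_dim are the two switches named in patch 02; the
# dataset paths and remaining flags are illustrative assumptions modeled
# on the scripts/TimeLLM_*.sh launchers, not taken from the patches.
accelerate launch --mixed_precision bf16 --num_processes 1 run_main.py \
  --task_name long_term_forecast \
  --is_training 1 \
  --root_path ./dataset/ETT-small/ \
  --data_path ETTh1.csv \
  --llm_model GPT2 \
  --llm_dim 768  # GPT-2 small hidden size; Llama-7B would use 4096, BERT-base 768
```

For multi-GPU runs, the scripts touched in patch 05 below add `--multi_gpu` together with `--num_processes` and `--main_process_port`.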
From 0a6ebb7a6e8860d28ebf6b86bf23199cf5d003f1 Mon Sep 17 00:00:00 2001 From: Ming Jin Date: Mon, 1 Apr 2024 17:45:07 +1100 Subject: [PATCH 03/16] Update tools.py --- utils/tools.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/utils/tools.py b/utils/tools.py index fe98a720..b9ee19b3 100644 --- a/utils/tools.py +++ b/utils/tools.py @@ -133,6 +133,7 @@ def cal_accuracy(y_pred, y_true): def del_files(dir_path): shutil.rmtree(dir_path) + def vali(args, accelerator, model, vali_data, vali_loader, criterion, mae_metric): total_loss = [] total_mae_loss = [] @@ -161,7 +162,9 @@ def vali(args, accelerator, model, vali_data, vali_loader, criterion, mae_metric outputs = model(batch_x, batch_x_mark, dec_inp, batch_y_mark)[0] else: outputs = model(batch_x, batch_x_mark, dec_inp, batch_y_mark) - # self.accelerator.wait_for_everyone() + + outputs, batch_y = accelerator.gather_for_metrics((outputs, batch_y)) + f_dim = -1 if args.features == 'MS' else 0 outputs = outputs[:, -args.pred_len:, f_dim:] batch_y = batch_y[:, -args.pred_len:, f_dim:].to(accelerator.device) @@ -205,11 +208,15 @@ def test(args, accelerator, model, train_loader, vali_loader, criterion): None ) accelerator.wait_for_everyone() + outputs = accelerator.gather_for_metrics(outputs) f_dim = -1 if args.features == 'MS' else 0 outputs = outputs[:, -args.pred_len:, f_dim:] pred = outputs true = torch.from_numpy(np.array(y)).to(accelerator.device) batch_y_mark = torch.ones(true.shape).to(accelerator.device) + true = accelerator.gather_for_metrics(true) + batch_y_mark = accelerator.gather_for_metrics(batch_y_mark) + loss = criterion(x[:, :, 0], args.frequency_map, pred[:, :, 0], true, batch_y_mark) model.train() From e9ac7c42c53ab1287e0e223466c579449f12f918 Mon Sep 17 00:00:00 2001 From: Nick Dat Le Date: Mon, 1 Apr 2024 19:46:38 -0700 Subject: [PATCH 04/16] Updated requirements and README.md to use python 3.11 --- README.md | 11 +++++++---- requirements.txt | 9 +++++---- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index a3c897c2..590f8267 100644 --- a/README.md +++ b/README.md @@ -64,18 +64,21 @@ Notably, we show that time series analysis (e.g., forecasting) can be cast as ye

## Requirements -- accelerate==0.20.3 +Use python 3.11 from MiniConda + +- torch==2.2.2 +- accelerate==0.28.0 - einops==0.7.0 - matplotlib==3.7.0 - numpy==1.23.5 - pandas==1.5.3 - scikit_learn==1.2.2 -- scipy==1.5.4 -- torch==2.0.1 +- scipy==1.12.0 - tqdm==4.65.0 - peft==0.4.0 - transformers==4.31.0 -- deepspeed==0.13.0 +- deepspeed==0.14.0 +- sentencepiece==0.2.0 To install all dependencies: ``` diff --git a/requirements.txt b/requirements.txt index d0fe18bb..a4a3f2a9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,12 +1,13 @@ -accelerate==0.20.3 +torch==2.2.2 +accelerate==0.28.0 einops==0.7.0 matplotlib==3.7.0 numpy==1.23.5 pandas==1.5.3 scikit_learn==1.2.2 -scipy==1.5.4 -torch==2.0.1 +scipy==1.12.0 tqdm==4.65.0 peft==0.4.0 transformers==4.31.0 -deepspeed==0.13.0 +deepspeed==0.14.0 +sentencepiece==0.2.0 From a20b7196c64602cebf6e81b9077c93292d73e94e Mon Sep 17 00:00:00 2001 From: Ming Jin Date: Tue, 23 Apr 2024 13:55:08 +1000 Subject: [PATCH 05/16] Fixed typos in scripts --- scripts/TimeLLM_ECL.sh | 8 ++++---- scripts/TimeLLM_Traffic.sh | 8 ++++---- scripts/TimeLLM_Weather.sh | 8 ++++---- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/scripts/TimeLLM_ECL.sh b/scripts/TimeLLM_ECL.sh index 9db39fab..ebdde75c 100644 --- a/scripts/TimeLLM_ECL.sh +++ b/scripts/TimeLLM_ECL.sh @@ -11,7 +11,7 @@ d_ff=32 comment='TimeLLM-ECL' -accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run.py \ +accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run_main.py \ --task_name long_term_forecast \ --is_training 1 \ --root_path ./dataset/electricity/ \ @@ -35,7 +35,7 @@ accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_proces --train_epochs $train_epochs \ --model_comment $comment -accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run.py \ +accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run_main.py \ --task_name long_term_forecast \ --is_training 1 \ --root_path ./dataset/electricity/ \ @@ -59,7 +59,7 @@ accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_proces --train_epochs $train_epochs \ --model_comment $comment - accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run.py \ + accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run_main.py \ --task_name long_term_forecast \ --is_training 1 \ --root_path ./dataset/electricity/ \ @@ -83,7 +83,7 @@ accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_proces --train_epochs $train_epochs \ --model_comment $comment - accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run.py \ + accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run_main.py \ --task_name long_term_forecast \ --is_training 1 \ --root_path ./dataset/electricity/ \ diff --git a/scripts/TimeLLM_Traffic.sh b/scripts/TimeLLM_Traffic.sh index fb9dca00..5ded82a9 100644 --- a/scripts/TimeLLM_Traffic.sh +++ b/scripts/TimeLLM_Traffic.sh @@ -11,7 +11,7 @@ d_ff=32 comment='TimeLLM-Traffic' -accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run.py \ 
+accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run_main.py \ --task_name long_term_forecast \ --is_training 1 \ --root_path ./dataset/traffic/ \ @@ -35,7 +35,7 @@ accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_proces --train_epochs $train_epochs \ --model_comment $comment -accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run.py \ +accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run_main.py \ --task_name long_term_forecast \ --is_training 1 \ --root_path ./dataset/traffic/ \ @@ -59,7 +59,7 @@ accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_proces --train_epochs $train_epochs \ --model_comment $comment - accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run.py \ + accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run_main.py \ --task_name long_term_forecast \ --is_training 1 \ --root_path ./dataset/traffic/ \ @@ -83,7 +83,7 @@ accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_proces --train_epochs $train_epochs \ --model_comment $comment - accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run.py \ + accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run_main.py \ --task_name long_term_forecast \ --is_training 1 \ --root_path ./dataset/traffic/ \ diff --git a/scripts/TimeLLM_Weather.sh b/scripts/TimeLLM_Weather.sh index 046d8b36..204adba1 100644 --- a/scripts/TimeLLM_Weather.sh +++ b/scripts/TimeLLM_Weather.sh @@ -11,7 +11,7 @@ d_ff=32 comment='TimeLLM-Weather' -accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run.py \ +accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run_main.py \ --task_name long_term_forecast \ --is_training 1 \ --root_path ./dataset/weather/ \ @@ -37,7 +37,7 @@ accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_proces --train_epochs $train_epochs \ --model_comment $comment -accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run.py \ +accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run_main.py \ --task_name long_term_forecast \ --is_training 1 \ --root_path ./dataset/weather/ \ @@ -63,7 +63,7 @@ accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_proces --train_epochs $train_epochs \ --model_comment $comment - accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run.py \ + accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run_main.py \ --task_name long_term_forecast \ --is_training 1 \ --root_path ./dataset/weather/ \ @@ -89,7 +89,7 @@ accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_proces --train_epochs 10 \ --model_comment $comment - accelerate launch --multi_gpu --mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run.py \ + accelerate launch --multi_gpu 
--mixed_precision bf16 --num_processes $num_process --main_process_port $master_port run_main.py \ --task_name long_term_forecast \ --is_training 1 \ --root_path ./dataset/weather/ \ From 0a17a6fa2f47f3b27915dea9adb7568c1aecaadc Mon Sep 17 00:00:00 2001 From: Ming Jin Date: Tue, 23 Apr 2024 14:47:11 +1000 Subject: [PATCH 06/16] Update README.md --- README.md | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 590f8267..2ec087b4 100644 --- a/README.md +++ b/README.md @@ -16,18 +16,21 @@


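Returning to patch 04's environment bump: a minimal setup sketch consistent with its pins. The environment name is illustrative, and the final line assumes the README's elided install command is the usual `pip install -r requirements.txt`:

```bash
# Hedged sketch of the Python 3.11 environment implied by patch 04.
# "time-llm" is a hypothetical environment name, not from the repository.
conda create -n time-llm python=3.11 -y
conda activate time-llm
pip install -r requirements.txt  # installs the version pins shown in patch 04
```

Note that `conda activate` requires conda's shell hook to be initialized first (via `conda init`).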
From 44b753f17c1616c832cc7ad1913739c64a6ed98d Mon Sep 17 00:00:00 2001 From: Qingsong Wen <2541438+qingsongedu@users.noreply.github.com> Date: Sat, 4 May 2024 08:22:25 -0700 Subject: [PATCH 07/16] Update README.md --- README.md | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index 2ec087b4..9927d077 100644 --- a/README.md +++ b/README.md @@ -114,12 +114,24 @@ Please refer to ```run_main.py```, ```run_m4.py``` and ```run_pretrain.py``` for ## Further Reading -1, [**Large Models for Time Series and Spatio-Temporal Data: A Survey and Outlook**](https://arxiv.org/abs/2310.10196), in *arXiv* 2023. +1, [**Position Paper: What Can Large Language Models Tell Us about Time Series Analysis**](https://arxiv.org/abs/2402.02713), in *ICML* 2024. + +**Authors**: Ming Jin, Yifan Zhang, Wei Chen, Kexin Zhang, Yuxuan Liang*, Bin Yang, Jindong Wang, Shirui Pan, Qingsong Wen* + +```bibtex +@inproceedings{jin2024position, + title={Position Paper: What Can Large Language Models Tell Us about Time Series Analysis}, + author={Ming Jin and Yifan Zhang and Wei Chen and Kexin Zhang and Yuxuan Liang and Bin Yang and Jindong Wang and Shirui Pan and Qingsong Wen}, + booktitle={International Conference on Machine Learning (ICML 2024)}, + year={2024} +} +``` + +2, [**Large Models for Time Series and Spatio-Temporal Data: A Survey and Outlook**](https://arxiv.org/abs/2310.10196), in *arXiv* 2023. [\[GitHub Repo\]](https://github.com/qingsongedu/Awesome-TimeSeries-SpatioTemporal-LM-LLM) **Authors**: Ming Jin, Qingsong Wen*, Yuxuan Liang, Chaoli Zhang, Siqiao Xue, Xue Wang, James Zhang, Yi Wang, Haifeng Chen, Xiaoli Li (IEEE Fellow), Shirui Pan*, Vincent S. Tseng (IEEE Fellow), Yu Zheng (IEEE Fellow), Lei Chen (IEEE Fellow), Hui Xiong (IEEE Fellow) - ```bibtex @article{jin2023lm4ts, title={Large Models for Time Series and Spatio-Temporal Data: A Survey and Outlook}, @@ -129,19 +141,7 @@ Please refer to ```run_main.py```, ```run_m4.py``` and ```run_pretrain.py``` for } ``` -2, [**Position Paper: What Can Large Language Models Tell Us about Time Series Analysis**](https://arxiv.org/abs/2402.02713), in *arXiv* 2024. - -**Authors**: Ming Jin, Yifan Zhang, Wei Chen, Kexin Zhang, Yuxuan Liang*, Bin Yang, Jindong Wang, Shirui Pan, Qingsong Wen* - -```bibtex -@article{jin2024position, - title={Position Paper: What Can Large Language Models Tell Us about Time Series Analysis}, - author={Ming Jin and Yifan Zhang and Wei Chen and Kexin Zhang and Yuxuan Liang and Bin Yang and Jindong Wang and Shirui Pan and Qingsong Wen}, - journal={arXiv preprint arXiv:2402.02713}, - year={2024} -} -``` 3, [**Transformers in Time Series: A Survey**](https://arxiv.org/abs/2202.07125), in IJCAI 2023. 
[\[GitHub Repo\]](https://github.com/qingsongedu/time-series-transformers-review) **Authors**: Qingsong Wen, Tian Zhou, Chaoli Zhang, Weiqi Chen, Ziqing Ma, Junchi Yan, Liang Sun ```bibtex @inproceedings{wen2023transformers, title={Transformers in time series: A survey}, author={Wen, Qingsong and Zhou, Tian and Zhang, Chaoli and Chen, Weiqi and Ma, Ziqing and Yan, Junchi and Sun, Liang}, booktitle={International Joint Conference on Artificial Intelligence(IJCAI)}, year={2023} } ``` From 2c2e9c0498615e846d0f445b3b7c5420fe8b2dee Mon Sep 17 00:00:00 2001 From: MetaKing Date: Fri, 24 May 2024 10:45:47 +0800 Subject: [PATCH 08/16] Update README.md --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 9927d077..35823f70 100644 --- a/README.md +++ b/README.md @@ -21,6 +21,7 @@ **[Medium Blog]** **[机器之心中文解读]** +**[量子位中文解读]** **[时序人中文解读]** **[AI算法厨房中文解读]** **[知乎中文解读]** From 84846d96a58d044ae6702ae2ce79efadf047ed7f Mon Sep 17 00:00:00 2001 From: MetaKing Date: Mon, 3 Jun 2024 10:45:35 +0800 Subject: [PATCH 09/16] Update README.md --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 35823f70..de697908 100644 --- a/README.md +++ b/README.md @@ -51,6 +51,8 @@ ``` ## Updates +🚩 **News** (May 2024): Time-LLM has been included in [NeuralForecast](https://github.com/Nixtla/neuralforecast). Special thanks to the contributor @[JQGoh](https://https://github.com/JQGoh)! + 🚩 **News** (March 2024): Time-LLM has been upgraded to serve as a general framework for repurposing a wide range of language models to time series forecasting. It now defaults to supporting Llama-7B and includes compatibility with two additional smaller PLMs (GPT-2 and BERT). Simply adjust `--llm_model` and `--llm_dim` to switch backbones. From 062594ad4f3f7c327dace9a7f26f91dfafc53c0d Mon Sep 17 00:00:00 2001 From: MetaKing Date: Mon, 3 Jun 2024 10:46:35 +0800 Subject: [PATCH 10/16] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index de697908..e18be327 100644 --- a/README.md +++ b/README.md @@ -51,7 +51,7 @@ ``` ## Updates -🚩 **News** (May 2024): Time-LLM has been included in [NeuralForecast](https://github.com/Nixtla/neuralforecast). Special thanks to the contributor @[JQGoh](https://https://github.com/JQGoh)! +🚩 **News** (May 2024): Time-LLM has been included in [NeuralForecast](https://github.com/Nixtla/neuralforecast). Special thanks to the contributor @[JQGoh](https://github.com/JQGoh)! 🚩 **News** (March 2024): Time-LLM has been upgraded to serve as a general framework for repurposing a wide range of language models to time series forecasting. It now defaults to supporting Llama-7B and includes compatibility with two additional smaller PLMs (GPT-2 and BERT). Simply adjust `--llm_model` and `--llm_dim` to switch backbones. From f11e5c8f4cb6a8d66331ec97b307deda0f58b528 Mon Sep 17 00:00:00 2001 From: CityMind Lab <40017013+yoshall@users.noreply.github.com> Date: Mon, 3 Jun 2024 18:08:58 +0800 Subject: [PATCH 11/16] Update README.md --- README.md | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index e18be327..d668789c 100644 --- a/README.md +++ b/README.md @@ -117,7 +117,20 @@ Please refer to ```run_main.py```, ```run_m4.py``` and ```run_pretrain.py``` for ## Further Reading -1, [**Position Paper: What Can Large Language Models Tell Us about Time Series Analysis**](https://arxiv.org/abs/2402.02713), in *ICML* 2024. +1, [**Foundation Models for Time Series Analysis: A Tutorial and Survey**](https://arxiv.org/pdf/2403.14735), in *KDD* 2024. 
+ **Authors**: Yuxuan Liang, Haomin Wen, Yuqi Nie, Yushan Jiang, Ming Jin, Dongjin Song, Shirui Pan, Qingsong Wen* ```bibtex @inproceedings{liang2024foundation, title={Foundation models for time series analysis: A tutorial and survey}, author={Liang, Yuxuan and Wen, Haomin and Nie, Yuqi and Jiang, Yushan and Jin, Ming and Song, Dongjin and Pan, Shirui and Wen, Qingsong}, booktitle={ACM SIGKDD Conference on Knowledge Discovery and Data Mining (KDD 2024)}, year={2024} } ``` 2, [**Position Paper: What Can Large Language Models Tell Us about Time Series Analysis**](https://arxiv.org/abs/2402.02713), in *ICML* 2024. **Authors**: Ming Jin, Yifan Zhang, Wei Chen, Kexin Zhang, Yuxuan Liang*, Bin Yang, Jindong Wang, Shirui Pan, Qingsong Wen* ```bibtex @inproceedings{jin2024position, title={Position Paper: What Can Large Language Models Tell Us about Time Series Analysis}, author={Ming Jin and Yifan Zhang and Wei Chen and Kexin Zhang and Yuxuan Liang and Bin Yang and Jindong Wang and Shirui Pan and Qingsong Wen}, booktitle={International Conference on Machine Learning (ICML 2024)}, year={2024} } ``` -2, [**Large Models for Time Series and Spatio-Temporal Data: A Survey and Outlook**](https://arxiv.org/abs/2310.10196), in *arXiv* 2023. +3, [**Large Models for Time Series and Spatio-Temporal Data: A Survey and Outlook**](https://arxiv.org/abs/2310.10196), in *arXiv* 2023. [\[GitHub Repo\]](https://github.com/qingsongedu/Awesome-TimeSeries-SpatioTemporal-LM-LLM) **Authors**: Ming Jin, Qingsong Wen*, Yuxuan Liang, Chaoli Zhang, Siqiao Xue, Xue Wang, James Zhang, Yi Wang, Haifeng Chen, Xiaoli Li (IEEE Fellow), Shirui Pan*, Vincent S. Tseng (IEEE Fellow), Yu Zheng (IEEE Fellow), Lei Chen (IEEE Fellow), Hui Xiong (IEEE Fellow) ```bibtex @article{jin2023lm4ts, title={Large Models for Time Series and Spatio-Temporal Data: A Survey and Outlook}, author={Ming Jin and Qingsong Wen and Yuxuan Liang and Chaoli Zhang and Siqiao Xue and Xue Wang and James Zhang and Yi Wang and Haifeng Chen and Xiaoli Li and Shirui Pan and Vincent S. Tseng and Yu Zheng and Lei Chen and Hui Xiong}, journal={arXiv preprint arXiv:2310.10196}, year={2023} } ``` -3, [**Transformers in Time Series: A Survey**](https://arxiv.org/abs/2202.07125), in IJCAI 2023. +4, [**Transformers in Time Series: A Survey**](https://arxiv.org/abs/2202.07125), in IJCAI 2023. [\[GitHub Repo\]](https://github.com/qingsongedu/time-series-transformers-review) **Authors**: Qingsong Wen, Tian Zhou, Chaoli Zhang, Weiqi Chen, Ziqing Ma, Junchi Yan, Liang Sun ```bibtex @inproceedings{wen2023transformers, title={Transformers in time series: A survey}, author={Wen, Qingsong and Zhou, Tian and Zhang, Chaoli and Chen, Weiqi and Ma, Ziqing and Yan, Junchi and Sun, Liang}, booktitle={International Joint Conference on Artificial Intelligence(IJCAI)}, year={2023} } ``` -4, [**TimeMixer: Decomposable Multiscale Mixing for Time Series Forecasting**](https://openreview.net/pdf?id=7oLshfEIC2), in ICLR 2024. +5, [**TimeMixer: Decomposable Multiscale Mixing for Time Series Forecasting**](https://openreview.net/pdf?id=7oLshfEIC2), in ICLR 2024. [\[GitHub Repo\]](https://github.com/kwuking/TimeMixer) **Authors**: Shiyu Wang, Haixu Wu, Xiaoming Shi, Tengge Hu, Huakun Luo, Lintao Ma, James Y. Zhang, Jun Zhou From d3f29569e314af6eeaf6920b368076529e9c98c8 Mon Sep 17 00:00:00 2001 From: MetaKing Date: Fri, 14 Jun 2024 21:50:07 +0800 Subject: [PATCH 12/16] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index d668789c..ae557ec8 100644 --- a/README.md +++ b/README.md @@ -51,7 +51,7 @@ ``` ## Updates -🚩 **News** (May 2024): Time-LLM has been included in [NeuralForecast](https://github.com/Nixtla/neuralforecast). Special thanks to the contributor @[JQGoh](https://github.com/JQGoh)! +🚩 **News** (May 2024): Time-LLM has been included in [NeuralForecast](https://github.com/Nixtla/neuralforecast). Special thanks to the contributor @[JQGoh](https://github.com/JQGoh) and @[marcopeix](https://github.com/marcopeix)! 🚩 **News** (March 2024): Time-LLM has been upgraded to serve as a general framework for repurposing a wide range of language models to time series forecasting. 
It now defaults to supporting Llama-7B and includes compatibility with two additional smaller PLMs (GPT-2 and BERT). Simply adjust `--llm_model` and `--llm_dim` to switch backbones. From 0640af58d528fd1acf575c4ad7a1b76b6e62d9e7 Mon Sep 17 00:00:00 2001 From: Qingsong Wen <2541438+qingsongedu@users.noreply.github.com> Date: Wed, 28 Aug 2024 23:55:37 -0700 Subject: [PATCH 13/16] Update README.md --- README.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index ae557ec8..e1db13b1 100644 --- a/README.md +++ b/README.md @@ -50,7 +50,10 @@ } ``` -## Updates +## Updates/News: + +🚩 **News** (Aug. 2024): Time-LLM has been adopted by XiMou Optimization Technology Co., Ltd. (XMO) for Solar and Wind Forecasting. + 🚩 **News** (May 2024): Time-LLM has been included in [NeuralForecast](https://github.com/Nixtla/neuralforecast). Special thanks to the contributor @[JQGoh](https://github.com/JQGoh) and @[marcopeix](https://github.com/marcopeix)! 🚩 **News** (March 2024): Time-LLM has been upgraded to serve as a general framework for repurposing a wide range of language models to time series forecasting. It now defaults to supporting Llama-7B and includes compatibility with two additional smaller PLMs (GPT-2 and BERT). Simply adjust `--llm_model` and `--llm_dim` to switch backbones. From 0045cde1ae0a7e7960b8f2273404eabac0f9c5da Mon Sep 17 00:00:00 2001 From: Qingsong Wen <2541438+qingsongedu@users.noreply.github.com> Date: Thu, 29 Aug 2024 00:24:56 -0700 Subject: [PATCH 14/16] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e1db13b1..4833ea3e 100644 --- a/README.md +++ b/README.md @@ -52,7 +52,7 @@ ## Updates/News: -🚩 **News** (Aug. 2024): Time-LLM has been adopted by XiMou Optimization Technology Co., Ltd. (XMO) for Solar and Wind Forecasting. +🚩 **News** (Aug. 2024): Time-LLM has been adopted by XiMou Optimization Technology Co., Ltd. (XMO) for Solar, Wind, and Weather Forecasting. 🚩 **News** (May 2024): Time-LLM has been included in [NeuralForecast](https://github.com/Nixtla/neuralforecast). Special thanks to the contributor @[JQGoh](https://github.com/JQGoh) and @[marcopeix](https://github.com/marcopeix)! From 02ee1b8f6043090c7a417f0cbb64cbf753895175 Mon Sep 17 00:00:00 2001 From: MetaKing Date: Sun, 3 Nov 2024 17:23:03 +0800 Subject: [PATCH 15/16] Update README.md --- README.md | 24 +++++++++++++++++++----- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 4833ea3e..23f5224c 100644 --- a/README.md +++ b/README.md @@ -120,7 +120,21 @@ Please refer to ```run_main.py```, ```run_m4.py``` and ```run_pretrain.py``` for ## Further Reading -1, [**Foundation Models for Time Series Analysis: A Tutorial and Survey**](https://arxiv.org/pdf/2403.14735), in *KDD* 2024. +1, [**TimeMixer++: A General Time Series Pattern Machine for Universal Predictive Analysis**](https://arxiv.org/abs/2410.16032), in *arXiv* 2024. 
+[\[GitHub Repo\]](https://github.com/kwuking/TimeMixer/blob/main/README.md) + +**Authors**: Shiyu Wang, Jiawei Li, Xiaoming Shi, Zhou Ye, Baichuan Mo, Wenze Lin, Shengtong Ju, Zhixuan Chu, Ming Jin + +```bibtex +@article{wang2024timemixer++, + title={TimeMixer++: A General Time Series Pattern Machine for Universal Predictive Analysis}, + author={Wang, Shiyu and Li, Jiawei and Shi, Xiaoming and Ye, Zhou and Mo, Baichuan and Lin, Wenze and Ju, Shengtong and Chu, Zhixuan and Jin, Ming}, + journal={arXiv preprint arXiv:2410.16032}, + year={2024} +} +``` 2, [**Foundation Models for Time Series Analysis: A Tutorial and Survey**](https://arxiv.org/pdf/2403.14735), in *KDD* 2024. **Authors**: Yuxuan Liang, Haomin Wen, Yuqi Nie, Yushan Jiang, Ming Jin, Dongjin Song, Shirui Pan, Qingsong Wen* ```bibtex @inproceedings{liang2024foundation, title={Foundation models for time series analysis: A tutorial and survey}, author={Liang, Yuxuan and Wen, Haomin and Nie, Yuqi and Jiang, Yushan and Jin, Ming and Song, Dongjin and Pan, Shirui and Wen, Qingsong}, booktitle={ACM SIGKDD Conference on Knowledge Discovery and Data Mining (KDD 2024)}, year={2024} } ``` 3, [**Position Paper: What Can Large Language Models Tell Us about Time Series Analysis**](https://arxiv.org/abs/2402.02713), in *ICML* 2024. **Authors**: Ming Jin, Yifan Zhang, Wei Chen, Kexin Zhang, Yuxuan Liang*, Bin Yang, Jindong Wang, Shirui Pan, Qingsong Wen* ```bibtex @inproceedings{jin2024position, title={Position Paper: What Can Large Language Models Tell Us about Time Series Analysis}, author={Ming Jin and Yifan Zhang and Wei Chen and Kexin Zhang and Yuxuan Liang and Bin Yang and Jindong Wang and Shirui Pan and Qingsong Wen}, booktitle={International Conference on Machine Learning (ICML 2024)}, year={2024} } ``` 4, [**Large Models for Time Series and Spatio-Temporal Data: A Survey and Outlook**](https://arxiv.org/abs/2310.10196), in *arXiv* 2023. [\[GitHub Repo\]](https://github.com/qingsongedu/Awesome-TimeSeries-SpatioTemporal-LM-LLM) **Authors**: Ming Jin, Qingsong Wen*, Yuxuan Liang, Chaoli Zhang, Siqiao Xue, Xue Wang, James Zhang, Yi Wang, Haifeng Chen, Xiaoli Li (IEEE Fellow), Shirui Pan*, Vincent S. Tseng (IEEE Fellow), Yu Zheng (IEEE Fellow), Lei Chen (IEEE Fellow), Hui Xiong (IEEE Fellow) ```bibtex @article{jin2023lm4ts, title={Large Models for Time Series and Spatio-Temporal Data: A Survey and Outlook}, author={Ming Jin and Qingsong Wen and Yuxuan Liang and Chaoli Zhang and Siqiao Xue and Xue Wang and James Zhang and Yi Wang and Haifeng Chen and Xiaoli Li and Shirui Pan and Vincent S. Tseng and Yu Zheng and Lei Chen and Hui Xiong}, journal={arXiv preprint arXiv:2310.10196}, year={2023} } ``` 5, [**Transformers in Time Series: A Survey**](https://arxiv.org/abs/2202.07125), in IJCAI 2023. [\[GitHub Repo\]](https://github.com/qingsongedu/time-series-transformers-review) **Authors**: Qingsong Wen, Tian Zhou, Chaoli Zhang, Weiqi Chen, Ziqing Ma, Junchi Yan, Liang Sun ```bibtex @inproceedings{wen2023transformers, title={Transformers in time series: A survey}, author={Wen, Qingsong and Zhou, Tian and Zhang, Chaoli and Chen, Weiqi and Ma, Ziqing and Yan, Junchi and Sun, Liang}, booktitle={International Joint Conference on Artificial Intelligence(IJCAI)}, year={2023} } ``` 6, [**TimeMixer: Decomposable Multiscale Mixing for Time Series Forecasting**](https://openreview.net/pdf?id=7oLshfEIC2), in ICLR 2024. [\[GitHub Repo\]](https://github.com/kwuking/TimeMixer) **Authors**: Shiyu Wang, Haixu Wu, Xiaoming Shi, Tengge Hu, Huakun Luo, Lintao Ma, James Y. Zhang, Jun Zhou From b13e881f86cd0475ce1b72c17110430663334955 Mon Sep 17 00:00:00 2001 From: Ming Jin Date: Wed, 15 Oct 2025 21:40:16 +1000 Subject: [PATCH 16/16] Update README.md --- README.md | 94 ++++++++++++++++++++++++++----------------------- 1 file changed, 44 insertions(+), 50 deletions(-) diff --git a/README.md b/README.md index 23f5224c..8711cf58 100644 --- a/README.md +++ b/README.md @@ -51,12 +51,15 @@ ``` ## Updates/News: +🚩 **News** (Oct. 
2025): Time-LLM has been cited 1,000 times in the past two years! 🎉 We are deeply grateful to the community for the incredible support along the journey. 🚩 **News** (Aug. 2024): Time-LLM has been adopted by XiMou Optimization Technology Co., Ltd. (XMO) for Solar, Wind, and Weather Forecasting. +🚩 **News** (Oct. 2024): Time-LLM has been included in [PyPOTS](https://pypots.com/). Many thanks to the PyPOTS team! + 🚩 **News** (May 2024): Time-LLM has been included in [NeuralForecast](https://github.com/Nixtla/neuralforecast). Special thanks to the contributor @[JQGoh](https://github.com/JQGoh) and @[marcopeix](https://github.com/marcopeix)! -🚩 **News** (March 2024): Time-LLM has been upgraded to serve as a general framework for repurposing a wide range of language models to time series forecasting. It now defaults to supporting Llama-7B and includes compatibility with two additional smaller PLMs (GPT-2 and BERT). Simply adjust `--llm_model` and `--llm_dim` to switch backbones. +🚩 **News** (Mar. 2024): Time-LLM has been upgraded to serve as a general framework for repurposing a wide range of language models to time series forecasting. It now defaults to supporting Llama-7B and includes compatibility with two additional smaller PLMs (GPT-2 and BERT). Simply adjust `--llm_model` and `--llm_dim` to switch backbones. ## Introduction Time-LLM is a reprogramming framework to repurpose LLMs for general time series forecasting with the backbone language models kept intact. @@ -120,86 +123,77 @@ Please refer to ```run_main.py```, ```run_m4.py``` and ```run_pretrain.py``` for ## Further Reading -1, [**TimeMixer++: A General Time Series Pattern Machine for Universal Predictive Analysis**](https://arxiv.org/abs/2410.16032), in *arXiv* 2024. -[\[GitHub Repo\]](https://github.com/kwuking/TimeMixer/blob/main/README.md) -**Authors**: Shiyu Wang, Jiawei Li, Xiaoming Shi, Zhou Ye, Baichuan Mo, Wenze Lin, Shengtong Ju, Zhixuan Chu, Ming Jin +As one of the earliest works exploring the intersection of large language models and time series, we sincerely thank the open-source community for supporting our research. While we do not plan to make major updates to the main Time-LLM codebase, we still welcome **constructive pull requests** to help maintain and improve it. -```bibtex -@article{wang2024timemixer++, - title={TimeMixer++: A General Time Series Pattern Machine for Universal Predictive Analysis}, - author={Wang, Shiyu and Li, Jiawei and Shi, Xiaoming and Ye, Zhou and Mo, Baichuan and Lin, Wenze and Ju, Shengtong and Chu, Zhixuan and Jin, Ming}, - journal={arXiv preprint arXiv:2410.16032}, - year={2024} -} -``` +🌟 Please check out our team’s latest research projects listed below. -2, [**Foundation Models for Time Series Analysis: A Tutorial and Survey**](https://arxiv.org/pdf/2403.14735), in *KDD* 2024. +1, [**TimeOmni-1: Incentivizing Complex Reasoning with Time Series in Large Language Models**](https://arxiv.org/pdf/2509.24803), *arXiv* 2025. 
-**Authors**: Yuxuan Liang, Haomin Wen, Yuqi Nie, Yushan Jiang, Ming Jin, Dongjin Song, Shirui Pan, Qingsong Wen* +**Authors**: Tong Guan, Zijie Meng, Dianqi Li, Shiyu Wang, Chao-Han Huck Yang, Qingsong Wen, Zuozhu Liu, Sabato Marco Siniscalchi, Ming Jin, Shirui Pan ```bibtex -@inproceedings{liang2024foundation, - title={Foundation models for time series analysis: A tutorial and survey}, - author={Liang, Yuxuan and Wen, Haomin and Nie, Yuqi and Jiang, Yushan and Jin, Ming and Song, Dongjin and Pan, Shirui and Wen, Qingsong}, - booktitle={ACM SIGKDD Conference on Knowledge Discovery and Data Mining (KDD 2024)}, - year={2024} +@article{guan2025timeomni, + title={TimeOmni-1: Incentivizing Complex Reasoning with Time Series in Large Language Models}, + author={Guan, Tong and Meng, Zijie and Li, Dianqi and Wang, Shiyu and Yang, Chao-Han Huck and Wen, Qingsong and Liu, Zuozhu and Siniscalchi, Sabato Marco and Jin, Ming and Pan, Shirui}, + journal={arXiv preprint arXiv:2509.24803}, + year={2025} } ``` -3, [**Position Paper: What Can Large Language Models Tell Us about Time Series Analysis**](https://arxiv.org/abs/2402.02713), in *ICML* 2024. +2, [**Time-MQA: Time Series Multi-Task Question Answering with Context Enhancement**](https://arxiv.org/pdf/2503.01875), in *ACL* 2025. +[\[HuggingFace\]](https://huggingface.co/Time-MQA) -**Authors**: Ming Jin, Yifan Zhang, Wei Chen, Kexin Zhang, Yuxuan Liang*, Bin Yang, Jindong Wang, Shirui Pan, Qingsong Wen* +**Authors**: Yaxuan Kong, Yiyuan Yang, Yoontae Hwang, Wenjie Du, Stefan Zohren, Zhangyang Wang, Ming Jin, Qingsong Wen ```bibtex -@inproceedings{jin2024position, - title={Position Paper: What Can Large Language Models Tell Us about Time Series Analysis}, - author={Ming Jin and Yifan Zhang and Wei Chen and Kexin Zhang and Yuxuan Liang and Bin Yang and Jindong Wang and Shirui Pan and Qingsong Wen}, - booktitle={International Conference on Machine Learning (ICML 2024)}, - year={2024} +@inproceedings{kong2025time, + title={Time-mqa: Time series multi-task question answering with context enhancement}, + author={Kong, Yaxuan and Yang, Yiyuan and Hwang, Yoontae and Du, Wenjie and Zohren, Stefan and Wang, Zhangyang and Jin, Ming and Wen, Qingsong}, + booktitle={The 63rd Annual Meeting of the Association for Computational Linguistics (ACL 2025)}, + year={2025} } ``` -4, [**Large Models for Time Series and Spatio-Temporal Data: A Survey and Outlook**](https://arxiv.org/abs/2310.10196), in *arXiv* 2023. -[\[GitHub Repo\]](https://github.com/qingsongedu/Awesome-TimeSeries-SpatioTemporal-LM-LLM) +3, [**Towards Neural Scaling Laws for Time Series Foundation Models**](https://arxiv.org/pdf/2410.12360), in *ICLR* 2025. +[\[GitHub Repo\]](https://github.com/Qingrenn/TSFM-ScalingLaws) -**Authors**: Ming Jin, Qingsong Wen*, Yuxuan Liang, Chaoli Zhang, Siqiao Xue, Xue Wang, James Zhang, Yi Wang, Haifeng Chen, Xiaoli Li (IEEE Fellow), Shirui Pan*, Vincent S. Tseng (IEEE Fellow), Yu Zheng (IEEE Fellow), Lei Chen (IEEE Fellow), Hui Xiong (IEEE Fellow) +**Authors**: Qingren Yao, Chao-Han Huck Yang, Renhe Jiang, Yuxuan Liang, Ming Jin, Shirui Pan ```bibtex -@article{jin2023lm4ts, - title={Large Models for Time Series and Spatio-Temporal Data: A Survey and Outlook}, - author={Ming Jin and Qingsong Wen and Yuxuan Liang and Chaoli Zhang and Siqiao Xue and Xue Wang and James Zhang and Yi Wang and Haifeng Chen and Xiaoli Li and Shirui Pan and Vincent S. 
Tseng and Yu Zheng and Lei Chen and Hui Xiong}, - journal={arXiv preprint arXiv:2310.10196}, - year={2023} +@inproceedings{yaotowards, + title={Towards Neural Scaling Laws for Time Series Foundation Models}, + author={Yao, Qingren and Yang, Chao-Han Huck and Jiang, Renhe and Liang, Yuxuan and Jin, Ming and Pan, Shirui}, + booktitle={International Conference on Learning Representations (ICLR)} + year={2025} } ``` +4, [**Time-MoE: Billion-Scale Time Series Foundation Models with Mixture of Experts**](https://arxiv.org/pdf/2409.16040), in *ICLR* 2025. +[\[GitHub Repo\]](https://github.com/Time-MoE/Time-MoE) -5, [**Transformers in Time Series: A Survey**](https://arxiv.org/abs/2202.07125), in IJCAI 2023. -[\[GitHub Repo\]](https://github.com/qingsongedu/time-series-transformers-review) - -**Authors**: Qingsong Wen, Tian Zhou, Chaoli Zhang, Weiqi Chen, Ziqing Ma, Junchi Yan, Liang Sun +**Authors**: Xiaoming Shi, Shiyu Wang, Yuqi Nie, Dianqi Li, Zhou Ye, Qingsong Wen, Ming Jin ```bibtex -@inproceedings{wen2023transformers, - title={Transformers in time series: A survey}, - author={Wen, Qingsong and Zhou, Tian and Zhang, Chaoli and Chen, Weiqi and Ma, Ziqing and Yan, Junchi and Sun, Liang}, - booktitle={International Joint Conference on Artificial Intelligence(IJCAI)}, - year={2023} +@inproceedings{shi2024time, + title={Time-moe: Billion-scale time series foundation models with mixture of experts}, + author={Shi, Xiaoming and Wang, Shiyu and Nie, Yuqi and Li, Dianqi and Ye, Zhou and Wen, Qingsong and Jin, Ming}, + booktitle={International Conference on Learning Representations (ICLR)}, + year={2025} } ``` -6, [**TimeMixer: Decomposable Multiscale Mixing for Time Series Forecasting**](https://openreview.net/pdf?id=7oLshfEIC2), in ICLR 2024. -[\[GitHub Repo\]](https://github.com/kwuking/TimeMixer) +5, [**TimeMixer++: A General Time Series Pattern Machine for Universal Predictive Analysis**](https://arxiv.org/abs/2410.16032), in *ICLR* 2025. +[\[GitHub Repo\]](https://github.com/kwuking/TimeMixer/blob/main/README.md) -**Authors**: Shiyu Wang, Haixu Wu, Xiaoming Shi, Tengge Hu, Huakun Luo, Lintao Ma, James Y. Zhang, Jun Zhou +**Authors**: Shiyu Wang, Jiawei Li, Xiaoming Shi, Zhou Ye, Baichuan Mo, Wenze Lin, Shengtong Ju, Zhixuan Chu, Ming Jin ```bibtex -@inproceedings{wang2023timemixer, - title={TimeMixer: Decomposable Multiscale Mixing for Time Series Forecasting}, - author={Wang, Shiyu and Wu, Haixu and Shi, Xiaoming and Hu, Tengge and Luo, Huakun and Ma, Lintao and Zhang, James Y and ZHOU, JUN}, +@inproceedings{wang2024timemixer++, + title={TimeMixer++: A General Time Series Pattern Machine for Universal Predictive Analysis}, + author={Wang, Shiyu and Li, Jiawei and Shi, Xiaoming and Ye, Zhou and Mo, Baichuan and Lin, Wenze and Ju, Shengtong and Chu, Zhixuan and Jin, Ming}, booktitle={International Conference on Learning Representations (ICLR)}, - year={2024} + year={2025} } ```
