Full Code of uctb/UCTB for AI

master 022310ec26fb cached
518 files
6.7 MB
1.8M tokens
1453 symbols
1 requests
Download .txt
Showing preview only (7,146K chars total). Download the full file or copy to clipboard to get everything.
Repository: uctb/UCTB
Branch: master
Commit: 022310ec26fb
Files: 518
Total size: 6.7 MB

Directory structure:
gitextract_poyacrh5/

├── .gitignore
├── Experiments/
│   ├── AGCRN/
│   │   ├── AGCRN.py
│   │   ├── Runner.py
│   │   └── params.conf
│   ├── ARIMA/
│   │   ├── ARIMA.py
│   │   ├── ARIMA_Parallel.py
│   │   ├── RunnerARIMA.py
│   │   └── trials.py
│   ├── ASTGCN/
│   │   ├── ASTGCN.py
│   │   ├── Runner.py
│   │   └── configurations/
│   │       ├── METR_LA_astgcn.conf
│   │       ├── PEMS04_astgcn.conf
│   │       └── PEMS08_astgcn.conf
│   ├── CustomizedDemo/
│   │   ├── Runner_topk.py
│   │   ├── STMeta_Obj_topk.py
│   │   ├── STMeta_v0.model.yml
│   │   ├── STMeta_v1.model.yml
│   │   ├── STMeta_v2.model.yml
│   │   ├── STMeta_v3.model.yml
│   │   ├── metro_shanghai.data.yml
│   │   └── topKGraph.py
│   ├── DCRNN/
│   │   ├── DCRNN.py
│   │   ├── bike_trial.py
│   │   ├── cs_trial.py
│   │   ├── didi_trial.py
│   │   ├── metr_trial.py
│   │   ├── metro_trial.py
│   │   ├── pems_trial.py
│   │   └── street_didi_trial.py
│   ├── DeepST/
│   │   ├── DeepST.py
│   │   ├── param_search.yml
│   │   └── search_space.json
│   ├── GBRT/
│   │   ├── GBRT.py
│   │   ├── gbrt_config.yml
│   │   └── gbrt_search_space.json
│   ├── GMAN/
│   │   ├── GMAN.py
│   │   └── Runner.py
│   ├── GraphWaveNet/
│   │   ├── GraphWaveNet.py
│   │   └── Runner.py
│   ├── HM/
│   │   ├── HM.py
│   │   ├── hm_closeness_search_space.json
│   │   ├── hm_config.yml
│   │   └── hm_search_space.json
│   ├── HMM/
│   │   ├── HMM.py
│   │   └── trials.py
│   ├── MTGNN/
│   │   ├── MTGNN.py
│   │   └── Runner.py
│   ├── MultiStepPredict/
│   │   ├── Code/
│   │   │   ├── DirRec_ARIMA.py
│   │   │   ├── DirRec_DCRNN.py
│   │   │   ├── DirRec_STMeta.py
│   │   │   ├── DirRec_XGBoost.py
│   │   │   └── viz.py
│   │   └── README.md
│   ├── ParameterSearch/
│   │   ├── ARIMA.py
│   │   ├── CPT_GBRT.py
│   │   ├── CPT_HM.py
│   │   ├── CPT_STMeta_Obj.py
│   │   ├── CPT_XGBoost.py
│   │   ├── config.yml
│   │   ├── hm_config.yml
│   │   ├── hm_search_space.json
│   │   ├── plot_paper.ipynb
│   │   ├── results/
│   │   │   └── HM_Bike_NYC.json
│   │   ├── search_space.json
│   │   ├── xgboost_config.yml
│   │   └── xgboost_search_space.json
│   ├── RegionGeneration/
│   │   └── region_generation.py
│   ├── STGCN/
│   │   ├── Runner.py
│   │   └── STGCN.py
│   ├── STMeta/
│   │   ├── RunnerCPTtrial.py
│   │   ├── RunnerLSTM.py
│   │   ├── RunnerStreetDiDi.py
│   │   ├── RunnerWWW.py
│   │   ├── Runner_GRU.py
│   │   ├── Runner_M1_0.py
│   │   ├── Runner_M1_1.py
│   │   ├── Runner_M2_0.py
│   │   ├── Runner_M2_1.py
│   │   ├── Runner_Main.py
│   │   ├── Runner_PS_Chicago.py
│   │   ├── Runner_PS_NYC.py
│   │   ├── Runner_PS_Shanghai.py
│   │   ├── Runner_singleGraph.py
│   │   ├── Runner_temporalAblation.py
│   │   ├── Runner_v3.py
│   │   ├── STMeta_Obj.py
│   │   ├── STMeta_Obj_time.py
│   │   ├── STMeta_v0.model.yml
│   │   ├── STMeta_v1.model.yml
│   │   ├── STMeta_v2.model.yml
│   │   ├── STMeta_v3.model.yml
│   │   ├── bike_chicago.data.yml
│   │   ├── bike_dc.data.yml
│   │   ├── bike_nyc.data.yml
│   │   ├── chargestation_beijing.data.yml
│   │   ├── didi_chengdu.data.yml
│   │   ├── didi_chengdu_street.data.yml
│   │   ├── didi_xian.data.yml
│   │   ├── didi_xian_street.data.yml
│   │   ├── gc_search.json
│   │   ├── lstm_search.json
│   │   ├── metr_la.data.yml
│   │   ├── metro_chongqing.data.yml
│   │   ├── metro_shanghai.data.yml
│   │   ├── param_search.yml
│   │   └── pems_bay.data.yml
│   ├── STMeta_Transfer/
│   │   ├── Runner.py
│   │   ├── STMeta_Pretrain.py
│   │   ├── STMeta_Transfer.py
│   │   ├── STMeta_Transfer_Dynamic.py
│   │   ├── STMeta_Transfer_Test.py
│   │   ├── STMeta_v1.model.yml
│   │   ├── STMeta_v2.model.yml
│   │   ├── STMeta_v3.model.yml
│   │   ├── STMeta_v4.model.yml
│   │   ├── bike_chicago.data.yml
│   │   ├── bike_dc.data.yml
│   │   ├── bike_nyc.data.yml
│   │   ├── chargestation_beijing.data.yml
│   │   ├── didi_chengdu.data.yml
│   │   ├── didi_xian.data.yml
│   │   ├── metro_chongqing.data.yml
│   │   ├── metro_shanghai.data.yml
│   │   ├── network_search.json
│   │   ├── param_search.yml
│   │   ├── transfer_record.md
│   │   ├── transfer_record_bk.md
│   │   ├── transfer_result_overall.md
│   │   └── 使用流量匹配的结果.md
│   ├── STSGCN/
│   │   ├── Runner.py
│   │   ├── STSGCN.py
│   │   └── config/
│   │       ├── PEMS03/
│   │       │   ├── STMeta_emb.json
│   │       │   ├── STMeta_emb_1.json
│   │       │   ├── individual_GLU_mask_emb.json
│   │       │   ├── individual_GLU_nomask_emb.json
│   │       │   ├── individual_GLU_nomask_noemb.json
│   │       │   ├── individual_relu_nomask_noemb.json
│   │       │   └── sharing_relu_nomask_noemb.json
│   │       ├── PEMS04/
│   │       │   ├── individual_GLU.json
│   │       │   ├── individual_GLU_mask_emb.json
│   │       │   ├── individual_relu.json
│   │       │   ├── sharing_GLU.json
│   │       │   └── sharing_relu.json
│   │       ├── PEMS07/
│   │       │   └── individual_GLU_mask_emb.json
│   │       └── PEMS08/
│   │           └── individual_GLU_mask_emb.json
│   ├── ST_MGCN/
│   │   ├── ST_MGCN_Obj.py
│   │   ├── bike_trial.py
│   │   ├── cs_trial.py
│   │   ├── didi_trial.py
│   │   ├── metr_trial.py
│   │   ├── metro_trial.py
│   │   ├── param_search.yml
│   │   ├── params_search.json
│   │   ├── pems_trial.py
│   │   └── street_didi_trial.py
│   ├── ST_ResNet/
│   │   ├── ST_ResNet.py
│   │   ├── param_search.yml
│   │   └── search_space.json
│   ├── StabilityTest/
│   │   ├── CPT_AMulti_GCLSTM_Obj.py
│   │   ├── CPT_AMulti_GCLSTM_Simplify_Obj.py
│   │   ├── Master_CS_0.py
│   │   ├── Master_DiDi_0.py
│   │   └── Master_Metro_0.py
│   ├── V3_GACN/
│   │   ├── GACN_Master.py
│   │   └── GACN_Obj.py
│   └── XGBoost/
│       ├── XGBoost.py
│       ├── xgboost_config.yml
│       └── xgboost_search_space.json
├── LICENSE
├── QuickStarts/
│   ├── ARIMA.py
│   ├── DCRNN.py
│   ├── DeepST.py
│   ├── GBRT.py
│   ├── GeoMAN.py
│   ├── GraphWaveNet.py
│   ├── HM.py
│   ├── HMM.py
│   ├── STMeta.py
│   ├── ST_ResNet.py
│   ├── Visualization.py
│   ├── XGBoost.py
│   └── XGBoost_Validate.py
├── README.md
├── UCTB/
│   ├── __init__.py
│   ├── dataset/
│   │   ├── __init__.py
│   │   ├── context_loader.py
│   │   ├── data_loader.py
│   │   └── dataset.py
│   ├── evaluation/
│   │   ├── __init__.py
│   │   └── metric.py
│   ├── model/
│   │   ├── AGCRN.py
│   │   ├── ARIMA.py
│   │   ├── ASTGCN.py
│   │   ├── DCRNN.py
│   │   ├── DeepST.py
│   │   ├── GMAN.py
│   │   ├── GeoMAN.py
│   │   ├── GraphWaveNet.py
│   │   ├── HM.py
│   │   ├── HMM.py
│   │   ├── MCSTGCN.py
│   │   ├── MTGNN.py
│   │   ├── STGCN.py
│   │   ├── STMeta.py
│   │   ├── STSGCN.py
│   │   ├── ST_MGCN.py
│   │   ├── ST_ResNet.py
│   │   ├── XGBoost.py
│   │   └── __init__.py
│   ├── model_unit/
│   │   ├── BaseModel.py
│   │   ├── DCRNN_CELL.py
│   │   ├── GraphModelLayers.py
│   │   ├── ST_RNN.py
│   │   └── __init__.py
│   ├── preprocess/
│   │   ├── GraphGenerator.py
│   │   ├── RegionGenerator.py
│   │   ├── __init__.py
│   │   ├── dataset_helper.py
│   │   ├── preprocessor.py
│   │   └── time_utils.py
│   ├── train/
│   │   ├── EarlyStopping.py
│   │   ├── LossFunction.py
│   │   ├── MiniBatchTrain.py
│   │   └── __init__.py
│   └── utils/
│       ├── __init__.py
│       ├── make_predict_dataset.py
│       ├── multi_threads.py
│       ├── utils_AGCRN.py
│       ├── utils_ASTGCN.py
│       ├── utils_GMAN.py
│       ├── utils_GraphWaveNet.py
│       ├── utils_MTGNN.py
│       ├── utils_STGCN.py
│       └── utils_STSGCN.py
├── __init__.py
├── build.py
├── build_install.py
├── dist/
│   └── UCTB-0.3.5-py3-none-any.whl
├── docs/
│   ├── .buildinfo
│   ├── .doctrees/
│   │   ├── APIReference.doctree
│   │   ├── UCTB.dataset.doctree
│   │   ├── UCTB.doctree
│   │   ├── UCTB.evaluation.doctree
│   │   ├── UCTB.model.doctree
│   │   ├── UCTB.model_unit.doctree
│   │   ├── UCTB.preprocess.doctree
│   │   ├── UCTB.train.doctree
│   │   ├── UCTB.utils.doctree
│   │   ├── environment.pickle
│   │   ├── index.doctree
│   │   ├── md_file/
│   │   │   ├── all_results.doctree
│   │   │   ├── index.doctree
│   │   │   ├── installation.doctree
│   │   │   ├── introduction.doctree
│   │   │   ├── predictive_tool.doctree
│   │   │   ├── quickstart.doctree
│   │   │   ├── src/
│   │   │   │   └── image/
│   │   │   │       └── README.doctree
│   │   │   ├── static/
│   │   │   │   ├── MakeDatasetDiDi.doctree
│   │   │   │   ├── MakeDatasetDiDi_TTI.doctree
│   │   │   │   ├── current_supported_models.doctree
│   │   │   │   ├── experiment_on_bike.doctree
│   │   │   │   ├── experiment_on_chargestation.doctree
│   │   │   │   ├── experiment_on_didi.doctree
│   │   │   │   ├── experiment_on_metro.doctree
│   │   │   │   ├── parameter_search.doctree
│   │   │   │   ├── preprocess_api.doctree
│   │   │   │   ├── quick_start.doctree
│   │   │   │   ├── stable_test.doctree
│   │   │   │   ├── stmeta.doctree
│   │   │   │   └── transfer_record.doctree
│   │   │   ├── tutorial.doctree
│   │   │   ├── uctb_group.doctree
│   │   │   ├── urban_dataset.doctree
│   │   │   └── visualization_tool.doctree
│   │   ├── modules.doctree
│   │   └── update_guide.doctree
│   ├── .nojekyll
│   ├── APIReference.html
│   ├── UCTB.dataset.html
│   ├── UCTB.evaluation.html
│   ├── UCTB.html
│   ├── UCTB.model.html
│   ├── UCTB.model_unit.html
│   ├── UCTB.preprocess.html
│   ├── UCTB.train.html
│   ├── UCTB.utils.html
│   ├── _modules/
│   │   ├── UCTB/
│   │   │   ├── dataset/
│   │   │   │   ├── data_loader.html
│   │   │   │   └── dataset.html
│   │   │   ├── evaluation/
│   │   │   │   └── metric.html
│   │   │   ├── model/
│   │   │   │   ├── ARIMA.html
│   │   │   │   ├── DCRNN.html
│   │   │   │   ├── DeepST.html
│   │   │   │   ├── GeoMAN.html
│   │   │   │   ├── HM.html
│   │   │   │   ├── HMM.html
│   │   │   │   ├── STMeta.html
│   │   │   │   ├── ST_MGCN.html
│   │   │   │   ├── ST_ResNet.html
│   │   │   │   └── XGBoost.html
│   │   │   ├── model_unit/
│   │   │   │   ├── BaseModel.html
│   │   │   │   ├── DCRNN_CELL.html
│   │   │   │   ├── GraphModelLayers.html
│   │   │   │   └── ST_RNN.html
│   │   │   ├── preprocess/
│   │   │   │   ├── preprocessor.html
│   │   │   │   └── time_utils.html
│   │   │   ├── train/
│   │   │   │   ├── EarlyStopping.html
│   │   │   │   └── MiniBatchTrain.html
│   │   │   └── utils/
│   │   │       ├── multi_threads.html
│   │   │       └── st_map.html
│   │   ├── index.html
│   │   └── tensorflow/
│   │       └── python/
│   │           └── keras/
│   │               └── utils/
│   │                   └── tf_utils.html
│   ├── _sources/
│   │   ├── APIReference.rst.txt
│   │   ├── UCTB.dataset.rst.txt
│   │   ├── UCTB.evaluation.rst.txt
│   │   ├── UCTB.model.rst.txt
│   │   ├── UCTB.model_unit.rst.txt
│   │   ├── UCTB.preprocess.rst.txt
│   │   ├── UCTB.rst.txt
│   │   ├── UCTB.train.rst.txt
│   │   ├── UCTB.utils.rst.txt
│   │   ├── index.rst.txt
│   │   ├── md_file/
│   │   │   ├── all_results.md.txt
│   │   │   ├── all_results_setting.md.txt
│   │   │   ├── index.md.txt
│   │   │   ├── installation.md.txt
│   │   │   ├── introduction.md.txt
│   │   │   ├── predictive_tool.md.txt
│   │   │   ├── quickstart.md.txt
│   │   │   ├── src/
│   │   │   │   └── image/
│   │   │   │       └── README.md.txt
│   │   │   ├── static/
│   │   │   │   ├── MakeDatasetDiDi.md.txt
│   │   │   │   ├── MakeDatasetDiDi_TTI.md.txt
│   │   │   │   ├── all_results.md.txt
│   │   │   │   ├── amulti_gclstm.md.txt
│   │   │   │   ├── current_supported_models.md.txt
│   │   │   │   ├── experiment_on_bike.md.txt
│   │   │   │   ├── experiment_on_chargestation.md.txt
│   │   │   │   ├── experiment_on_didi.md.txt
│   │   │   │   ├── experiment_on_metro.md.txt
│   │   │   │   ├── parameter_search.md.txt
│   │   │   │   ├── preprocess_api.md.txt
│   │   │   │   ├── quick_start.md.txt
│   │   │   │   ├── stable_test.md.txt
│   │   │   │   ├── stmeta.md.txt
│   │   │   │   ├── transfer_record.md.txt
│   │   │   │   └── tutorial.md.txt
│   │   │   ├── tutorial.md.txt
│   │   │   ├── uctb_group.md.txt
│   │   │   ├── urban_dataset.md.txt
│   │   │   └── visualization_tool.md.txt
│   │   ├── modules.rst.txt
│   │   └── update_guide.txt
│   ├── _static/
│   │   ├── _sphinx_javascript_frameworks_compat.js
│   │   ├── alabaster.css
│   │   ├── basic.css
│   │   ├── css/
│   │   │   ├── badge_only.css
│   │   │   └── theme.css
│   │   ├── custom.css
│   │   ├── doctools.js
│   │   ├── documentation_options.js
│   │   ├── epub.css
│   │   ├── ie6.css
│   │   ├── jquery-3.1.0.js
│   │   ├── jquery-3.2.1.js
│   │   ├── jquery-3.4.1.js
│   │   ├── jquery-3.5.1.js
│   │   ├── jquery-3.6.0.js
│   │   ├── jquery.js
│   │   ├── js/
│   │   │   └── theme.js
│   │   ├── language_data.js
│   │   ├── misty-light-windows.css
│   │   ├── nature.css
│   │   ├── pygments.css
│   │   ├── pyramid.css
│   │   ├── searchtools.js
│   │   ├── sphinx_highlight.js
│   │   ├── sphinxdoc.css
│   │   ├── underscore-1.13.1.js
│   │   ├── underscore-1.3.1.js
│   │   ├── underscore.js
│   │   └── websupport.js
│   ├── genindex.html
│   ├── index.html
│   ├── md_file/
│   │   ├── all_results.html
│   │   ├── installation.html
│   │   ├── introduction.html
│   │   ├── predictive_tool.html
│   │   ├── src/
│   │   │   └── image/
│   │   │       └── README.html
│   │   ├── static/
│   │   │   ├── stable_test.html
│   │   │   └── transfer_record.html
│   │   ├── uctb_group.html
│   │   ├── urban_dataset.html
│   │   └── visualization_tool.html
│   ├── modules.html
│   ├── objects.inv
│   ├── py-modindex.html
│   ├── search.html
│   ├── searchindex.js
│   ├── sphinx/
│   │   ├── APIReference.rst
│   │   ├── Makefile
│   │   ├── UCTB.dataset.rst
│   │   ├── UCTB.evaluation.rst
│   │   ├── UCTB.model.rst
│   │   ├── UCTB.model_unit.rst
│   │   ├── UCTB.preprocess.rst
│   │   ├── UCTB.rst
│   │   ├── UCTB.train.rst
│   │   ├── UCTB.utils.rst
│   │   ├── _build/
│   │   │   ├── .buildinfo
│   │   │   ├── .doctrees/
│   │   │   │   ├── APIReference.doctree
│   │   │   │   ├── UCTB.dataset.doctree
│   │   │   │   ├── UCTB.doctree
│   │   │   │   ├── UCTB.evaluation.doctree
│   │   │   │   ├── UCTB.model.doctree
│   │   │   │   ├── UCTB.model_unit.doctree
│   │   │   │   ├── UCTB.preprocess.doctree
│   │   │   │   ├── UCTB.train.doctree
│   │   │   │   ├── UCTB.utils.doctree
│   │   │   │   ├── environment.pickle
│   │   │   │   ├── index.doctree
│   │   │   │   ├── md_file/
│   │   │   │   │   ├── all_results.doctree
│   │   │   │   │   ├── installation.doctree
│   │   │   │   │   ├── introduction.doctree
│   │   │   │   │   ├── predictive_tool.doctree
│   │   │   │   │   ├── src/
│   │   │   │   │   │   └── image/
│   │   │   │   │   │       └── README.doctree
│   │   │   │   │   ├── static/
│   │   │   │   │   │   ├── stable_test.doctree
│   │   │   │   │   │   └── transfer_record.doctree
│   │   │   │   │   ├── uctb_group.doctree
│   │   │   │   │   ├── urban_dataset.doctree
│   │   │   │   │   └── visualization_tool.doctree
│   │   │   │   ├── modules.doctree
│   │   │   │   └── update_guide.doctree
│   │   │   ├── .nojekyll
│   │   │   ├── APIReference.html
│   │   │   ├── UCTB.dataset.html
│   │   │   ├── UCTB.evaluation.html
│   │   │   ├── UCTB.html
│   │   │   ├── UCTB.model.html
│   │   │   ├── UCTB.model_unit.html
│   │   │   ├── UCTB.preprocess.html
│   │   │   ├── UCTB.train.html
│   │   │   ├── UCTB.utils.html
│   │   │   ├── _sources/
│   │   │   │   ├── APIReference.rst.txt
│   │   │   │   ├── UCTB.dataset.rst.txt
│   │   │   │   ├── UCTB.evaluation.rst.txt
│   │   │   │   ├── UCTB.model.rst.txt
│   │   │   │   ├── UCTB.model_unit.rst.txt
│   │   │   │   ├── UCTB.preprocess.rst.txt
│   │   │   │   ├── UCTB.rst.txt
│   │   │   │   ├── UCTB.train.rst.txt
│   │   │   │   ├── UCTB.utils.rst.txt
│   │   │   │   ├── index.rst.txt
│   │   │   │   ├── md_file/
│   │   │   │   │   ├── all_results.md.txt
│   │   │   │   │   ├── installation.md.txt
│   │   │   │   │   ├── introduction.md.txt
│   │   │   │   │   ├── predictive_tool.md.txt
│   │   │   │   │   ├── src/
│   │   │   │   │   │   └── image/
│   │   │   │   │   │       └── README.md.txt
│   │   │   │   │   ├── static/
│   │   │   │   │   │   ├── stable_test.md.txt
│   │   │   │   │   │   └── transfer_record.md.txt
│   │   │   │   │   ├── uctb_group.md.txt
│   │   │   │   │   ├── urban_dataset.md.txt
│   │   │   │   │   └── visualization_tool.md.txt
│   │   │   │   ├── modules.rst.txt
│   │   │   │   └── update_guide.txt
│   │   │   ├── _static/
│   │   │   │   ├── _sphinx_javascript_frameworks_compat.js
│   │   │   │   ├── basic.css
│   │   │   │   ├── css/
│   │   │   │   │   ├── badge_only.css
│   │   │   │   │   └── theme.css
│   │   │   │   ├── doctools.js
│   │   │   │   ├── documentation_options.js
│   │   │   │   ├── jquery-3.1.0.js
│   │   │   │   ├── jquery-3.6.0.js
│   │   │   │   ├── jquery.js
│   │   │   │   ├── js/
│   │   │   │   │   └── theme.js
│   │   │   │   ├── language_data.js
│   │   │   │   ├── pygments.css
│   │   │   │   ├── searchtools.js
│   │   │   │   ├── underscore-1.13.1.js
│   │   │   │   ├── underscore-1.3.1.js
│   │   │   │   ├── underscore.js
│   │   │   │   └── websupport.js
│   │   │   ├── genindex.html
│   │   │   ├── index.html
│   │   │   ├── md_file/
│   │   │   │   ├── all_results.html
│   │   │   │   ├── installation.html
│   │   │   │   ├── introduction.html
│   │   │   │   ├── predictive_tool.html
│   │   │   │   ├── src/
│   │   │   │   │   └── image/
│   │   │   │   │       └── README.html
│   │   │   │   ├── static/
│   │   │   │   │   ├── stable_test.html
│   │   │   │   │   └── transfer_record.html
│   │   │   │   ├── uctb_group.html
│   │   │   │   ├── update_guide.html
│   │   │   │   ├── urban_dataset.html
│   │   │   │   └── visualization_tool.html
│   │   │   ├── modules.html
│   │   │   ├── objects.inv
│   │   │   ├── py-modindex.html
│   │   │   ├── search.html
│   │   │   ├── searchindex.js
│   │   │   └── update_guide.html
│   │   ├── conf.py
│   │   ├── index.rst
│   │   ├── make.bat
│   │   ├── md_file/
│   │   │   ├── .gitignore
│   │   │   ├── all_results.md
│   │   │   ├── installation.md
│   │   │   ├── introduction.md
│   │   │   ├── predictive_tool.md
│   │   │   ├── src/
│   │   │   │   └── image/
│   │   │   │       └── README.md
│   │   │   ├── static/
│   │   │   │   ├── stable_test.md
│   │   │   │   └── transfer_record.md
│   │   │   ├── uctb_group.md
│   │   │   ├── urban_dataset.md
│   │   │   └── visualization_tool.md
│   │   ├── modules.rst
│   │   └── update_guide.txt
│   └── update_guide.html
├── environment.yaml
└── setup.py

================================================
FILE CONTENTS
================================================

================================================
FILE: .gitignore
================================================
*$py.class
*,cover
*.egg
*.egg-info/
*.log
*.manifest
*.mo
*.pot
*.py[co]
*.py[cod]
*.so
*.spec
*/.DS_Store
*~
.DS_Store
._.DS_Store
.Python
.cache
.coverage
.coverage.*
.eggs/
.env
.idea
*/.idea
__pycache__/
build/
*.npy
.vscode/
*.pkl
model_dir/
.ipynb_checkpoints/
@*
output/
*data/

================================================
FILE: Experiments/AGCRN/AGCRN.py
================================================
import os
import GPUtil
import torch
import argparse
import configparser

from datetime import datetime
from UCTB.model.AGCRN import AGCRN
from UCTB.utils.utils_AGCRN import Trainer
from UCTB.dataset import NodeTrafficLoader
from UCTB.utils.utils_AGCRN import get_dataloader_AGCRN
from UCTB.evaluation import metric

# Pick an idle-ish GPU (low load, low memory use); fall back to device id -1
# when none qualifies. Note the fallback string still yields 'cuda:-1'.
available_gpus = GPUtil.getAvailable(order='last', limit=8, maxLoad=1, maxMemory=0.2,
                                     includeNan=False, excludeID=[], excludeUUID=[])
selected_gpu = str(available_gpus[0]) if available_gpus else '-1'
DEVICE = 'cuda:{}'.format(selected_gpu)

# Command-line interface: most hyper-parameter defaults are pulled from the
# params.conf file next to the current working directory; CLI flags override.
parser = argparse.ArgumentParser(description='arguments')
add = parser.add_argument

add('--mode', default='train', type=str)
add('--debug', default='False', type=eval)
add('--model', default='AGCRN', type=str)
add('--cuda', default=True, type=bool)

# load hyper-parameter defaults from params.conf (relative to CWD)
config_path = os.path.abspath('.') + '/params.conf'
config = configparser.ConfigParser()
config.read(config_path)

# device
add('--device', default=DEVICE, type=str, help='indices of GPUs')
# data
add('--lag', default=config['data']['lag'], type=int)
add('--horizon', default=config['data']['horizon'], type=int)
add('--num_nodes', default=config['data']['num_nodes'], type=int)
add('--tod', default=config['data']['tod'], type=eval)
add('--normalize', default="Zscore", type=str)
add('--column_wise', default=config['data']['column_wise'], type=eval)
add('--default_graph', default=config['data']['default_graph'], type=eval)
# model
add('--input_dim', default=config['model']['input_dim'], type=int)
add('--output_dim', default=config['model']['output_dim'], type=int)
add('--embed_dim', default=config['model']['embed_dim'], type=int)
add('--rnn_units', default=config['model']['rnn_units'], type=int)
add('--num_layers', default=config['model']['num_layers'], type=int)
add('--cheb_k', default=config['model']['cheb_order'], type=int)
# train
add('--loss_func', default=config['train']['loss_func'], type=str)
add('--seed', default=config['train']['seed'], type=int)
add('--batch_size', default=config['train']['batch_size'], type=int)
add('--epochs', default=config['train']['epochs'], type=int)
add('--lr_init', default=config['train']['lr_init'], type=float)
add('--lr_decay', default=config['train']['lr_decay'], type=eval)
add('--lr_decay_rate', default=config['train']['lr_decay_rate'], type=float)
add('--lr_decay_step', default=config['train']['lr_decay_step'], type=str)
add('--early_stop', default=config['train']['early_stop'], type=eval)
add('--early_stop_patience', default=config['train']['early_stop_patience'], type=int)
add('--grad_norm', default=config['train']['grad_norm'], type=eval)
add('--max_grad_norm', default=config['train']['max_grad_norm'], type=int)
add('--teacher_forcing', default=False, type=bool)
# (a --tf_decay_steps option existed historically but is disabled)
add('--real_value', default=config['train']['real_value'],
    type=eval, help='use real value for loss calculation')
# test
add('--mae_thresh', default=config['test']['mae_thresh'], type=eval)
add('--mape_thresh', default=config['test']['mape_thresh'], type=float)
# log
add('--log_dir', default='./', type=str)
add('--log_step', default=config['log']['log_step'], type=int)
add('--plot', default=config['log']['plot'], type=eval)

# data loader parameters
add("--dataset", default='Bike', type=str, help="configuration file path")
add("--city", default='NYC', type=str)
add("--closeness_len", default=6, type=int)
add("--period_len", default=7, type=int)
add("--trend_len", default=4, type=int)
add("--data_range", default="all", type=str)
add("--train_data_length", default="all", type=str)
add("--test_ratio", default=0.1, type=float)
add("--MergeIndex", default=1, type=int)
add("--MergeWay", default="sum", type=str)

args = parser.parse_args()

# ---------------------------------------------------------------------------
# Script body: load data, build AGCRN, train (optionally), then evaluate the
# best checkpoint on the test split.
# ---------------------------------------------------------------------------
data_loader = NodeTrafficLoader(dataset=args.dataset, city=args.city,
                                data_range=args.data_range, train_data_length=args.train_data_length,
                                test_ratio=float(args.test_ratio),
                                closeness_len=args.closeness_len,
                                period_len=args.period_len,
                                trend_len=args.trend_len,
                                normalize=args.normalize,
                                MergeIndex=args.MergeIndex,
                                MergeWay=args.MergeWay)

# the node count comes from the dataset itself, overriding the config default
args.num_nodes = data_loader.station_number

# wrap the loader's arrays into train/val/test torch DataLoaders
train_loader, val_loader, test_loader = get_dataloader_AGCRN(data_loader,
                                                             tod=args.tod, batchsize=args.batch_size, dow=False,
                                                             weather=False, single=False)

# model build
model = AGCRN(args.num_nodes, args.input_dim, args.rnn_units, args.output_dim,
              args.horizon, args.num_layers, args.default_graph, args.embed_dim, args.cheb_k)
model = model.to(args.device)

# config log path (one directory per dataset/city/merge configuration)
current_dir = os.path.dirname(os.path.realpath(__file__))
log_dir = os.path.join(current_dir, 'model_dir', "{}_{}_{}_{}_{}_{}".format(
    args.dataset, args.city, args.MergeIndex, args.closeness_len, args.period_len, args.trend_len))
if not os.path.exists(log_dir):
    os.makedirs(log_dir)
print("log_dir:", log_dir)
args.log_dir = log_dir

# Train (when --mode train), then always evaluate the saved best checkpoint.
trainer = Trainer(model, train_loader, val_loader, test_loader, args)
if args.mode == 'train':
    trainer.train()

model.load_state_dict(torch.load(os.path.join(log_dir, "best_model.pth")))

print("Load saved model")

# Test: bring prediction and ground truth back to the ORIGINAL scale before
# scoring. BUG FIX: a duplicated rmse() call previously overwrote this value
# using the still-normalized data_loader.test_y as the target, so the printed
# RMSE compared quantities on different scales.
test_prediction = trainer.test(
    model, trainer.args, test_loader, trainer.logger)
test_prediction = data_loader.normalizer.inverse_transform(test_prediction)
y_true = data_loader.normalizer.inverse_transform(data_loader.test_y)
test_rmse = metric.rmse(prediction=test_prediction.squeeze(), target=y_true.squeeze())

print('Test RMSE', test_rmse)


================================================
FILE: Experiments/AGCRN/Runner.py
================================================
import os

# Batch launcher for the AGCRN benchmarks: each line below is a complete
# `python AGCRN.py ...` shell command. Runs are toggled by (un)commenting
# the corresponding os.system call; commands execute sequentially and block
# until the child process exits.

#############################################
# BenchMark Bike
#############################################
########### NYC ###########
# os.system("python AGCRN.py --dataset Bike --city NYC --data_range 0.25 --train_data_length 91 --MergeIndex 3 --MergeWay sum")

# os.system("python AGCRN.py --dataset Bike --city NYC --data_range 0.5 --train_data_length 183 --MergeIndex 6 --MergeWay sum")

# os.system("python AGCRN.py --dataset Bike --city NYC --data_range all --train_data_length 365 --MergeIndex 12 --MergeWay sum")


# # ########### Chicago ###########
# NOTE: currently the only enabled configuration in this runner.
os.system("python AGCRN.py --dataset Bike --city Chicago --data_range 0.25 --train_data_length 91 --MergeIndex 3 --MergeWay sum")

# # # os.system("python AGCRN.py --dataset Bike --city Chicago --data_range 0.5 --train_data_length 183 --MergeIndex 6 --MergeWay sum")

# os.system("python AGCRN.py --dataset Bike --city Chicago --data_range all --train_data_length 365 --MergeIndex 12 --MergeWay sum")


# # ########### DC ###########
# # # os.system("python AGCRN.py --dataset Bike --city DC --data_range 0.25 --train_data_length 91 --MergeIndex 3 --MergeWay sum")

# # # os.system("python AGCRN.py --dataset Bike --city DC --data_range 0.5 --train_data_length 183 --MergeIndex 6 --MergeWay sum")

# os.system("python AGCRN.py --dataset Bike --city DC --data_range all --train_data_length 365 --MergeIndex 12 --MergeWay sum")



# # ###############################################
# # # BenchMark DiDi
# # ###############################################
# # ############# Xian #############
# # # os.system("python AGCRN.py --dataset DiDi --city Xian --MergeIndex 3 --MergeWay sum")

# # # os.system("python AGCRN.py --dataset DiDi --city Xian --MergeIndex 6 --MergeWay sum")

# os.system("python AGCRN.py --dataset DiDi --city Xian --MergeIndex 12 --MergeWay sum")

# # ############# Chengdu #############
# # # os.system("python AGCRN.py --dataset DiDi --city Chengdu --MergeIndex 3 --MergeWay sum")

# # # os.system("python AGCRN.py --dataset DiDi --city Chengdu --MergeIndex 6 --MergeWay sum")

# os.system("python AGCRN.py --dataset DiDi --city Chengdu --MergeIndex 12 --MergeWay sum")



# # ###############################################
# # # BenchMark Metro
# # ###############################################
# # ############# Chongqing #############
# # # os.system("python AGCRN.py --dataset Metro --city Chongqing --MergeIndex 3 --MergeWay sum")

# # # os.system("python AGCRN.py --dataset Metro --city Chongqing --MergeIndex 6 --MergeWay sum")

#os.system("python AGCRN.py --dataset Metro --city Chongqing --MergeIndex 12 --MergeWay sum")


# # ############# Shanghai #############
# # # os.system("python AGCRN.py --dataset Metro --city Shanghai --MergeIndex 3 --MergeWay sum")

# # # os.system("python AGCRN.py --dataset Metro --city Shanghai --MergeIndex 6 --MergeWay sum")

# os.system("python AGCRN.py --dataset Metro --city Shanghai --MergeIndex 12 --MergeWay sum")



# # ###############################################
# # # BenchMark ChargeStation
# # ###############################################

# # # os.system("python AGCRN.py --dataset ChargeStation --city Beijing --MergeIndex 1 --MergeWay max")

# os.system("python AGCRN.py --dataset ChargeStation --city Beijing --MergeIndex 2 --MergeWay max")




# # ###############################################
# # # BenchMark METR-LA
# # ###############################################

# # # os.system("python AGCRN.py --dataset METR --city LA --MergeIndex 3 --MergeWay average")

# # # os.system("python AGCRN.py --dataset METR --city LA --MergeIndex 6 --MergeWay average")

# os.system("python AGCRN.py --dataset METR --city LA --MergeIndex 12 --MergeWay average")


# # ###############################################
# # # BenchMark PEMS-BAY
# # ###############################################
# # # os.system("python AGCRN.py --dataset PEMS --city BAY --MergeIndex 3 --MergeWay average")

# # # os.system("python AGCRN.py --dataset PEMS --city BAY --MergeIndex 6 --MergeWay average")

# os.system("python AGCRN.py --dataset PEMS --city BAY --MergeIndex 12 --MergeWay average")


================================================
FILE: Experiments/AGCRN/params.conf
================================================
[data]
num_nodes = 717
lag = 12
horizon = 1
val_ratio = 0.2
test_ratio = 0.2
tod = False
normalizer = std
column_wise = False
default_graph = True

[model]
input_dim = 1
output_dim = 1
embed_dim = 10
rnn_units = 64
num_layers = 2
cheb_order = 2

[train]
loss_func = mae
seed = 10
batch_size = 16
epochs = 1500
lr_init = 0.003
lr_decay = False
lr_decay_rate = 0.3
lr_decay_step = 5,20,40,70
early_stop = False
early_stop_patience = 15
grad_norm = False
max_grad_norm = 5
real_value = True

[test]
mae_thresh = None
mape_thresh = 0.

[log]
log_step = 20
plot = False

================================================
FILE: Experiments/ARIMA/ARIMA.py
================================================
"""Per-station (seasonal) ARIMA benchmark.

Fits an independent ARIMA model for every station on its closeness
history, makes one-step-ahead predictions on the validation and test
splits, and prints the aggregated RMSE.  Stations whose model fails to
converge fall back to the last observed value as the prediction.
"""
import numpy as np
import argparse
from tqdm import tqdm
from UCTB.model import ARIMA
from UCTB.dataset import NodeTrafficLoader
from UCTB.evaluation import metric
from UCTB.preprocess import SplitData
import os

import warnings

warnings.filterwarnings('ignore')


parser = argparse.ArgumentParser(description="Argument Parser")
# data source
parser.add_argument('--dataset', default=' ', type=str)
parser.add_argument('--city', default=None)
# BUG FIX: added type=int, consistent with the sibling scripts (e.g.
# ASTGCN.py).  Without it, a value supplied on the command line (as
# RunnerARIMA.py does with "--MergeIndex 12") reached NodeTrafficLoader
# as a str while the default stayed an int.
parser.add_argument('--MergeIndex', default=3, type=int)
parser.add_argument('--MergeWay', default='sum', type=str)
parser.add_argument('--test_ratio', default=0.1, type=float)

# network parameter
parser.add_argument('--CT', default='168', type=int)  # closeness window length

# non-seasonal ARIMA order (p, d, q)
parser.add_argument('--ar', default='3', type=int)
parser.add_argument('--d', default='0', type=int)
parser.add_argument('--ma', default='1', type=int)

# seasonal order (P, D, Q, s)
parser.add_argument('--sar', default='0', type=int)
parser.add_argument('--sd', default='0', type=int)
parser.add_argument('--sma', default='0', type=int)
parser.add_argument('--sp', default='0', type=int)


parser.add_argument('--DataRange', default="all")
parser.add_argument('--TrainDays', default="all")


args = vars(parser.parse_args())


data_loader = NodeTrafficLoader(dataset=args['dataset'], city=args['city'],
                                data_range=args['DataRange'], train_data_length=args['TrainDays'],
                                test_ratio=args['test_ratio'],
                                closeness_len=int(args['CT']), period_len=0, trend_len=0,
                                with_lm=False, with_tpe=False, normalize=False, MergeIndex=args['MergeIndex'],
                                MergeWay=args['MergeWay'])


# Hold out the last 10% of the training span as a validation split.
train_closeness, val_closeness = SplitData.split_data(data_loader.train_closeness, [0.9, 0.1])
train_y, val_y = SplitData.split_data(data_loader.train_y, [0.9, 0.1])

val_prediction_collector = []
test_prediction_collector = []


print('*************************************************************')

for i in tqdm(range(data_loader.station_number)):

    try:
        # Fit on this station's most recent closeness value per sample.
        model_obj = ARIMA(time_sequence=train_closeness[:, i, -1, 0],
                          order=[args['ar'], args['d'], args['ma']],
                          seasonal_order=[args['sar'], args['sd'], args['sma'], args['sp']])

        val_prediction = model_obj.predict(
            time_sequences=val_closeness[:, i, :, 0], forecast_step=1)
        test_prediction = model_obj.predict(
            time_sequences=data_loader.test_closeness[:, i, :, 0], forecast_step=1)

    except Exception as e:
        # Best-effort fallback: use the last observation as the forecast.
        print('Converge failed with error', e)
        print('Using last as prediction')

        # NOTE(review): the fallback keeps the trailing feature axis, so
        # its shape may differ from ARIMA.predict's output -- confirm both
        # stack cleanly along axis=-2 in np.concatenate below.
        val_prediction = val_closeness[:, i, -1:, :]
        test_prediction = data_loader.test_closeness[:, i, -1:, :]

    val_prediction_collector.append(val_prediction)
    test_prediction_collector.append(test_prediction)

    print('Station', i, metric.rmse(test_prediction,
                                    data_loader.test_y[:, i:i+1]))


# Stack the per-station predictions along the station axis.
val_prediction_collector = np.concatenate(val_prediction_collector, axis=-2)
test_prediction_collector = np.concatenate(test_prediction_collector, axis=-2)

val_rmse = metric.rmse(val_prediction_collector, val_y)
test_rmse = metric.rmse(test_prediction_collector,
                        data_loader.test_y)

print(args['dataset'], args['city'], 'val_rmse', val_rmse)
print(args['dataset'], args['city'], 'test_rmse', test_rmse)


print('*************************************************************')


================================================
FILE: Experiments/ARIMA/ARIMA_Parallel.py
================================================
import os
import numpy as np
import argparse

from UCTB.model import ARIMA
from UCTB.dataset import NodeTrafficLoader
from UCTB.evaluation import metric
from UCTB.utils import multiple_process


# Parallel per-station ARIMA benchmark.  The loader is created at module
# import time so every worker process spawned below inherits it.
parser = argparse.ArgumentParser(description="Argument Parser")
# data source
parser.add_argument('--Dataset', default='Bike')
parser.add_argument('--City', default='NYC')
# network parameter
parser.add_argument('--CT', default='24', type=int)  # closeness window length

# non-seasonal ARIMA order (p, d, q)
parser.add_argument('--ar', default='6', type=int)
parser.add_argument('--d', default='0', type=int)
parser.add_argument('--ma', default='1', type=int)

# seasonal order (P, D, Q, s)
parser.add_argument('--sar', default='0', type=int)
parser.add_argument('--sd', default='0', type=int)
parser.add_argument('--sma', default='0', type=int)
parser.add_argument('--sp', default='0', type=int)

parser.add_argument('--DataRange', default='All')
parser.add_argument('--TrainDays', default='365')

args = vars(parser.parse_args())

data_loader = NodeTrafficLoader(dataset=args['Dataset'], city=args['City'],
                                closeness_len=int(args['CT']), period_len=0, trend_len=0,
                                data_range=args['DataRange'], train_data_length=args['TrainDays'],
                                with_lm=False, with_tpe=False, normalize=False)


def task(share_queue, locker, data, parameters):
    """Worker: fit one ARIMA model per assigned station, collect predictions.

    Args:
        share_queue: shared queue; receives ``[val_collector, test_collector]``
            (dicts keyed by station index) when this worker finishes.
        locker: lock guarding the queue put.
        data: iterable of station indices assigned to this worker.
        parameters: ``parameters[0]`` is the worker id (used only in logs).
    """

    print('Child process %s with pid %s' % (parameters[0], os.getpid()))

    # NOTE(review): val_collector is never populated below -- only test
    # predictions are produced, but reduce_fn still expects the pair.
    val_collector = {}
    test_collector = {}

    for i in data:

        print('Child process %s' % (parameters[0]),
              args['Dataset'], args['City'], 'Station', i, 'total', data_loader.station_number)

        try:
            # Fit on the station's most recent closeness value per sample.
            model_obj = ARIMA(time_sequence=data_loader.train_closeness[:, i, -1, 0],
                              order=[args['ar'], args['d'], args['ma']],
                              seasonal_order=[args['sar'], args['sd'], args['sma'], args['sp']])

            test_prediction = model_obj.predict(time_sequences=data_loader.test_closeness[:, i, :, 0], forecast_step=1)

            del model_obj

        except Exception as e:
            # Best-effort fallback: use the last observation as the forecast.
            print('Converge failed with error', e)
            print('Using last as prediction')

            test_prediction = data_loader.test_closeness[:, i, -1:, :]

        test_collector[i] = test_prediction

        print('Station', i, metric.rmse(test_prediction, data_loader.test_y[:, i:i + 1]))

    locker.acquire()
    share_queue.put([val_collector, test_collector])
    locker.release()


def reduce_fn(a, b):
    """Fold one worker's result `b` into the accumulator `a`.

    Both arguments are ``[val_dict, test_dict]`` pairs produced by
    ``task``; each dict from `b` is merged into the corresponding dict
    in `a`, and the (mutated) accumulator is returned.
    """
    val_acc, test_acc = a[0], a[1]
    val_acc.update(b[0])
    test_acc.update(b[1])
    return a


if __name__ == '__main__':

    # Number of worker processes; stations are dealt round-robin to workers.
    n_job = 8

    result = multiple_process(distribute_list=range(data_loader.station_number),
                              partition_func=lambda data, i, n_job:
                              [data[e] for e in range(len(data)) if e % n_job == i],
                              task_func=task, n_jobs=n_job, reduce_func=reduce_fn, parameters=[])

    # result[1] is the merged test_collector dict; sort by station index
    # before stacking the per-station predictions along the station axis.
    test_rmse_collector = [e[1] for e in sorted(result[1].items(), key=lambda x: x[0])]

    test_rmse_collector = np.concatenate(test_rmse_collector, axis=-2)

    test_rmse = metric.rmse(test_rmse_collector, data_loader.test_y)

    print(args['Dataset'], args['City'], 'test_rmse', test_rmse)

================================================
FILE: Experiments/ARIMA/RunnerARIMA.py
================================================
import os
from tqdm import tqdm
# Batch runner: invokes ARIMA.py once per configuration below and appends
# each run's stdout to a single result file.
# Each entry: [dataset, city, DataRange, TrainDays, MergeWay, test_ratio].
# dataset = [['Bike','NYC','all','365','sum','0.1'],['DiDi','Xian','all','all','sum','0.1'],
# ['Metro','Chongqing','all','all','sum','0.1'],['ChargeStation','Beijing','all','all','max','0.1'],
# ['METR','LA','all','all','average','0.2'],['PEMS','BAY','all','all','average','0.2']]
# dataset = [['METR','LA','all','all','average','0.2'],['PEMS','BAY','all','all','average','0.2']]

# dataset = [['Bike', 'NYC', '0.125', '60', 'sum', '0.1'], ['Bike', 'Chicago', '0.125', '60', 'sum', '0.1'], ['Bike', 'DC', '0.125', '60', 'sum', '0.1'],
#            ['DiDi', 'Xian', 'all', 'all', 'sum', '0.1'], ['DiDi', 'Chengdu', 'all', 'all', 'sum', '0.1'],
#            ['Metro', 'Chongqing', 'all', 'all', 'sum', '0.1'], ['Metro', 'Shanghai', 'all', 'all', 'sum', '0.1'],
#            ['METR', 'LA', 'all', 'all', 'average', '0.2'], ['PEMS', 'BAY', 'all', 'all', 'average', '0.2']]

dataset = [['DiDi', 'Xian_Street', 'all', 'all', 'sum', '0.1'], ['DiDi', 'Chengdu_Street', 'all', 'all', 'sum', '0.1']]



with open("ARIMAresult3.txt","w") as fp:

    for index in tqdm(range(len(dataset))):

        fp.write("*********************************************************\n")
        fp.write("Processing city----------------{}---using ARIMA-------MergeIndex 12 --".format(dataset[index]))
        # Run ARIMA.py as a subprocess and capture its stdout via os.popen.
        f_tmp = os.popen("python -W ignore ARIMA.py --dataset {} --city {} --MergeIndex 12 --DataRange {} --TrainDays {} --MergeWay {} --test_ratio {}".format(dataset[index][0],dataset[index][1],dataset[index][2],dataset[index][3],dataset[index][4],dataset[index][5]), "r")
        # to record output
        fp.write(f_tmp.read()) 
        fp.flush()
        f_tmp.close()

    fp.write("\n")

================================================
FILE: Experiments/ARIMA/trials.py
================================================
import os

from UCTB.utils import multiple_process


def task_func(share_queue, locker, data, parameters):
    """Worker: run ARIMA.py once for each (dataset, city, extra-args) task.

    Args:
        share_queue: shared queue; a single ``None`` is put on it when this
            worker finishes (used only as a completion signal).
        locker: lock guarding the queue put.
        data: list of ``[dataset, city, extra_cli_args]`` triples.
        parameters: ``parameters[0]`` is the worker id (used only in logs).
    """

    print('Child process %s with pid %s' % (parameters[0], os.getpid()))

    for task in data:
        print('Child process', parameters[0], 'running', task)
        # BUG FIX: ARIMA.py declares its flags lower-case (--dataset/--city);
        # the original '--Dataset %s --City %s' made argparse abort with
        # "unrecognized arguments" for every task.
        exec_str = 'python ARIMA.py --dataset %s --city %s ' % (task[0], task[1])
        if task[2] != '':
            exec_str += task[2]
        os.system(exec_str)

    locker.acquire()
    share_queue.put(None)
    locker.release()


if __name__ == '__main__':

    # One [dataset, city, extra-cli-args] triple per benchmark run.
    task_list = [
        ['Bike', 'NYC', ''],
        ['Bike', 'Chicago', ''],
        ['Bike', 'DC', ''],
        ['Metro', 'Chongqing', ''],
        ['Metro', 'Shanghai', ''],
        ['DiDi', 'Chengdu', ''],
        ['DiDi', 'Xian', ''],
        ['ChargeStation', 'Beijing', '']
    ]

    # Sequential execution (one worker); raise to parallelize the runs.
    n_jobs = 1

    # Tasks are dealt round-robin to workers; reduce is a no-op because
    # task_func only signals completion.
    multiple_process(distribute_list=task_list,
                     partition_func=lambda data, i, n_job: [data[e] for e in range(len(data)) if e % n_job == i],
                     task_func=task_func, n_jobs=n_jobs,
                     reduce_func=lambda x,y: None, parameters=[])



================================================
FILE: Experiments/ASTGCN/ASTGCN.py
================================================
import torch
import os
import GPUtil
import argparse
import configparser

from UCTB.model.ASTGCN import make_model
from UCTB.evaluation import metric
from UCTB.preprocess.GraphGenerator import GraphGenerator
from UCTB.dataset import NodeTrafficLoader
from UCTB.utils.utils_ASTGCN import load_data, train_main, predict_main

from UCTB.preprocess.GraphGenerator import scaled_Laplacian_ASTGCN


# Command-line arguments select the UCTB dataset; the .conf file supplies
# the ASTGCN model/training hyper-parameters.
parser = argparse.ArgumentParser()
parser.add_argument("--config", default='./configurations/PEMS04_astgcn.conf', type=str,
                    help="configuration file path")
# NOTE(review): help text below duplicates "--config"'s; likely copy-paste.
parser.add_argument("--dataset", default='Bike', type=str,
                    help="configuration file path")
parser.add_argument("--city", default='NYC', type=str)
parser.add_argument("--closeness_len", default=6, type=int)
parser.add_argument("--period_len", default=7, type=int)
parser.add_argument("--trend_len", default=4, type=int)
parser.add_argument("--data_range", default="all", type=str)
parser.add_argument("--train_data_length", default="all", type=str)
parser.add_argument("--test_ratio", default=0.1, type=float)
parser.add_argument("--MergeIndex", default=1, type=int)
parser.add_argument("--MergeWay", default="sum", type=str)
args = parser.parse_args()

#config
config = configparser.ConfigParser()
print('Read configuration file: %s' % (args.config))
config.read(args.config)
data_config = config['Data']
training_config = config['Training']
adj_filename = data_config['adj_filename']
graph_signal_matrix_filename = data_config['graph_signal_matrix_filename']
batch_size = int(training_config['batch_size'])
num_of_hours = int(training_config['num_of_hours'])
time_strides = num_of_hours
nb_chev_filter = int(training_config['nb_chev_filter'])
nb_time_filter = int(training_config['nb_time_filter'])
in_channels = int(training_config['in_channels'])
nb_block = int(training_config['nb_block'])
K = int(training_config['K'])  # Chebyshev polynomial order
loss_function = training_config['loss_function']
metric_method = training_config['metric_method']
missing_value = float(training_config['missing_value'])
if config.has_option('Data', 'id_filename'):
    id_filename = data_config['id_filename']
else:
    id_filename = None
# Single-step prediction regardless of the .conf's num_for_predict.
# num_for_predict = int(data_config['num_for_predict'])
num_for_predict = 1
dataset_name = "{}_{}_{}".format(args.dataset, args.city, args.MergeIndex)
model_name = training_config['model_name']

# ctx = training_config['ctx']
# os.environ["CUDA_VISIBLE_DEVICES"] = ctx
# Pick an available GPU; fall back to CPU when none qualifies.
deviceIDs = GPUtil.getAvailable(order='last', limit=8, maxLoad=1, maxMemory=0.7,
                                includeNan=False, excludeID=[], excludeUUID=[])
if len(deviceIDs) == 0:
    current_device = "cpu"
else:
    current_device = str(deviceIDs[0])

USE_CUDA = torch.cuda.is_available()
# BUG FIX: the original unconditionally built 'cuda:{}'.format(current_device),
# which produced the invalid device string 'cuda:cpu' (torch.device raises
# RuntimeError) whenever no GPU was available or CUDA was absent.
if current_device == "cpu" or not USE_CUDA:
    DEVICE = torch.device('cpu')
else:
    DEVICE = torch.device('cuda:{}'.format(current_device))
print("CUDA:", USE_CUDA, DEVICE)


folder_dir = '%s_channel_%d' % (model_name, in_channels)
print('folder_dir:', folder_dir)
params_path = os.path.join('model_dir', dataset_name, folder_dir)
print('params_path:', params_path)


# loading data
uctb_data_loader = NodeTrafficLoader(dataset=args.dataset, city=args.city,
                                     data_range=args.data_range, train_data_length=args.train_data_length,
                                     test_ratio=float(args.test_ratio),
                                     closeness_len=args.closeness_len,
                                     period_len=args.period_len,
                                     trend_len=args.trend_len,
                                     normalize=False,
                                     MergeIndex=args.MergeIndex,
                                     MergeWay=args.MergeWay)


# Build Graph
graph_obj = GraphGenerator(graph='distance', data_loader=uctb_data_loader)

num_of_vertices = uctb_data_loader.station_number
# Model input length = closeness + period + trend windows concatenated.
len_input = uctb_data_loader.closeness_len + \
    uctb_data_loader.period_len + uctb_data_loader.trend_len


#load data
train_loader, train_target_tensor, val_loader, val_target_tensor, test_loader, test_target_tensor= load_data(
    uctb_data_loader, DEVICE, batch_size)
adj_mx = graph_obj.AM[0]
L_tilde = scaled_Laplacian_ASTGCN(adj_mx)

#build model
net = make_model(DEVICE, nb_block, in_channels, K, nb_chev_filter, nb_time_filter, time_strides, L_tilde,
                 num_for_predict, len_input, num_of_vertices)

#train
best_epoch = train_main(training_config, params_path, DEVICE, net, val_loader, train_loader, graph_signal_matrix_filename)

# apply the best model and predict on the test set
test_prediction = predict_main(net, best_epoch, test_loader, test_target_tensor,
                               params_path)

# NOTE(review): the loader was built with normalize=False, yet predictions
# and targets are passed through normalizer.inverse_transform here --
# verify the loader still exposes a (possibly identity) normalizer then.
test_prediction = uctb_data_loader.normalizer.inverse_transform(test_prediction)
y_truth = uctb_data_loader.normalizer.inverse_transform(uctb_data_loader.test_y)
test_rmse = metric.rmse(prediction=test_prediction,
                        target=y_truth)
print('Test RMSE', test_rmse)


================================================
FILE: Experiments/ASTGCN/Runner.py
================================================
import os

# #############################################
# # BenchMark Bike
# #############################################
# ########### NYC ###########
# # os.system("python ASTGCN.py --dataset Bike --city NYC --data_range 0.25 --train_data_length 91 --MergeIndex 3 --MergeWay sum")

# os.system("python ASTGCN.py --dataset Bike --city NYC --data_range 0.5 --train_data_length 183 --MergeIndex 6 --MergeWay sum")

# # os.system("python ASTGCN.py --dataset Bike --city NYC --data_range all --train_data_length 365 --MergeIndex 12 --MergeWay sum")


# # ########### Chicago ###########
# # # os.system("python ASTGCN.py --dataset Bike --city Chicago --data_range 0.25 --train_data_length 91 --MergeIndex 3 --MergeWay sum")

# os.system("python ASTGCN.py --dataset Bike --city Chicago --data_range 0.5 --train_data_length 183 --MergeIndex 6 --MergeWay sum")

# # os.system("python ASTGCN.py --dataset Bike --city Chicago --data_range all --train_data_length 365 --MergeIndex 12 --MergeWay sum")


# # ########### DC ###########
# # # os.system("python ASTGCN.py --dataset Bike --city DC --data_range 0.25 --train_data_length 91 --MergeIndex 3 --MergeWay sum")

# os.system("python ASTGCN.py --dataset Bike --city DC --data_range 0.5 --train_data_length 183 --MergeIndex 6 --MergeWay sum")

# # os.system("python ASTGCN.py --dataset Bike --city DC --data_range all --train_data_length 365 --MergeIndex 12 --MergeWay sum")



# # ###############################################
# # # BenchMark DiDi
# # ###############################################
# # ############# Xian #############
# # # os.system("python ASTGCN.py --dataset DiDi --city Xian --MergeIndex 3 --MergeWay sum")

# os.system("python ASTGCN.py --dataset DiDi --city Xian --MergeIndex 6 --MergeWay sum")

# # os.system("python ASTGCN.py --dataset DiDi --city Xian --MergeIndex 12 --MergeWay sum")

# # ############# Chengdu #############
# # # os.system("python ASTGCN.py --dataset DiDi --city Chengdu --MergeIndex 3 --MergeWay sum")

# os.system("python ASTGCN.py --dataset DiDi --city Chengdu --MergeIndex 6 --MergeWay sum")

# # os.system("python ASTGCN.py --dataset DiDi --city Chengdu --MergeIndex 12 --MergeWay sum")



# # ###############################################
# # # BenchMark Metro
# # ###############################################
# # ############# Chongqing #############
# # # os.system("python ASTGCN.py --dataset Metro --city Chongqing --MergeIndex 3 --MergeWay sum")

#os.system("python ASTGCN.py --dataset Metro --city Chongqing --MergeIndex 6 --MergeWay sum")

# # os.system("python ASTGCN.py --dataset Metro --city Chongqing --MergeIndex 12 --MergeWay sum")


# # ############# Shanghai #############
# # # os.system("python ASTGCN.py --dataset Metro --city Shanghai --MergeIndex 3 --MergeWay sum")

# os.system("python ASTGCN.py --dataset Metro --city Shanghai --MergeIndex 6 --MergeWay sum")

# os.system("python ASTGCN.py --dataset Metro --city Shanghai --MergeIndex 12 --MergeWay sum")



# # ###############################################
# # # BenchMark ChargeStation
# # ###############################################

# os.system("python ASTGCN.py --dataset ChargeStation --city Beijing --MergeIndex 1 --MergeWay max")

# os.system("python ASTGCN.py --dataset ChargeStation --city Beijing --MergeIndex 2 --MergeWay max")



# ###############################################
# # BenchMark METR-LA
# ###############################################

# # os.system("python ASTGCN.py --dataset METR --city LA --MergeIndex 3 --MergeWay average")

# os.system("python ASTGCN.py --dataset METR --city LA --MergeIndex 6 --MergeWay average")

# os.system("python ASTGCN.py --dataset METR --city LA --MergeIndex 12 --MergeWay average")


# # ###############################################
# # # BenchMark PEMS-BAY
# # ###############################################
# # # os.system("python ASTGCN.py --dataset PEMS --city BAY --MergeIndex 3 --MergeWay average")

# os.system("python ASTGCN.py --dataset PEMS --city BAY --MergeIndex 6 --MergeWay average")

# # os.system("python ASTGCN.py --dataset PEMS --city BAY --MergeIndex 12 --MergeWay average")


================================================
FILE: Experiments/ASTGCN/configurations/METR_LA_astgcn.conf
================================================
[Data]
adj_filename = ./data/METR_LA/distance_LA.csv
graph_signal_matrix_filename = ./data/METR_LA/METR_LA.npz
num_of_vertices = 207
points_per_hour = 12
num_for_predict = 12
len_input = 12
dataset_name = METR_LA

[Training]
ctx = 0
in_channels = 1
nb_block = 2
K = 3
nb_chev_filter = 64
nb_time_filter = 64
batch_size = 16
model_name = astgcn_r
dataset_name = METR_LA
num_of_weeks = 0
num_of_days = 0
num_of_hours = 1
start_epoch = 0
epochs = 100
learning_rate = 0.001
loss_function = masked_mae
metric_method=mask
missing_value=0.0


================================================
FILE: Experiments/ASTGCN/configurations/PEMS04_astgcn.conf
================================================
[Data]
adj_filename = ./data/PEMS04/distance.csv
graph_signal_matrix_filename = ./data/PEMS04/PEMS04.npz
num_of_vertices = 307
points_per_hour = 12
num_for_predict = 12
len_input = 12
dataset_name = PEMS04

[Training]
ctx = 0
in_channels = 1
nb_block = 2
K = 3
nb_chev_filter = 64
nb_time_filter = 64
batch_size = 32
model_name = astgcn_r
dataset_name = PEMS04
num_of_weeks = 0
num_of_days = 0
num_of_hours = 1
start_epoch = 0
epochs = 400
learning_rate = 0.001
loss_function = mse
metric_method = unmask
missing_value=0.0


================================================
FILE: Experiments/ASTGCN/configurations/PEMS08_astgcn.conf
================================================
[Data]
adj_filename = ../data/PEMS08/PEMS08.csv
graph_signal_matrix_filename = ../data/PEMS08/PEMS08.npz
num_of_vertices = 170
points_per_hour = 12
num_for_predict = 12
len_input = 12
dataset_name = PEMS08

[Training]
ctx = 3
in_channels = 1
nb_block = 2
K = 3
nb_chev_filter = 64
nb_time_filter = 64
batch_size = 32
model_name = astgcn_r
dataset_name = PEMS08
num_of_weeks = 0
num_of_days = 0
num_of_hours = 1
start_epoch = 0
epochs = 80
learning_rate = 0.001
loss_function = mse
metric_method = unmask
missing_value=0.0

================================================
FILE: Experiments/CustomizedDemo/Runner_topk.py
================================================
import os
import numpy as np
import heapq

#############################################
# BenchMark Metro Shanghai (topK graph demo)
#############################################
# NOTE(review): numpy and heapq are imported but unused in this runner.

# TMeta variant with the TopK graph only.
os.system('python STMeta_Obj_topk.py -m STMeta_v0.model.yml -d metro_shanghai.data.yml '
          '-p graph:TopK,MergeIndex:12')

# GCLSTM variant with Distance, Correlation, Line and TopK graphs combined.
os.system('python STMeta_Obj_topk.py -m STMeta_v1.model.yml -d metro_shanghai.data.yml '
          '-p graph:Distance-Correlation-Line-TopK,MergeIndex:12')
          



================================================
FILE: Experiments/CustomizedDemo/STMeta_Obj_topk.py
================================================
import os
import nni
import yaml
import argparse
import GPUtil
import numpy as np
from UCTB.dataset import DataSet

from UCTB.dataset import NodeTrafficLoader
from UCTB.model import STMeta
from UCTB.evaluation import metric
from UCTB.preprocess.time_utils import is_work_day_china, is_work_day_america

from UCTB.preprocess import Normalizer, SplitData
#####################################################################
# argument parser
parser = argparse.ArgumentParser(description="Argument Parser")
parser.add_argument('-m', '--model', default='STMeta_v0.model.yml')
parser.add_argument('-d', '--data', default='didi_chengdu.data.yml')
# comma-separated "key:value" overrides applied on top of the yml files
parser.add_argument('-p', '--update_params', default='')

# Parse params: merge the model yml and data yml into one args dict.
terminal_vars = vars(parser.parse_args())
yml_files = [terminal_vars['model'], terminal_vars['data']]
args = {}
for yml_file in yml_files:
    with open(yml_file, 'r') as f:
        # BUG FIX / security: yaml.load() without an explicit Loader is
        # deprecated and can construct arbitrary objects; the config files
        # contain only plain scalars and maps, so safe_load is sufficient.
        args.update(yaml.safe_load(f))

# Command-line overrides take precedence over the yml contents.
if len(terminal_vars['update_params']) > 0:
    args.update({e.split(':')[0]: e.split(':')[1] for e in terminal_vars['update_params'].split(',')})
    print({e.split(':')[0]: e.split(':')[1] for e in terminal_vars['update_params'].split(',')})

# NNI hyper-parameter search: merge the trial's parameters when running
# under NNI, and tag the run mark with the sequence id.
nni_params = nni.get_next_parameter()
nni_sid = nni.get_sequence_id()
if nni_params:
    args.update(nni_params)
    args['mark'] += str(nni_sid)

#####################################################################
# Generate code_version
# Encodes model version, window lengths, graph initials, gcn config and
# time granularity into a unique run identifier.
# NOTE(review): int(args["MergeIndex"])*5 assumes a 5-minute base time
# slot -- confirm against the dataset's time_fitness.
code_version = '{}_C{}P{}T{}_G{}_K{}L{}_F{}_{}'.format(args['model_version'],
                                                   args['closeness_len'], args['period_len'],
                                                   args['trend_len'],
                                                   ''.join([e[0] for e in args['graph'].split('-')]),
                                                   args['gcn_k'], args['gcn_layers'],int(args["MergeIndex"])*5, args['mark'])
model_dir_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'model_dir')
model_dir_path = os.path.join(model_dir_path, args['group'])
#####################################################################

data_loader = NodeTrafficLoader(dataset=args['dataset'], city=args['city'],
                                data_range=args['data_range'], train_data_length=args['train_data_length'],
                                test_ratio=0.1,
                                closeness_len=args['closeness_len'],
                                period_len=args['period_len'],
                                trend_len=args['trend_len'],
                                normalize=args['normalize'],
                                with_tpe=True if args['st_method'] == 'gal_gcn' else False,
                                workday_parser=is_work_day_america if args[
                                    'dataset'] == 'Bike' else is_work_day_china,
                                MergeIndex=args['MergeIndex'],
                                MergeWay="max" if args["dataset"] == "ChargeStation" else "sum")


# Call GraphGenerator to initialize and generate LM
# (topKGraph extends GraphGenerator with a 'TopK' graph, see topKGraph.py)
from topKGraph import topKGraph
graphBuilder = topKGraph(graph=args['graph'],
                         data_loader=data_loader,
                         threshold_distance=args['threshold_distance'],
                         threshold_correlation=args['threshold_correlation'],
                         threshold_interaction=args['threshold_interaction'],
                         threshold_neighbour=args['threshold_neighbour'])

print("TimeFitness",data_loader.dataset.time_fitness)
print("TimeRange",data_loader.dataset.time_range)

# Used later to map predictions back to the original scale.
de_normalizer = None if args['normalize'] is False else data_loader.normalizer.min_max_denormal

# Pick an available GPU ('-1' means CPU); under NNI, spread trials across GPUs.
deviceIDs = GPUtil.getAvailable(order='last', limit=8, maxLoad=1, maxMemory=0.7,
                                includeNan=False, excludeID=[], excludeUUID=[])

if len(deviceIDs) == 0:
    current_device = '-1'
else:
    if nni_params:
        current_device = str(deviceIDs[int(nni_sid) % len(deviceIDs)])
    else:
        current_device = str(deviceIDs[0])

# Instantiate and build the STMeta graph model from the merged yml config.
STMeta_obj = STMeta(num_node=data_loader.station_number,
                    num_graph=graphBuilder.LM.shape[0],
                    external_dim=data_loader.external_dim,
                    closeness_len=args['closeness_len'],
                    period_len=args['period_len'],
                    trend_len=args['trend_len'],
                    gcn_k=int(args.get('gcn_k', 0)),
                    gcn_layers=int(args.get('gcn_layers', 0)),
                    gclstm_layers=int(args['gclstm_layers']),
                    num_hidden_units=args['num_hidden_units'],
                    num_dense_units=args['num_filter_conv1x1'],
                    # temporal attention parameters
                    tpe_dim=data_loader.tpe_dim,
                    temporal_gal_units=args.get('temporal_gal_units'),
                    temporal_gal_num_heads=args.get('temporal_gal_num_heads'),
                    temporal_gal_layers=args.get('temporal_gal_layers'),
                    # merge parameters
                    graph_merge_gal_units=args['graph_merge_gal_units'],
                    graph_merge_gal_num_heads=args['graph_merge_gal_num_heads'],
                    temporal_merge_gal_units=args['temporal_merge_gal_units'],
                    temporal_merge_gal_num_heads=args['temporal_merge_gal_num_heads'],
                    # network structure parameters
                    st_method=args['st_method'],  # gclstm
                    temporal_merge=args['temporal_merge'],  # gal
                    graph_merge=args['graph_merge'],  # concat
                    build_transfer=args['build_transfer'],
                    lr=float(args['lr']),
                    code_version=code_version,
                    model_dir=model_dir_path,
                    gpu_device=current_device)

STMeta_obj.build()

print(args['dataset'], args['city'], code_version)
print('Number of trainable variables', STMeta_obj.trainable_vars)
print('Number of training samples', data_loader.train_sequence_len)

# Training (skipped when the yml sets train: False; the saved model is
# then loaded and evaluated directly).
if args['train']:
    STMeta_obj.fit(closeness_feature=data_loader.train_closeness,
                   period_feature=data_loader.train_period,
                   trend_feature=data_loader.train_trend,
                   laplace_matrix=graphBuilder.LM,
                   target=data_loader.train_y,
                   external_feature=data_loader.train_ef,
                   sequence_length=data_loader.train_sequence_len,
                   output_names=('loss', ),
                   evaluate_loss_name='loss',
                   op_names=('train_op', ),
                   batch_size=int(args['batch_size']),
                   max_epoch=int(args['max_epoch']),
                   validate_ratio=0.1,
                   early_stop_method='t-test',
                   early_stop_length=args['early_stop_length'],
                   early_stop_patience=args['early_stop_patience'],
                   verbose=True,
                   save_model=True)

# Restore the best checkpoint saved under this run's code_version.
STMeta_obj.load(code_version)

prediction = STMeta_obj.predict(closeness_feature=data_loader.test_closeness,
                                period_feature=data_loader.test_period,
                                trend_feature=data_loader.test_trend,
                                laplace_matrix=graphBuilder.LM,
                                target=data_loader.test_y,
                                external_feature=data_loader.test_ef,
                                output_names=('prediction', ),
                                sequence_length=data_loader.test_sequence_len,
                                cache_volume=int(args['batch_size']), )

test_prediction = prediction['prediction']

# Map predictions and targets back to the original scale if normalized.
if de_normalizer:
    test_prediction = de_normalizer(test_prediction)
    data_loader.test_y = de_normalizer(data_loader.test_y)

test_rmse, test_mape = metric.rmse(prediction=test_prediction, target=data_loader.test_y),\
                       metric.mape(prediction=test_prediction, target=data_loader.test_y, threshold=0)

# Evaluate
val_loss = STMeta_obj.load_event_scalar('val_loss')

best_val_loss = min([e[-1] for e in val_loss])

# NOTE(review): applies min_max_denormal to a scalar loss value -- confirm
# the denormalizer is meaningful for losses, not just traffic volumes.
if de_normalizer:
    best_val_loss = de_normalizer(best_val_loss)

print('Best val result', best_val_loss)
print('Test result', test_rmse, test_mape)

# Wall-clock time between val_loss events; outlier gaps (>10x the minimum,
# e.g. pauses between sessions) are excluded before converting to hours.
time_consumption = [val_loss[e][0] - val_loss[e-1][0] for e in range(1, len(val_loss))]
time_consumption = sum([e for e in time_consumption if e < (min(time_consumption) * 10)]) / 3600
print('Converged using %.2f hour / %s epochs' % (time_consumption, STMeta_obj._global_step))


# Report back to NNI when running inside a hyper-parameter search trial.
if nni_params:
    nni.report_final_result({
        'default': best_val_loss,
        'test-rmse': test_rmse,
        'test-mape': test_mape
    })


================================================
FILE: Experiments/CustomizedDemo/STMeta_v0.model.yml
================================================
# network structure parameters
st_method: 'LSTM'
temporal_merge: 'gal'
graph_merge: 'gal'

# gcn parameters
gcn_k: 0
gcn_layers: 1
gclstm_layers: 1

# LSTM units
num_hidden_units: 64
# dense units
num_filter_conv1x1: 32

build_transfer: False

# merge parameters
graph_merge_gal_units: 64
graph_merge_gal_num_heads: 2
temporal_merge_gal_units: 64
temporal_merge_gal_num_heads: 2

model_version: 'TMeta'

================================================
FILE: Experiments/CustomizedDemo/STMeta_v1.model.yml
================================================
# network structure parameters
st_method: 'GCLSTM'
temporal_merge: 'gal'
graph_merge: 'gal'

# gcn parameters
gcn_k: 1
gcn_layers: 1
gclstm_layers: 1

# LSTM units
num_hidden_units: 64
# dense units
num_filter_conv1x1: 32

build_transfer: False

# merge parameters
graph_merge_gal_units: 64
graph_merge_gal_num_heads: 2
temporal_merge_gal_units: 64
temporal_merge_gal_num_heads: 2

model_version: 'V1'

================================================
FILE: Experiments/CustomizedDemo/STMeta_v2.model.yml
================================================
# network structure parameters
st_method: 'GCLSTM'
temporal_merge: 'concat'
graph_merge: 'gal'

# gcn parameters
gcn_k: 1
gcn_layers: 1
gclstm_layers: 1

# LSTM units
num_hidden_units: 64
# dense units
num_filter_conv1x1: 32

build_transfer: False

# merge parameters
graph_merge_gal_units: 64
graph_merge_gal_num_heads: 2
temporal_merge_gal_units: 64
temporal_merge_gal_num_heads: 2

model_version: 'V2'

================================================
FILE: Experiments/CustomizedDemo/STMeta_v3.model.yml
================================================
# network structure parameters
st_method: 'DCRNN'
temporal_merge: 'gal'
graph_merge: 'gal'

# gcn parameters
gcn_k: 1
gcn_layers: 1
gclstm_layers: 1

# LSTM units
num_hidden_units: 64
# dense units
num_filter_conv1x1: 32

# temporal process params
temporal_gal_units: 32
temporal_gal_num_heads: 2
temporal_gal_layers: 4

build_transfer: False

# merge parameters
graph_merge_gal_units: 64
graph_merge_gal_num_heads: 2
temporal_merge_gal_units: 64
temporal_merge_gal_num_heads: 2

model_version: 'V3'

================================================
FILE: Experiments/CustomizedDemo/metro_shanghai.data.yml
================================================
# dataset and city
dataset: Metro
city: Shanghai

closeness_len: 6
period_len: 7
trend_len: 4

graph: Distance-Correlation-Line

data_range: all
train_data_length: all

threshold_distance: 5000
threshold_correlation: 0.7
threshold_interaction: 30
threshold_neighbour: 23

normalize: True
train: True

lr: 1e-5
early_stop_length: 200
early_stop_patience: 0.1
max_epoch: 20000
batch_size: 64

group: Shanghai
mark: BM

================================================
FILE: Experiments/CustomizedDemo/topKGraph.py
================================================
import heapq
import numpy as np
from UCTB.preprocess.GraphGenerator import GraphGenerator


class topKGraph(GraphGenerator):  # Init NodeTrafficLoader
    """GraphGenerator extension adding a top-K "neighbour" graph.

    When ``'topk'`` appears in the dash-separated ``graph`` argument, a 0/1
    adjacency matrix linking each station to its K selected stations (K =
    ``threshold_neighbour``) and the corresponding Laplacian are appended to
    the stacked ``self.AM`` / ``self.LM`` matrices built by the base class.
    """

    def __init__(self, **kwargs):
        """Build the base graphs, then append the 'topk' graph if requested.

        Expects ``kwargs['graph']`` (dash-separated graph names) and
        ``kwargs['threshold_neighbour']`` (K, the neighbour count).
        """
        super(topKGraph, self).__init__(**kwargs)

        for graph_name in kwargs['graph'].split('-'):
            if graph_name.lower() != 'topk':
                continue
            # Station (lat, lng) pairs taken from columns 2:4 of
            # node_station_info; restricted to stations actually used.
            lat_lng_list = np.array([[float(e1) for e1 in e[2:4]]
                                     for e in self.dataset.node_station_info])
            AM = self.neighbour_adjacent(lat_lng_list[self.traffic_data_index],
                                         threshold=int(kwargs['threshold_neighbour']))
            LM = self.adjacent_to_laplacian(AM)

            # Append to (or initialise) the stacked adjacency matrices.
            if self.AM.shape[0] == 0:
                self.AM = np.array([AM], dtype=np.float32)
            else:
                self.AM = np.vstack((self.AM, AM[np.newaxis, :]))

            if self.LM.shape[0] == 0:
                self.LM = np.array([LM], dtype=np.float32)
            else:
                self.LM = np.vstack((self.LM, LM[np.newaxis, :]))

    def neighbour_adjacent(self, lat_lng_list, threshold):
        """Return a 0/1 float32 adjacency matrix over the given stations.

        For every station i, the ``threshold`` stations with the largest
        haversine distance to i are marked as its neighbours.

        Bug fix: the original returned ``(adjacent_matrix == 1)`` computed on
        the *raw distance* matrix, which discards the top-K selection stored
        in ``dis_matrix`` and yields an (almost) empty graph.  The selection
        mask itself is returned instead.

        NOTE(review): ``heapq.nlargest`` selects the *farthest* stations; if
        nearest neighbours are intended, switch to ``heapq.nsmallest`` —
        confirm against the calling experiment.
        """
        n = len(lat_lng_list)
        dis_matrix = np.zeros([n, n], dtype=np.float32)
        for i in range(n):
            for j in range(n):
                dis_matrix[i][j] = self.haversine(
                    lat_lng_list[i][0], lat_lng_list[i][1],
                    lat_lng_list[j][0], lat_lng_list[j][1])

        adjacent_matrix = np.zeros([n, n], dtype=np.float32)
        for i in range(n):
            # Indices of the K largest entries of row i.
            ind = heapq.nlargest(threshold, range(n), dis_matrix[i].take)
            adjacent_matrix[i][ind] = 1
        return adjacent_matrix


================================================
FILE: Experiments/DCRNN/DCRNN.py
================================================
import os
import numpy as np

from UCTB.dataset import NodeTrafficLoader
from UCTB.model import DCRNN
from UCTB.evaluation import metric

from UCTB.preprocess.GraphGenerator import GraphGenerator

class my_data_loader(NodeTrafficLoader):
    """NodeTrafficLoader that also builds graph adjacency/Laplacian matrices."""

    def __init__(self, **kwargs):
        super(my_data_loader, self).__init__(**kwargs)

        # Derive AM / LM from the loaded traffic data via GraphGenerator.
        graph_obj = GraphGenerator(graph=kwargs['graph'], data_loader=self)
        self.AM = graph_obj.AM
        self.LM = graph_obj.LM

    def diffusion_matrix(self, filter_type='random_walk'):
        """Return stacked random-walk diffusion matrices built from self.AM[0].

        'random_walk' yields one matrix, 'dual_random_walk' two (forward and
        reverse); any other filter_type yields an empty array.
        """
        def _random_walk(adjacent_mx):
            # D^-1 * A, with zero rows (isolated nodes) mapped to zero.
            degree = np.array(adjacent_mx.sum(1))
            inv_degree = np.power(degree, -1).flatten()
            inv_degree[np.isinf(inv_degree)] = 0.
            return np.diag(inv_degree).dot(adjacent_mx)

        matrices = []
        if filter_type == "random_walk":
            matrices.append(_random_walk(self.AM[0]).T)
        elif filter_type == "dual_random_walk":
            matrices.append(_random_walk(self.AM[0]).T)
            matrices.append(_random_walk(self.AM[0].T).T)
        return np.array(matrices, dtype=np.float32)


def param_parser():
    """Build the command-line parser for the DCRNN experiment script.

    Returns:
        argparse.ArgumentParser: parser covering data-source, network,
        training-data, graph, training, device, version-control and merge
        options.  String defaults with a ``type`` are converted by argparse.
    """
    import argparse
    arg_parser = argparse.ArgumentParser(description="Argument Parser")

    # --- data source ---
    arg_parser.add_argument('--Dataset', default='DiDi')
    arg_parser.add_argument('--City', default='Chengdu')
    # --- network parameters ---
    arg_parser.add_argument('--CT', default='6', type=int)
    arg_parser.add_argument('--PT', default='7', type=int)
    arg_parser.add_argument('--TT', default='4', type=int)
    arg_parser.add_argument('--K', default='1', type=int)
    arg_parser.add_argument('--L', default='1', type=int)
    arg_parser.add_argument('--Graph', default='Distance-Correlation-Interaction')
    arg_parser.add_argument('--LSTMUnits', default='64', type=int)
    arg_parser.add_argument('--LSTMLayers', default='3', type=int)
    # --- training data ---
    arg_parser.add_argument('--DataRange', default='All')
    arg_parser.add_argument('--TrainDays', default='365')
    arg_parser.add_argument('--test_ratio', default=0.1, type=float)
    # --- graph thresholds ---
    arg_parser.add_argument('--TC', default='0', type=float)
    arg_parser.add_argument('--TD', default='1000', type=float)
    arg_parser.add_argument('--TI', default='500', type=float)
    # --- training ---
    arg_parser.add_argument('--Epoch', default='5000', type=int)
    arg_parser.add_argument('--Train', default='True')
    arg_parser.add_argument('--lr', default='5e-4', type=float)
    arg_parser.add_argument('--ESlength', default='50', type=int)
    arg_parser.add_argument('--patience', default='0.1', type=float)
    arg_parser.add_argument('--BatchSize', default='64', type=int)
    # --- device ---
    arg_parser.add_argument('--Device', default='0', type=str)
    # --- version control ---
    arg_parser.add_argument('--Group', default='DebugGroup')
    arg_parser.add_argument('--CodeVersion', default='ST_MGCN_Debug')
    # --- temporal merge ---
    arg_parser.add_argument('--MergeIndex', default=6, type=int)
    arg_parser.add_argument('--MergeWay', default='sum', type=str)
    return arg_parser


# Parse experiment options defined in param_parser().
parser = param_parser()
args = parser.parse_args()

# Checkpoints are stored per city; the code version string encodes the
# graph-name initials, diffusion order K, layer count L and (MergeIndex*5)
# minute time granularity.
model_dir = os.path.join('model_dir', args.City)
code_version = 'DCRNN_{}_K{}L{}_{}_F{}'.format(''.join([e[0] for e in args.Graph.split('-')]),
                                           args.K, args.L, args.CodeVersion, int(args.MergeIndex)*5)

# Load normalized node traffic and build graph matrices (AM/LM).
data_loader = my_data_loader(dataset=args.Dataset, city=args.City,
                             test_ratio=float(args.test_ratio),
                             data_range=args.DataRange, train_data_length=args.TrainDays,
                             closeness_len=int(args.CT), period_len=int(args.PT), trend_len=int(args.TT),
                             threshold_interaction=args.TI, threshold_distance=args.TD,
                             threshold_correlation=args.TC, graph=args.Graph, with_lm=True, normalize=True, MergeIndex=args.MergeIndex,
                             MergeWay=args.MergeWay)

print('Code version', args.Dataset, args.City, code_version)

print('Number of training samples', data_loader.train_sequence_len)

# Random-walk diffusion matrices derived from the first adjacency matrix.
diffusion_matrix = data_loader.diffusion_matrix()

# One-step-ahead DCRNN over the concatenated closeness/period/trend window.
DCRNN_Obj = DCRNN(num_node=data_loader.station_number,
                  num_diffusion_matrix=diffusion_matrix.shape[0],
                  num_rnn_units=args.LSTMUnits,
                  num_rnn_layers=args.LSTMLayers,
                  max_diffusion_step=args.K,
                  seq_len=data_loader.closeness_len + data_loader.period_len + data_loader.trend_len,
                  use_curriculum_learning=False,
                  input_dim=1,
                  output_dim=1,
                  cl_decay_steps=1000,
                  target_len=1,
                  lr=args.lr,
                  epsilon=1e-3,
                  optimizer_name='Adam',
                  code_version=code_version,
                  model_dir=model_dir,
                  gpu_device=args.Device)

# Build tf-graph
DCRNN_Obj.build()

print('Number of trainable parameters', DCRNN_Obj.trainable_vars)

# Training (closeness features only; period/trend inputs are disabled below).
DCRNN_Obj.fit(inputs=
                # np.concatenate((
                    # data_loader.train_trend.transpose([0, 2, 1, 3]),
                    # data_loader.train_period.transpose([0, 2, 1, 3]),
                    data_loader.train_closeness.transpose([0, 2, 1, 3]),
                # ), axis=1),
              diffusion_matrix=diffusion_matrix,
              target=data_loader.train_y.reshape([-1, 1, data_loader.station_number, 1]),
              batch_size=args.BatchSize,
              sequence_length=data_loader.train_sequence_len,
              early_stop_length=args.ESlength,
              max_epoch=args.Epoch)

# Predict
prediction = DCRNN_Obj.predict(inputs=
                    # np.concatenate((
                    # data_loader.test_trend.transpose([0, 2, 1, 3]),
                    # data_loader.test_period.transpose([0, 2, 1, 3]),
                    data_loader.test_closeness.transpose([0, 2, 1, 3]),
                    # ), axis=1),
                               diffusion_matrix=diffusion_matrix,
                               target=data_loader.test_y.reshape([-1, 1, data_loader.station_number, 1]),
                               sequence_length=data_loader.test_sequence_len,
                               output_names=['prediction'])

# Evaluate RMSE in the original (de-normalized) scale.
print('Test result', metric.rmse(prediction=data_loader.normalizer.inverse_transform(prediction['prediction']),
                                 target=data_loader.normalizer.inverse_transform(data_loader.test_y.transpose([0, 2, 1]))))

# Each val_loss record is (wall-clock time, ..., loss value).
val_loss = DCRNN_Obj.load_event_scalar('val_loss')

best_val_loss = min([e[-1] for e in val_loss])

best_val_loss = data_loader.normalizer.inverse_transform(best_val_loss)

print('Best val result', best_val_loss)

# Sum per-epoch durations, dropping outliers >= 10x the fastest epoch
# (presumably pauses between runs — confirm against the event log).
time_consumption = [val_loss[e][0] - val_loss[e-1][0] for e in range(1, len(val_loss))]
time_consumption = sum([e for e in time_consumption if e < (min(time_consumption) * 10)]) / 3600
print('Converged using %.2f hour / %s epochs' % (time_consumption, DCRNN_Obj._global_step))

================================================
FILE: Experiments/DCRNN/bike_trial.py
================================================
import os

import warnings
warnings.filterwarnings("ignore")

# Command-line prefix shared by every bike-sharing DCRNN run below.
shared_params_dcrnn = ('python DCRNN.py '
                         '--Dataset Bike '
                         '--CT 6 '
                         '--PT 0 '
                         '--TT 0 '
                         '--K 1 '
                         '--LSTMUnits 64 '
                         '--LSTMLayers 1 '
                         '--DataRange All '
                         '--TrainDays 365 '
                         '--TC 0 '
                         '--TD 1000 '
                         '--TI 500 '
                         '--Epoch 10000 '
                         '--Train True '
                         '--lr 5e-4 '
                         '--patience 0.1 '
                         '--ESlength 100 '
                         '--BatchSize 32 '
                         '--MergeWay sum '
                         '--Device 0 '
                         '--CodeVersion V0')

if __name__ == "__main__":
    # Per time granularity: (MergeIndex, extra data-range flags).
    merge_settings = (
        (1, ' --DataRange 0.125 --TrainDays 60'),
        (3, ' --DataRange 0.25 --TrainDays 91'),
        (6, ' --DataRange 0.5 --TrainDays 183'),
        (12, ''),
    )
    for merge_index, extra_flags in merge_settings:
        for city in ('NYC', 'DC', 'Chicago'):
            os.system(shared_params_dcrnn
                      + ' --City {} --Graph Distance --MergeIndex {}{}'.format(city, merge_index, extra_flags))

================================================
FILE: Experiments/DCRNN/cs_trial.py
================================================
import os

import warnings
warnings.filterwarnings("ignore")

# Command-line prefix shared by the charge-station DCRNN runs below.
shared_params_dcrnn = ('python DCRNN.py '
                         '--Dataset ChargeStation '
                         '--CT 6 '
                         '--PT 0 '
                         '--TT 0 '
                         '--LSTMUnits 64 '
                         '--LSTMLayers 1 '
                         '--DataRange All '
                         '--TrainDays 365 '
                         '--TC 0.1 '
                         '--TD 1000 '
                         '--TI 500 '
                         '--Epoch 10000 '
                         '--Train True '
                         '--lr 5e-4 '
                         '--patience 0.1 '
                         '--ESlength 100 '
                         '--BatchSize 16 '
                         '--MergeWay max '
                         '--Device 1 ')

if __name__ == "__main__":
    # Multiple graphs: one Beijing run per temporal granularity.
    for merge_index in (1, 2):
        os.system(shared_params_dcrnn
                  + ' --City Beijing --K 1 --L 1  --Graph Distance --MergeIndex {}'.format(merge_index))

================================================
FILE: Experiments/DCRNN/didi_trial.py
================================================
import os

import warnings
warnings.filterwarnings("ignore")

# Command-line prefix shared by every DiDi DCRNN run below.
shared_params_st_mgcn = ('python DCRNN.py '
                         '--Dataset DiDi '
                         '--CT 6 '
                         '--PT 0 '
                         '--TT 0 '
                         '--K 1 '
                         '--LSTMUnits 64 '
                         '--LSTMLayers 1 '
                         '--DataRange All '
                         '--TrainDays 365 '
                         '--TC 0.65 '
                         '--TD 7500 '
                         '--TI 30 '
                         '--Epoch 10000 '
                         '--Train True '
                         '--lr 1e-4 '
                         '--patience 0.1 '
                         '--ESlength 100 '
                         '--BatchSize 16 '
                         '--MergeWay sum '
                         '--Device 1 '
                         '--CodeVersion V0')

if __name__ == "__main__":
    # One run per (time granularity, city) pair.
    for merge_index in (1, 3, 6, 12):
        for city in ('Chengdu', 'Xian'):
            os.system(shared_params_st_mgcn
                      + ' --City {} --Graph Distance --MergeIndex {}'.format(city, merge_index))

================================================
FILE: Experiments/DCRNN/metr_trial.py
================================================
import os

import warnings
warnings.filterwarnings("ignore")

# Command-line prefix shared by every METR-LA DCRNN run below.
shared_params_st_mgcn = ('python DCRNN.py '
                         '--Dataset METR '
                         '--CT 6 '
                         '--PT 0 '
                         '--TT 0 '
                         '--K 1 '
                         '--LSTMUnits 64 '
                         '--LSTMLayers 1 '
                         '--DataRange All '
                         '--TrainDays All '
                         '--TC 0.7 '
                         '--TD 5500 '
                         '--TI 30 '
                         '--Epoch 20000 '
                         '--test_ratio 0.2 '
                         '--Train True '
                         '--lr 1e-4 '
                         '--patience 0.1 '
                         '--ESlength 100 '
                         '--BatchSize 16 '
                         '--MergeWay average '
                         '--Device 1 '
                         '--CodeVersion V0')

if __name__ == "__main__":
    # One LA run per temporal granularity.
    for merge_index in (1, 3, 6, 12):
        os.system(shared_params_st_mgcn
                  + ' --City LA --Graph Distance --MergeIndex {}'.format(merge_index))

  

================================================
FILE: Experiments/DCRNN/metro_trial.py
================================================
import os

import warnings
warnings.filterwarnings("ignore")

# Command-line prefix shared by every metro DCRNN run below.
shared_params_st_mgcn = ('python DCRNN.py '
                         '--Dataset Metro '
                         '--CT 6 '
                         '--PT 0 '
                         '--TT 0 '
                         '--K 1 '
                         '--LSTMUnits 64 '
                         '--LSTMLayers 1 '
                         '--DataRange All '
                         '--TrainDays 365 '
                         '--TC 0.7 '
                         '--TD 5000 '
                         '--TI 30 '
                         '--Epoch 20000 '
                         '--Train True '
                         '--lr 1e-4 '
                         '--patience 0.1 '
                         '--ESlength 100 '
                         '--BatchSize 16 '
                         '--MergeWay sum '
                         '--Device 1 '
                         '--CodeVersion V0')

if __name__ == "__main__":
    # One run per (time granularity, city) pair.
    for merge_index in (1, 3, 6, 12):
        for city in ('Shanghai', 'Chongqing'):
            os.system(shared_params_st_mgcn
                      + ' --City {} --Graph Distance --MergeIndex {}'.format(city, merge_index))

================================================
FILE: Experiments/DCRNN/pems_trial.py
================================================
import os

import warnings
warnings.filterwarnings("ignore")

# Command-line prefix shared by every PEMS-BAY DCRNN run below.
shared_params_st_mgcn = ('python DCRNN.py '
                         '--Dataset PEMS '
                         '--CT 6 '
                         '--PT 0 '
                         '--TT 0 '
                         '--K 1 '
                         '--LSTMUnits 64 '
                         '--LSTMLayers 1 '
                         '--DataRange All '
                         '--TrainDays 365 '
                         '--TC 0.7 '
                         '--TD 5500 '
                         '--TI 30 '
                         '--Epoch 20000 '
                         '--test_ratio 0.2 '
                         '--Train True '
                         '--lr 1e-4 '
                         '--patience 0.1 '
                         '--ESlength 100 '
                         '--BatchSize 16 '
                         '--MergeWay average '
                         '--Device 1 '
                         '--CodeVersion V0')

if __name__ == "__main__":
    # One BAY run per temporal granularity.
    for merge_index in (1, 3, 6, 12):
        os.system(shared_params_st_mgcn
                  + ' --City BAY --Graph Distance --MergeIndex {}'.format(merge_index))

  

================================================
FILE: Experiments/DCRNN/street_didi_trial.py
================================================
import os

import warnings
warnings.filterwarnings("ignore")

# Command-line prefix shared by every street-level DiDi DCRNN run below.
shared_params_st_mgcn = ('python DCRNN.py '
                         '--Dataset DiDi '
                         '--CT 6 '
                         '--PT 0 '
                         '--TT 0 '
                         '--K 1 '
                         '--LSTMUnits 64 '
                         '--LSTMLayers 1 '
                         '--DataRange All '
                         '--TrainDays 365 '
                         '--TC 0.65 '
                         '--TD 7500 '
                         '--TI 30 '
                         '--Epoch 10000 '
                         '--Train True '
                         '--lr 1e-4 '
                         '--patience 0.1 '
                         '--ESlength 100 '
                         '--BatchSize 16 '
                         '--MergeWay sum '
                         '--Device 1 '
                         '--CodeVersion V0')

if __name__ == "__main__":
    # One run per (time granularity, city) pair; MergeIndex 1 is skipped.
    for merge_index in (3, 6, 12):
        for city in ('Chengdu_Street', 'Xian_Street'):
            os.system(shared_params_st_mgcn
                      + ' --City {} --Graph Distance --MergeIndex {}'.format(city, merge_index))

================================================
FILE: Experiments/DeepST/DeepST.py
================================================
import nni

from UCTB.dataset import GridTrafficLoader
from UCTB.model import DeepST
from UCTB.evaluation import metric

# Experiment configuration; any key may be overridden by an NNI tuner
# via nni.get_next_parameter() below.
args = {
    'dataset': 'DiDi',
    'city': 'Xian',
    'num_conv_filters': 64,
    'kernel_size': 3,
    'lr': 5e-5,
    'batch_size': 64,
    'MergeIndex': 6,
}

# Version tag encodes dataset, city and (MergeIndex*5) minute granularity.
code_version = 'DeepST_{}_{}_F{}'.format(args['dataset'], args['city'], int(args['MergeIndex'])*5)

# When run under NNI, merge the tuned parameters and make the code
# version unique per trial via the sequence id.
nni_params = nni.get_next_parameter()
nni_sid = nni.get_sequence_id()
if nni_params:
    args.update(nni_params)
    code_version += ('_' + str(nni_sid))

# Config data loader
data_loader = GridTrafficLoader(dataset=args['dataset'], city=args['city'],
                                closeness_len=6, period_len=7, trend_len=4,MergeIndex=args['MergeIndex'])

deep_st_obj = DeepST(closeness_len=data_loader.closeness_len,
                     period_len=data_loader.period_len,
                     trend_len=data_loader.trend_len,
                     external_dim=data_loader.external_dim,
                     num_conv_filters=args['num_conv_filters'], kernel_size=args['kernel_size'],
                     code_version=code_version,
                     width=data_loader.width, height=data_loader.height, lr=args['lr'])

deep_st_obj.build()

print('Trainable variables', deep_st_obj.trainable_vars)

# Training (10% of the training sequences held out for validation).
deep_st_obj.fit(closeness_feature=data_loader.train_closeness,
                period_feature=data_loader.train_period,
                trend_feature=data_loader.train_trend,
                target=data_loader.train_y,
                external_feature=data_loader.train_ef,
                sequence_length=data_loader.train_sequence_len,
                batch_size=args['batch_size'],
                validate_ratio=0.1)

# Predict
prediction = deep_st_obj.predict(closeness_feature=data_loader.test_closeness,
                                 period_feature=data_loader.test_period,
                                 trend_feature=data_loader.test_trend,
                                 target=data_loader.test_y,
                                 external_feature=data_loader.test_ef,
                                 sequence_length=data_loader.test_sequence_len)

# RMSE in the original (de-normalized) scale.
test_rmse = metric.rmse(prediction=data_loader.normalizer.inverse_transform(prediction['prediction']),
                        target=data_loader.normalizer.inverse_transform(data_loader.test_y))

# Compute metric
print('Test result', test_rmse)

# Evaluate: best validation loss recorded during training.
val_loss = deep_st_obj.load_event_scalar('val_loss')

best_val_loss = min([e[-1] for e in val_loss])
# best_val_loss = data_loader.normalizer.inverse_transform(best_val_loss)

print('Best val result', best_val_loss)
print('Test result', test_rmse)

# Each val_loss record starts with a wall-clock timestamp.
print('Converged using %.2f hour' % ((val_loss[-1][0] - val_loss[0][0]) / 3600))
if nni_params:
    nni.report_final_result({
        'default': best_val_loss,
        'test-rmse': test_rmse
    })

================================================
FILE: Experiments/DeepST/param_search.yml
================================================
authorName: DiChai
experimentName: search_space
trialConcurrency: 1
maxExecDuration: 24h
maxTrialNum: 50
trainingServicePlatform: local
# The path to Search Space
searchSpacePath: search_space.json
useAnnotation: false
tuner:
  builtinTunerName: GridSearch
# The path and the running command of trial
trial:
  command: python DeepST.py
  codeDir: .
  gpuNum: 1

================================================
FILE: Experiments/DeepST/search_space.json
================================================
{
    "num_conv_filters": {"_type":"choice","_value":[32, 64, 128]},

    "kernel_size": {"_type":"choice","_value":[3, 4, 5]},

    "lr": {"_type":"choice","_value":[0.0001, 0.00002, 0.00004, 0.00008, 0.00001]},

    "batch_size": {"_type":"choice","_value":[32, 64, 128, 256]}
}

================================================
FILE: Experiments/GBRT/GBRT.py
================================================
import numpy as np
import argparse
from sklearn.ensemble import GradientBoostingRegressor
from UCTB.dataset import NodeTrafficLoader
from UCTB.evaluation import metric
from UCTB.preprocess import SplitData
import nni
import os

# Default hyper-parameters searched by NNI (see gbrt_search_space.json);
# nni.get_next_parameter() below may override any of them.
params = {
    'CT': 12,
    'PT': 14,
    'TT': 1,
    'max_depth': 7,
    'num_boost_round': 182
}


# Fixed experiment options come from the command line, tuned ones from NNI.
# NOTE: the original script constructed ArgumentParser twice; the first
# instance was immediately discarded, so only one is kept.
parser = argparse.ArgumentParser(description="Argument Parser")
# data source
parser.add_argument('--dataset', default='Metro', type=str)
parser.add_argument('--city', default='Chongqing', type=str)
parser.add_argument('--MergeIndex', default=3)
parser.add_argument('--DataRange', default="all")
parser.add_argument('--TrainDays', default="all")
parser.add_argument('--MergeWay', default="sum")
parser.add_argument('--test_ratio', default=0.1, type=float)

# use params (tuned) and args (fixed) to show their difference
args = vars(parser.parse_args())

params.update(nni.get_next_parameter())


# Load raw (un-normalized) node traffic; Laplacian matrices are not needed.
data_loader = NodeTrafficLoader(dataset=args["dataset"], city=args['city'], closeness_len=int(params['CT']), period_len=int(params['PT']), trend_len=int(params['TT']),
                                data_range=args['DataRange'], train_data_length=args['TrainDays'],
                                test_ratio=args['test_ratio'],
                                with_lm=False, normalize=False, MergeIndex=args['MergeIndex'],
                                MergeWay=args['MergeWay'])


# Hold out the last 10% of the training sequences for validation.
train_closeness, val_closeness = SplitData.split_data(
    data_loader.train_closeness, [0.9, 0.1])
train_period, val_period = SplitData.split_data(
    data_loader.train_period, [0.9, 0.1])
train_trend, val_trend = SplitData.split_data(
    data_loader.train_trend, [0.9, 0.1])

train_y, val_y = SplitData.split_data(data_loader.train_y, [0.9, 0.1])

prediction_test = []
prediction_val = []


# Fit one GBRT model per station on the concatenated temporal features.
for i in range(data_loader.station_number):

    print('*************************************************************')
    print('Station', i)

    model = GradientBoostingRegressor(n_estimators=int(params['num_boost_round']), max_depth=int(params['max_depth']))

    # Assemble feature blocks; a sequence length of 0 disables a component.
    # (If CT, PT and TT were all 0 the concatenation below would fail.)
    X_Train = []
    X_Val = []
    X_Test = []
    if int(params['CT']) > 0:
        X_Train.append(train_closeness[:, i, :, 0])
        X_Val.append(val_closeness[:, i, :, 0])
        X_Test.append(data_loader.test_closeness[:, i, :, 0])
    if int(params['PT']) > 0:
        X_Train.append(train_period[:, i, :, 0])
        X_Val.append(val_period[:, i, :, 0])
        X_Test.append(data_loader.test_period[:, i, :, 0])
    if int(params['TT']) > 0:
        X_Train.append(train_trend[:, i, :, 0])
        X_Val.append(val_trend[:, i, :, 0])
        X_Test.append(data_loader.test_trend[:, i, :, 0])

    X_Train = np.concatenate(X_Train, axis=-1)
    X_Val = np.concatenate(X_Val, axis=-1)
    X_Test = np.concatenate(X_Test, axis=-1)

    model.fit(X_Train, train_y[:, i, 0])

    p_val = model.predict(X_Val)
    p_test = model.predict(X_Test)

    prediction_test.append(p_test.reshape([-1, 1, 1]))
    prediction_val.append(p_val.reshape([-1, 1, 1]))

# Stack per-station predictions back to (samples, stations, 1).
prediction_test = np.concatenate(prediction_test, axis=-2)
prediction_val = np.concatenate(prediction_val, axis=-2)

print('Val RMSE', metric.rmse(prediction_val, val_y))
print('Test RMSE', metric.rmse(prediction_test, data_loader.test_y))

# Validation RMSE is NNI's optimization target ('default').
nni.report_final_result({'default': metric.rmse(prediction_val, val_y),
                         'test-rmse': metric.rmse(prediction_test, data_loader.test_y)})


================================================
FILE: Experiments/GBRT/gbrt_config.yml
================================================
authorName: lychen
experimentName: gbrt_parameter_search
trialConcurrency: 1
maxExecDuration: 72h
maxTrialNum: 200
trainingServicePlatform: local
# The path to Search Space
searchSpacePath: gbrt_search_space.json
useAnnotation: false
tuner:
  builtinTunerName: TPE
# The path and the running command of trial
trial:
  # python GBRT.py --dataset Metro --city Chongqing --MergeIndex 12 --MergeWay sum --DataRange all --TrainDays all
  # python GBRT.py --dataset DiDi --city Xian --MergeIndex 12 --MergeWay sum --DataRange all --TrainDays all
  # python GBRT.py --dataset Bike --city NYC --MergeIndex 12 --MergeWay sum --DataRange all --TrainDays 365
  # python GBRT.py --dataset ChargeStation --city Beijing --MergeIndex 1 --MergeWay max --DataRange all --TrainDays all
  # python GBRT.py --dataset Taxi --city BJ --MergeIndex 2 --MergeWay sum --DataRange all --TrainDays all
  # python GBRT.py --dataset METR --city LA --MergeIndex 3 --MergeWay average --DataRange all --TrainDays all --test_ratio 0.2
  # python GBRT.py --dataset PEMS --city BAY --MergeIndex 3 --MergeWay average --DataRange all --TrainDays all --test_ratio 0.2
  # python GBRT.py --dataset PEMS --city BAY --MergeIndex 12 --MergeWay average --DataRange all --TrainDays all --test_ratio 0.2
  command:  python GBRT.py --dataset DiDi --city Chengdu_Street --MergeIndex 12 --MergeWay sum --DataRange all --TrainDays all
  codeDir: .
  gpuNum: 0 

================================================
FILE: Experiments/GBRT/gbrt_search_space.json
================================================
{

    "CT": {"_type": "randint", "_value": [0,13]},
    "PT": {"_type": "randint", "_value": [0,15]},
    "TT": {"_type": "randint", "_value": [0,5]},

    "max_depth": {"_type":"randint","_value":[1, 11]},
    "num_boost_round": {"_type":"randint","_value":[10, 201]}
}

================================================
FILE: Experiments/GMAN/GMAN.py
================================================
"""Train and evaluate GMAN on a UCTB dataset.

Pipeline: load traffic data with NodeTrafficLoader, build a station
correlation graph, generate node2vec spatial embeddings (cached under
./Graph_File), train GMAN, and report the test RMSE.
"""
import time
import argparse
import os

from UCTB.evaluation import metric
from UCTB.model.GMAN import Graph
from UCTB.dataset import NodeTrafficLoader
from UCTB.preprocess.GraphGenerator import GraphGenerator
from UCTB.utils.utils_GMAN import *

# args config
parser = argparse.ArgumentParser()
# data loader parameters
parser.add_argument("--dataset", default='Bike', type=str)
parser.add_argument("--city", default='NYC', type=str)
parser.add_argument("--closeness_len", default=12, type=int)
parser.add_argument("--period_len", default=0, type=int)
parser.add_argument("--trend_len", default=0, type=int)
parser.add_argument("--data_range", default="all", type=str)
parser.add_argument("--train_data_length", default="all", type=str)
parser.add_argument("--test_ratio", default=0.1, type=float)
parser.add_argument("--MergeIndex", default=1, type=int)
parser.add_argument("--MergeWay", default="sum", type=str)
parser.add_argument("--threshold_correlation", default=0.7, type=float)

# model / training parameters
parser.add_argument('--Q', type=int, default=1,
                    help='prediction steps')
parser.add_argument('--L', type=int, default=1,
                    help='number of STAtt Blocks')
parser.add_argument('--K', type=int, default=8,
                    help='number of attention heads')
parser.add_argument('--d', type=int, default=8,
                    help='dims of each head attention outputs')
parser.add_argument('--train_ratio', type=float, default=0.7,
                    help='training set [default : 0.7]')
parser.add_argument('--val_ratio', type=float, default=0.1,
                    help='validation set [default : 0.1]')
parser.add_argument('--batch_size', type=int, default=8,
                    help='batch size')
parser.add_argument('--max_epoch', type=int, default=500,
                    help='epoch to run')
parser.add_argument('--patience', type=int, default=40,
                    help='patience for early stop')
parser.add_argument('--learning_rate', type=float, default=0.001,
                    help='initial learning rate')
parser.add_argument('--decay_epoch', type=int, default=5,
                    help='decay epoch')

# spatial embedding (node2vec) parameters
parser.add_argument('--spatial_is_directed', type=bool, default=False)
parser.add_argument('--spatial_p', type=int, default=2)
parser.add_argument('--spatial_q', type=int, default=1)
parser.add_argument('--spatial_num_walks', type=int, default=100)
parser.add_argument('--spatial_walk_length', type=int, default=80)
parser.add_argument('--spatial_dimensions', type=int, default=32)
parser.add_argument('--spatial_epochs', type=int, default=1000)


args = parser.parse_args()

# config data_loader
data_loader = NodeTrafficLoader(dataset=args.dataset, city=args.city,
                                data_range=args.data_range, train_data_length=args.train_data_length,
                                test_ratio=float(args.test_ratio),
                                closeness_len=args.closeness_len,
                                period_len=args.period_len,
                                trend_len=args.trend_len,
                                normalize=False, remove=False,
                                MergeIndex=args.MergeIndex,
                                MergeWay=args.MergeWay)

# Total input-sequence length fed to GMAN (all history windows concatenated).
args.P = args.closeness_len + args.period_len + args.trend_len

# Build the inter-station correlation graph.
# Fix: the CLI threshold was previously passed as `threshold_distance`, which
# a correlation graph does not read, so --threshold_correlation never took
# effect; route it through the `threshold_correlation` keyword instead.
graph_obj = GraphGenerator(graph='correlation', data_loader=data_loader,
                           threshold_correlation=args.threshold_correlation)

# Cached adjacency / spatial-embedding files, one pair per dataset+city.
adj_file = os.path.abspath("./Graph_File/{}_{}_adj.txt".format(args.dataset, args.city))
SE_file = os.path.abspath("./Graph_File/{}_{}_SE.txt".format(args.dataset, args.city))
args.SE_file = SE_file

if not os.path.exists(SE_file):
    # Generate graph embeddings once and cache them: dump the adjacency,
    # run node2vec random walks, then learn and save the embeddings.
    graph_to_adj_files(graph_obj.AM[0], adj_file)

    nx_G = read_graph(adj_file)
    G = Graph(nx_G, args.spatial_is_directed, args.spatial_p, args.spatial_q)
    G.preprocess_transition_probs()

    walks = G.simulate_walks(args.spatial_num_walks, args.spatial_walk_length)
    learn_embeddings(walks, args.spatial_dimensions, SE_file, args.spatial_epochs)


# Checkpoint directory and log file, keyed by dataset/city/MergeIndex.
model_name = os.path.abspath("model_dir/{}_{}_{}/".format(args.dataset, args.city, args.MergeIndex))
if not os.path.exists(model_name):
    os.makedirs(model_name)
args.model_file = model_name
print("model_name:", args.model_file)

log_file = os.path.abspath("log/{}_{}_{}.txt".format(args.dataset, args.city, args.MergeIndex))
if not os.path.exists(os.path.dirname(log_file)):
    os.makedirs(os.path.dirname(log_file))
args.log_file = log_file
print("log_file:", args.log_file)


start = time.time()
log = open(args.log_file, 'w')
# str(args) looks like "Namespace(...)"; slice off the wrapper for the log.
log_string(log, str(args)[10: -1])

# load data
log_string(log, 'loading data...')

(trainX, trainTE, trainY, valX, valTE, valY, testX, testTE, testY,
 SE, time_fitness) = load_data(args, data_loader)


log_string(log, 'trainX: %s\ttrainY: %s' % (trainX.shape, trainY.shape))
log_string(log, 'valX:   %s\t\tvalY:   %s' % (valX.shape, valY.shape))
log_string(log, 'testX:  %s\t\ttestY:  %s' % (testX.shape, testY.shape))
log_string(log, 'data loaded!')

# Train and Test
X, TE, label, is_training, saver, sess, train_op, loss, pred = build_model(
    log, time_fitness, trainX, args, SE)

train_prediction, val_prediction = Train(
    log, args, trainX, trainY, trainTE, valX, valTE, valY, X, TE, label, is_training, saver, sess, train_op, loss, pred)

test_prediction = Test(log, args, testX, testTE, X,
                       TE, is_training, sess, pred)
# NOTE(review): the loader was built with normalize=False yet inverse_transform
# is applied here — confirm the loader fits its normalizer regardless of the flag.
test_prediction = data_loader.normalizer.inverse_transform(test_prediction)
y_true = data_loader.normalizer.inverse_transform(data_loader.test_y)
test_rmse = metric.rmse(prediction=test_prediction.squeeze(),
                        target=y_true.squeeze())

print("Test RMSE:", test_rmse)
# Flush and release the log file handle (it was left open in the original).
log.close()


================================================
FILE: Experiments/GMAN/Runner.py
================================================
import os

import os

#############################################
# BenchMark Bike
#############################################
# ########### NYC ########### 
# # # os.system("python GMAN.py --dataset Bike --city NYC --data_range 0.25 --train_data_length 91 --MergeIndex 3 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")

# os.system("python GMAN.py --dataset Bike --city NYC --data_range 0.5 --train_data_length 183 --MergeIndex 6 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")

# # os.system("python GMAN.py --dataset Bike --city NYC --data_range all --train_data_length 365 --MergeIndex 12 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")


# # # # # ########### Chicago ###########
# # # # # # os.system("python GMAN.py --dataset Bike --city Chicago --data_range 0.25 --train_data_length 91 --MergeIndex 3 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")

# os.system("python GMAN.py --dataset Bike --city Chicago --data_range 0.5 --train_data_length 183 --MergeIndex 6 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")

# # os.system("python GMAN.py --dataset Bike --city Chicago --data_range all --train_data_length 365 --MergeIndex 12 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")


# # # # # ########### DC ###########
# # # # # # os.system("python GMAN.py --dataset Bike --city DC --data_range 0.25 --train_data_length 91 --MergeIndex 3 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")

# os.system("python GMAN.py --dataset Bike --city DC --data_range 0.5 --train_data_length 183 --MergeIndex 6 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")

# # os.system("python GMAN.py --dataset Bike --city DC --data_range all --train_data_length 365 --MergeIndex 12 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")



# # # # # # ###############################################
# # # # # # # BenchMark DiDi
# # # # # # ###############################################
# # # # # # ############# Xian #############
# # # # # # # os.system("python GMAN.py --dataset DiDi --city Xian --MergeIndex 3 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")

# os.system("python GMAN.py --dataset DiDi --city Xian --MergeIndex 6 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")

# # # # os.system("python GMAN.py --dataset DiDi --city Xian --MergeIndex 12 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")

# # # # # # ############# Chengdu #############
# # # # # # # os.system("python GMAN.py --dataset DiDi --city Chengdu --MergeIndex 3 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")

# os.system("python GMAN.py --dataset DiDi --city Chengdu --MergeIndex 6 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")

# os.system("python GMAN.py --dataset DiDi --city Chengdu --MergeIndex 12 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")



# # # # # # ###############################################
# # # # # # # BenchMark Metro
# # # # # # ###############################################
# # # # # # ############# Chongqing #############
# # # # # # # os.system("python GMAN.py --dataset Metro --city Chongqing --MergeIndex 3 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")

# os.system("python GMAN.py --dataset Metro --city Chongqing --MergeIndex 6 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")

# # # os.system("python GMAN.py --dataset Metro --city Chongqing --MergeIndex 12 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")


# # # # # # ############# Shanghai #############
# # # # # # # os.system("python GMAN.py --dataset Metro --city Shanghai --MergeIndex 3 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")

# os.system("python GMAN.py --dataset Metro --city Shanghai --MergeIndex 6 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")

# os.system("python GMAN.py --dataset Metro --city Shanghai --MergeIndex 12 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")



# # # # # ###############################################
# # # # # # BenchMark ChargeStation
# # # # # ###############################################

# os.system("python GMAN.py --dataset ChargeStation --city Beijing --MergeIndex 1 --MergeWay max --closeness_len 12 --period_len 0 --trend_len 0")

# # # os.system("python GMAN.py --dataset ChargeStation --city Beijing --MergeIndex 2 --MergeWay max --closeness_len 12 --period_len 0 --trend_len 0")



# # # # # # ###############################################
# # # # # # # BenchMark METR-LA
# # # # # # ###############################################

# # # # # # # os.system("python GMAN.py --dataset METR --city LA --MergeIndex 3 --MergeWay average --closeness_len 12 --period_len 0 --trend_len 0")

# os.system("python GMAN.py --dataset METR --city LA --MergeIndex 6 --MergeWay average --closeness_len 12 --period_len 0 --trend_len 0")

# Currently-active benchmark run: GMAN on METR-LA merged to MergeIndex 12;
# the other dataset/city combinations above/below are kept as commented-out templates.
os.system("python GMAN.py --dataset METR --city LA --MergeIndex 12 --MergeWay average --closeness_len 12 --period_len 0 --trend_len 0")


# # # # # # ###############################################
# # # # # # # BenchMark PEMS-BAY
# # # # # # ###############################################
# # # # # # # os.system("python GMAN.py --dataset PEMS --city BAY --MergeIndex 3 --MergeWay average --closeness_len 12 --period_len 0 --trend_len 0")

# os.system("python GMAN.py --dataset PEMS --city BAY --MergeIndex 6 --MergeWay average --closeness_len 12 --period_len 0 --trend_len 0")

# os.system("python GMAN.py --dataset PEMS --city BAY --MergeIndex 12 --MergeWay average --closeness_len 12 --period_len 0 --trend_len 0")


# # # ###############################################
# # # # BenchMark DiDi_Street
# # # ###############################################
# # # ############# Xian_Street #############
# os.system("python GMAN.py --dataset DiDi --city Xian_Street --MergeIndex 3 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")

# os.system("python GMAN.py --dataset DiDi --city Xian_Street --MergeIndex 6 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")

# os.system("python GMAN.py --dataset DiDi --city Xian_Street --MergeIndex 12 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")

# ############ Chengdu_Street #############
# os.system("python GMAN.py --dataset DiDi --city Chengdu_Street --MergeIndex 3 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")

# os.system("python GMAN.py --dataset DiDi --city Chengdu_Street --MergeIndex 6 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")

# os.system("python GMAN.py --dataset DiDi --city Chengdu_Street --MergeIndex 12 --MergeWay sum --closeness_len 12 --period_len 0 --trend_len 0")


================================================
FILE: Experiments/GraphWaveNet/GraphWaveNet.py
================================================
"""Train and evaluate Graph WaveNet on a UCTB dataset (single-step prediction)."""
import torch
import argparse
import time
import os

from UCTB.utils.utils_GraphWaveNet import *
from UCTB.preprocess.GraphGenerator import GraphGenerator
from UCTB.dataset import NodeTrafficLoader, data_loader
from UCTB.evaluation import metric


parser = argparse.ArgumentParser()
parser.add_argument('--device', type=str, default='cuda:0', help='')
parser.add_argument('--data', type=str, default='data/METR-LA', help='data path')
parser.add_argument('--adjdata', type=str, default='data/sensor_graph/adj_mx.pkl', help='adj data path')
parser.add_argument('--adjtype', type=str, default='doubletransition', help='adj type')
parser.add_argument('--gcn_bool', action='store_true', help='whether to add graph convolution layer')
parser.add_argument('--aptonly', action='store_true', help='whether only adaptive adj')
parser.add_argument('--addaptadj', action='store_true', help='whether add adaptive adj')
parser.add_argument('--randomadj', action='store_true', help='whether random initialize adaptive adj')
parser.add_argument('--seq_length', type=int, default=1, help='')
parser.add_argument('--nhid', type=int, default=32, help='')
parser.add_argument('--in_dim', type=int, default=1, help='inputs dimension')
parser.add_argument('--num_nodes', type=int, default=207, help='number of nodes')
parser.add_argument('--batch_size', type=int, default=32, help='batch size')
parser.add_argument('--learning_rate', type=float, default=0.001, help='learning rate')
parser.add_argument('--dropout', type=float, default=0.3, help='dropout rate')
parser.add_argument('--weight_decay', type=float, default=0.0001, help='weight decay rate')
parser.add_argument('--epochs', type=int, default=100, help='')
parser.add_argument('--print_every', type=int, default=50, help='')
# parser.add_argument('--seed',type=int,default=99,help='random seed')
parser.add_argument('--save', type=str, default='./garage/metr', help='save path')
parser.add_argument('--expid', type=int, default=1, help='experiment id')
# data parameters
parser.add_argument("--dataset", default='DiDi', type=str, help="configuration file path")
parser.add_argument("--city", default='Xian', type=str)
parser.add_argument("--closeness_len", default=6, type=int)
parser.add_argument("--period_len", default=7, type=int)
parser.add_argument("--trend_len", default=4, type=int)
parser.add_argument("--data_range", default="all", type=str)
parser.add_argument("--train_data_length", default="all", type=str)
parser.add_argument("--test_ratio", default=0.1, type=float)
parser.add_argument("--MergeIndex", default=1, type=int)
parser.add_argument("--MergeWay", default="sum", type=str)

args = parser.parse_args()

# loading data
uctb_data_loader = NodeTrafficLoader(dataset=args.dataset, city=args.city,
                                     data_range=args.data_range, train_data_length=args.train_data_length,
                                     test_ratio=float(args.test_ratio),
                                     closeness_len=args.closeness_len,
                                     period_len=args.period_len,
                                     trend_len=args.trend_len,
                                     normalize=False,
                                     MergeIndex=args.MergeIndex,
                                     MergeWay=args.MergeWay)

# Override CLI defaults with values derived from the loaded data:
# one input channel per history step, single-step (seq_length=1) output.
args.num_nodes = uctb_data_loader.station_number
args.in_dim = uctb_data_loader.closeness_len + uctb_data_loader.period_len + uctb_data_loader.trend_len
args.seq_length = 1
args.save = os.path.abspath('./experiment/{}_{}_{}'.format(args.dataset, args.city, args.MergeIndex))
if not os.path.exists(args.save):
    os.makedirs(args.save)

# Build Graph
graph_obj = GraphGenerator(graph='distance', data_loader=uctb_data_loader)


device = torch.device(args.device)
# The same batch size is used for the train/val/test loaders.
data_dict = load_dataset(uctb_data_loader, args.batch_size, args.batch_size, args.batch_size)


# One support (adjacency) tensor per generated graph.
supports = [torch.tensor(graph_obj.AM[i]).to(device) for i in range(len(graph_obj.AM))]

print(args)
t1 = time.time()
# Adaptive-adjacency initialization: random when --randomadj is set,
# otherwise seeded from the first support matrix.
if args.randomadj:
    adjinit = None
else:
    adjinit = supports[0]
if args.aptonly:
    supports = None
engine = trainer(args.in_dim, args.seq_length, args.num_nodes, args.nhid, args.dropout,
                 args.learning_rate, args.weight_decay, device, supports, args.gcn_bool, args.addaptadj,
                 adjinit)

# Train, then evaluate the checkpoint identified by (epoch_id, loss_id).
epoch_id, loss_id = Training(args, data_dict, device, engine)

print("epoch_id:", epoch_id, "loss_id:", loss_id)

test_prediction = Test(args, data_dict, device, engine,  epoch_id, loss_id)
# NOTE(review): the loader was built with normalize=False, yet the normalizer's
# inverse_transform is applied here — confirm the loader fits its normalizer
# regardless of the flag.
test_prediction = uctb_data_loader.normalizer.inverse_transform(test_prediction)
y_true = uctb_data_loader.normalizer.inverse_transform(uctb_data_loader.test_y)
rmse_result = metric.rmse(prediction=test_prediction.squeeze(),
                        target=y_true.squeeze())
print("Test RMSE:", rmse_result)

t2 = time.time()
print("Total time spent: {:.4f}".format(t2 - t1))




================================================
FILE: Experiments/GraphWaveNet/Runner.py
================================================
import os

#############################################
# BenchMark Bike
#############################################
########### NYC ###########
# os.system("python GraphWaveNet.py --dataset Bike --city NYC --data_range 0.25 --train_data_length 91 --MergeIndex 3 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")

# os.system("python GraphWaveNet.py --dataset Bike --city NYC --data_range 0.5 --train_data_length 183 --MergeIndex 6 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")

# os.system("python GraphWaveNet.py --dataset Bike --city NYC --data_range all --train_data_length 365 --MergeIndex 12 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")


# ########### Chicago ###########
# Currently-active benchmark run: Graph WaveNet on Bike/Chicago (MergeIndex 3);
# the other dataset/city combinations above/below are kept as commented-out templates.
os.system("python GraphWaveNet.py --dataset Bike --city Chicago --data_range 0.25 --train_data_length 91 --MergeIndex 3 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")

# # os.system("python GraphWaveNet.py --dataset Bike --city Chicago --data_range 0.5 --train_data_length 183 --MergeIndex 6 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")

# os.system("python GraphWaveNet.py --dataset Bike --city Chicago --data_range all --train_data_length 365 --MergeIndex 12 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")


# ########### DC ###########
# # os.system("python GraphWaveNet.py --dataset Bike --city DC --data_range 0.25 --train_data_length 91 --MergeIndex 3 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")

# # os.system("python GraphWaveNet.py --dataset Bike --city DC --data_range 0.5 --train_data_length 183 --MergeIndex 6 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")

# os.system("python GraphWaveNet.py --dataset Bike --city DC --data_range all --train_data_length 365 --MergeIndex 12 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")



# ###############################################
# # BenchMark DiDi
# ###############################################
# ############# Xian #############
# # os.system("python GraphWaveNet.py --dataset DiDi --city Xian --MergeIndex 3 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")

# # os.system("python GraphWaveNet.py --dataset DiDi --city Xian --MergeIndex 6 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")

# os.system("python GraphWaveNet.py --dataset DiDi --city Xian --MergeIndex 12 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")

# ############# Chengdu #############
# # os.system("python GraphWaveNet.py --dataset DiDi --city Chengdu --MergeIndex 3 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")

# # os.system("python GraphWaveNet.py --dataset DiDi --city Chengdu --MergeIndex 6 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")

# os.system("python GraphWaveNet.py --dataset DiDi --city Chengdu --MergeIndex 12 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")



# ###############################################
# # BenchMark Metro
# ###############################################
# ############# Chongqing #############
# # os.system("python GraphWaveNet.py --dataset Metro --city Chongqing --MergeIndex 3 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")

# # os.system("python GraphWaveNet.py --dataset Metro --city Chongqing --MergeIndex 6 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")

# os.system("python GraphWaveNet.py --dataset Metro --city Chongqing --MergeIndex 12 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")


# ############# Shanghai #############
# # os.system("python GraphWaveNet.py --dataset Metro --city Shanghai --MergeIndex 3 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")

# # os.system("python GraphWaveNet.py --dataset Metro --city Shanghai --MergeIndex 6 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")

# os.system("python GraphWaveNet.py --dataset Metro --city Shanghai --MergeIndex 12 --MergeWay sum --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")



# ###############################################
# # BenchMark ChargeStation
# ###############################################

# os.system("python GraphWaveNet.py --dataset ChargeStation --city Beijing --MergeIndex 1 --MergeWay max --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")

# os.system("python GraphWaveNet.py --dataset ChargeStation --city Beijing --MergeIndex 2 --MergeWay max --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")


# ###############################################
# # BenchMark METR-LA
# ###############################################

# # os.system("python GraphWaveNet.py --dataset METR --city LA --MergeIndex 3 --MergeWay average --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")

# # os.system("python GraphWaveNet.py --dataset METR --city LA --MergeIndex 6 --MergeWay average --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")

# os.system("python GraphWaveNet.py --dataset METR --city LA --MergeIndex 12 --MergeWay average --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")


# ###############################################
# # BenchMark PEMS-BAY
# ###############################################
# # os.system("python GraphWaveNet.py --dataset PEMS --city BAY --MergeIndex 3 --MergeWay average --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")

# # os.system("python GraphWaveNet.py --dataset PEMS --city BAY --MergeIndex 6 --MergeWay average --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")

# os.system("python GraphWaveNet.py --dataset PEMS --city BAY --MergeIndex 12 --MergeWay average --gcn_bool --adjtype doubletransition --addaptadj  --randomadj")


================================================
FILE: Experiments/HM/HM.py
================================================
"""HM (historical-mean) baseline on a UCTB dataset, tuned via NNI.

The NNI search space tunes the three history lengths (CT/PT/TT); validation
RMSE is reported as the optimization target, test RMSE for reference.
"""
from UCTB.dataset import NodeTrafficLoader
from UCTB.model import HM
import argparse
from UCTB.evaluation import metric
from UCTB.preprocess import SplitData
import nni
import os


# Default history lengths; overridden by the NNI-sampled parameters below.
params = {
    'CT': 0,
    'PT': 0,
    'TT': 4,
}

params.update(nni.get_next_parameter())


# acquire data source path
parser = argparse.ArgumentParser(description="Argument Parser")
parser.add_argument('--dataset', default='Metro', type=str)
parser.add_argument('--city', default="Shanghai", type=str)
# Fix: MergeIndex is a numeric merge factor; without type=int a CLI-supplied
# value reached the loader as a string (the sibling scripts use type=int).
parser.add_argument('--MergeIndex', default=1, type=int)
parser.add_argument('--DataRange', default="all")
parser.add_argument('--TrainDays', default="all")
parser.add_argument('--MergeWay', default="sum")
parser.add_argument('--test_ratio', default=0.1, type=float)

# note that the args is different from param
args = vars(parser.parse_args())


data_loader = NodeTrafficLoader(dataset=args["dataset"], city=args['city'], closeness_len=int(params['CT']), period_len=int(params['PT']), trend_len=int(params['TT']),
                                data_range=args['DataRange'], train_data_length=args['TrainDays'],
                                test_ratio=args['test_ratio'],
                                with_lm=False, normalize=False, MergeIndex=args['MergeIndex'],
                                MergeWay=args['MergeWay'])

# Hold out the last 10% of the training slices as a validation set.
train_closeness, val_closeness = SplitData.split_data(
    data_loader.train_closeness, [0.9, 0.1])
train_period, val_period = SplitData.split_data(
    data_loader.train_period, [0.9, 0.1])
train_trend, val_trend = SplitData.split_data(
    data_loader.train_trend, [0.9, 0.1])


train_y, val_y = SplitData.split_data(data_loader.train_y, [0.9, 0.1])


# HM needs no training: it predicts directly from the history features.
hm_obj = HM(c=data_loader.closeness_len,
            p=data_loader.period_len, t=data_loader.trend_len)


test_prediction = hm_obj.predict(closeness_feature=data_loader.test_closeness,
                                 period_feature=data_loader.test_period,
                                 trend_feature=data_loader.test_trend)

val_prediction = hm_obj.predict(closeness_feature=val_closeness,
                                period_feature=val_period,
                                trend_feature=val_trend)


print('Test RMSE', metric.rmse(test_prediction, data_loader.test_y))
print('Val RMSE', metric.rmse(val_prediction, val_y))


# 'default' is the metric NNI optimizes; 'test-rmse' is logged for reference.
nni.report_final_result({'default': metric.rmse(val_prediction, val_y),
                         'test-rmse': metric.rmse(test_prediction, data_loader.test_y)})


================================================
FILE: Experiments/HM/hm_closeness_search_space.json
================================================
{
    "CT": {"_type": "choice", "_value": [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24]},
    "PT": {"_type": "choice", "_value": [0]},
    "TT": {"_type": "choice", "_value": [0]}
}

================================================
FILE: Experiments/HM/hm_config.yml
================================================
authorName: lychen
experimentName: hm_parameter_search
trialConcurrency: 2
maxExecDuration: 24h
maxTrialNum: 24
trainingServicePlatform: local
# The path to Search Space
searchSpacePath: hm_closeness_search_space.json
useAnnotation: false
tuner:
  builtinTunerName: TPE
# The path and the running command of trial
trial:
  # python HM.py --dataset Metro --city Chongqing --MergeIndex 12 --MergeWay sum --DataRange all --TrainDays all
  # python HM.py --dataset DiDi --city Xian --MergeIndex 12 --MergeWay sum --DataRange all --TrainDays all
  # python HM.py --dataset Bike --city NYC --MergeIndex 12 --MergeWay sum --DataRange all --TrainDays 365
  # python HM.py --dataset ChargeStation --city Beijing --MergeIndex 1 --MergeWay max --DataRange all --TrainDays all
  # python HM.py --dataset Taxi --city BJ --MergeIndex 2 --MergeWay sum --DataRange all --TrainDays all
  # python HM.py --dataset METR --city LA --MergeIndex 3 --MergeWay average --DataRange all --TrainDays all --test_ratio 0.2
  # python HM.py --dataset PEMS --city BAY --MergeIndex 3 --MergeWay average --DataRange all --TrainDays all --test_ratio 0.2
  # python HM.py --dataset PEMS --city BAY --MergeIndex 1 --MergeWay average --DataRange all --TrainDays all --test_ratio 0.2
  # python HM.py --dataset DiDi --city Chengdu_Street --MergeIndex 12 --MergeWay sum --DataRange all --TrainDays all
  command: python HM.py --dataset DiDi --city Xian_Street --MergeIndex 12 --MergeWay sum --DataRange all --TrainDays all

  # Alternate small-scale setting used in some runs: data_range 0.25, train_data_length 91, graph Distance, MergeIndex 3
  codeDir: .
  gpuNum: 0

================================================
FILE: Experiments/HM/hm_search_space.json
================================================
{
    "CT": {"_type": "choice", "_value": [0,1,2,3,4,5,6]},
    "PT": {"_type": "choice", "_value": [0,1,2,3,4,5,6,7]},
    "TT": {"_type": "choice", "_value": [0,1,2,3,4]}
}

================================================
FILE: Experiments/HMM/HMM.py
================================================
"""Train and evaluate an HMM baseline on a UCTB dataset, driven by NNI."""
import nni
import argparse

from UCTB.model import HMM
from UCTB.dataset import NodeTrafficLoader
from UCTB.evaluation import metric
from UCTB.preprocess import SplitData


parser = argparse.ArgumentParser(description="Argument Parser")
# data source
parser.add_argument('--Dataset', default='Bike')
parser.add_argument('--City', default='DC')
# network parameter
parser.add_argument('--CT', default='6', type=int)
parser.add_argument('--PT', default='7', type=int)
parser.add_argument('--TT', default='4', type=int)

parser.add_argument('--DataRange', default='All')
parser.add_argument('--TrainDays', default='365')

parser.add_argument('--num_components', type=int, default=8)
parser.add_argument('--n_iter', type=int, default=365)

# Fix: 'CodeVersion' is appended to below (args['CodeVersion'] += ...) but was
# never declared, so every NNI-driven run raised KeyError. Declare it with an
# empty default so the sequence-id tag can be appended safely.
parser.add_argument('--CodeVersion', default='')


args = vars(parser.parse_args())

nni_params = nni.get_next_parameter()
nni_sid = nni.get_sequence_id()
if nni_params:
    args.update(nni_params)
    # Tag this trial with the NNI sequence id for bookkeeping.
    args['CodeVersion'] += str(nni_sid)


data_loader = NodeTrafficLoader(dataset=args['Dataset'], city=args['City'],
                                closeness_len=args['CT'], period_len=args['PT'], trend_len=args['TT'],
                                with_lm=False, with_tpe=False, normalize=False)

model = HMM(num_components=args['num_components'], n_iter=args['n_iter'])

# Hold out the last 10% of the training slices as a validation set.
train_closeness, val_closeness = SplitData.split_data(data_loader.train_closeness, [0.9, 0.1])
train_period, val_period = SplitData.split_data(data_loader.train_period, [0.9, 0.1])
train_trend, val_trend = SplitData.split_data(data_loader.train_trend, [0.9, 0.1])

train_label, val_label = SplitData.split_data(data_loader.train_y, [0.9, 0.1])

model.fit(X=(train_closeness, train_period, train_trend), y=train_label)

val_results = model.predict(X=(val_closeness, val_period, val_trend))
test_results = model.predict(X=(data_loader.test_closeness, data_loader.test_period, data_loader.test_trend))

val_rmse = metric.rmse(val_results, val_label)
test_rmse = metric.rmse(test_results, data_loader.test_y)

print(args['Dataset'], args['City'], 'val_rmse', val_rmse)
print(args['Dataset'], args['City'], 'test_rmse', test_rmse)

================================================
FILE: Experiments/HMM/trials.py
================================================
import os

from UCTB.utils import multiple_process


def task_func(share_queue, locker, data, parameters):
    """Run each queued HMM experiment via a shell call, then signal completion.

    Each item in ``data`` is a ``[dataset, city, extra_cli_args]`` triple and
    becomes one ``python HMM.py`` invocation; ``parameters[0]`` identifies this
    worker. When all tasks finish, a ``None`` sentinel is pushed onto
    ``share_queue`` under ``locker``.
    """
    worker_id = parameters[0]
    print('Child process %s with pid %s' % (worker_id, os.getpid()))

    for task in data:
        print('Child process', worker_id, 'running', task)
        command = 'python HMM.py --Dataset %s --City %s ' % (task[0], task[1])
        if task[2] != '':
            command = command + task[2]
        os.system(command)

    # Signal the parent that this worker is done.
    locker.acquire()
    share_queue.put(None)
    locker.release()


if __name__ == '__main__':

    # (dataset, city, extra CLI args) triples; each becomes one HMM.py run.
    task_list = [
        ['Bike', 'NYC', ''],
        ['Bike', 'Chicago', ''],
        ['Bike', 'DC', ''],
        ['Metro', 'Chongqing', ''],
        ['Metro', 'Shanghai', ''],
        ['DiDi', 'Chengdu', ''],
        ['DiDi', 'Xian', ''],
        ['ChargeStation', 'Beijing', '']
    ]

    # Number of parallel worker processes.
    n_jobs = 2

    # Round-robin partition of task_list across workers; results are discarded
    # (reduce_func yields None) since each trial prints its own metrics.
    multiple_process(distribute_list=task_list,
                     partition_func=lambda data, i, n_job: [data[e] for e in range(len(data)) if e % n_job == i],
                     task_func=task_func, n_jobs=n_jobs,
                     reduce_func=lambda x, y: None, parameters=[])



================================================
FILE: Experiments/MTGNN/MTGNN.py
================================================
import argparse
from UCTB.dataset import NodeTrafficLoader
import os
from UCTB.utils.utils_MTGNN import load_dataset
from UCTB.preprocess.GraphGenerator import GraphGenerator
from UCTB.dataset import NodeTrafficLoader
# from UCTB.evaluation import metric
from UCTB.utils.utils_MTGNN import *
from UCTB.model.MTGNN import gtnet
import pdb
import time
def str_to_bool(value):
    """Parse a boolean-like CLI value.

    Accepts actual booleans (returned unchanged) and, case-insensitively,
    the strings true/t/1/yes/y and false/f/0/no/n.

    Raises:
        ValueError: if the string matches neither set.
    """
    if isinstance(value, bool):
        return value
    lowered = value.lower()
    if lowered in {'true', 't', '1', 'yes', 'y'}:
        return True
    if lowered in {'false', 'f', '0', 'no', 'n'}:
        return False
    raise ValueError(f'{value} is not a valid boolean value')

# Command-line interface.
# NOTE: num_nodes, seq_in_len, seq_out_len and save are overwritten later
# from the loaded dataset, so their CLI defaults are placeholders only.
parser = argparse.ArgumentParser(description='PyTorch Time series forecasting')

parser.add_argument('--device',type=str,default='cuda:0',help='')
parser.add_argument('--data',type=str,default='data/METR-LA',help='data path')

parser.add_argument('--adj_data', type=str,default='data/sensor_graph/adj_mx.pkl',help='adj data path')
parser.add_argument('--gcn_true', type=str_to_bool, default=True, help='whether to add graph convolution layer')
parser.add_argument('--buildA_true', type=str_to_bool, default=True,help='whether to construct adaptive adjacency matrix')
parser.add_argument('--load_static_feature', type=str_to_bool, default=False,help='whether to load static feature')
parser.add_argument('--cl', type=str_to_bool, default=True,help='whether to do curriculum learning')

# model architecture hyper-parameters
parser.add_argument('--gcn_depth',type=int,default=2,help='graph convolution depth')
parser.add_argument('--num_nodes',type=int,default=207,help='number of nodes/variables')
parser.add_argument('--dropout',type=float,default=0.3,help='dropout rate')
parser.add_argument('--subgraph_size',type=int,default=20,help='k')
parser.add_argument('--node_dim',type=int,default=40,help='dim of nodes')
parser.add_argument('--dilation_exponential',type=int,default=1,help='dilation exponential')

parser.add_argument('--conv_channels',type=int,default=32,help='convolution channels')
parser.add_argument('--residual_channels',type=int,default=32,help='residual channels')
parser.add_argument('--skip_channels',type=int,default=64,help='skip channels')
parser.add_argument('--end_channels',type=int,default=128,help='end channels')


parser.add_argument('--in_dim',type=int,default=1,help='inputs dimension')
parser.add_argument('--seq_in_len',type=int,default=12,help='input sequence length')
parser.add_argument('--seq_out_len',type=int,default=12,help='output sequence length')

# optimization hyper-parameters
parser.add_argument('--layers',type=int,default=3,help='number of layers')
parser.add_argument('--batch_size',type=int,default=32,help='batch size')
parser.add_argument('--learning_rate',type=float,default=0.001,help='learning rate')
parser.add_argument('--weight_decay',type=float,default=0.0001,help='weight decay rate')
parser.add_argument('--clip',type=int,default=5,help='clip')
parser.add_argument('--step_size1',type=int,default=2500,help='step_size')
parser.add_argument('--step_size2',type=int,default=100,help='step_size')


parser.add_argument('--epochs',type=int,default=100,help='')
parser.add_argument('--print_every',type=int,default=50,help='')
parser.add_argument('--seed',type=int,default=101,help='random seed')
parser.add_argument('--save',type=str,default='./save/',help='save path')
parser.add_argument('--expid',type=int,default=1,help='experiment id')

parser.add_argument('--propalpha',type=float,default=0.05,help='prop alpha')
parser.add_argument('--tanhalpha',type=float,default=3,help='adj alpha')

parser.add_argument('--num_split',type=int,default=1,help='number of splits for graphs')

parser.add_argument('--runs',type=int,default=10,help='number of runs')

# data parameters
parser.add_argument("--dataset", default='METR', type=str, help="configuration file path")
parser.add_argument("--city", default='LA', type=str)
parser.add_argument("--closeness_len", default=6, type=int)
parser.add_argument("--period_len", default=7, type=int)
parser.add_argument("--trend_len", default=4, type=int)
parser.add_argument("--data_range", default="all", type=str)
parser.add_argument("--train_data_length", default="all", type=str)
parser.add_argument("--test_ratio", default=0.2, type=float)
parser.add_argument("--MergeIndex", default=6, type=int)
parser.add_argument("--MergeWay", default="average", type=str)

args = parser.parse_args()

# loading data
uctb_data_loader = NodeTrafficLoader(dataset=args.dataset, city=args.city,data_dir='data',
                                     data_range=args.data_range, train_data_length=args.train_data_length,
                                     test_ratio=float(args.test_ratio),
                                     closeness_len=args.closeness_len,
                                     period_len=args.period_len,
                                     trend_len=args.trend_len,
                                     normalize=True,
                                     MergeIndex=args.MergeIndex,
                                     MergeWay=args.MergeWay)
# pdb.set_trace()
# Override CLI placeholders with values derived from the loaded dataset:
# the model input is the concatenated closeness/period/trend window and the
# model predicts a single step ahead.
args.num_nodes = uctb_data_loader.station_number
args.seq_in_len = uctb_data_loader.closeness_len + uctb_data_loader.period_len + uctb_data_loader.trend_len
args.seq_out_len = 1
args.save = os.path.abspath('./experiment/MTGNN_{}_{}_{}/'.format(args.dataset, args.city, args.MergeIndex))
if not os.path.exists(args.save):
    os.makedirs(args.save)
    
# Build Graph
# graph_obj = GraphGenerator(graph='distance', data_loader=uctb_data_loader)


device = torch.device(args.device)
# Wrap the UCTB loader into train/val/test iterators with a common batch size.
data_dict = load_dataset(uctb_data_loader, args.batch_size, args.batch_size, args.batch_size)
# TODO: needs revision (translated from original comment "需要改下")
# predefined_A = graph_obj.AM[0]
# predefined_A = torch.tensor(predefined_A)-torch.eye(args.num_nodes)
# predefined_A = predefined_A.to(device)

model = gtnet(args.gcn_true, args.buildA_true, args.gcn_depth, args.num_nodes,
                  device, dropout=args.dropout, subgraph_size=args.subgraph_size,
                  node_dim=args.node_dim, dilation_exponential=args.dilation_exponential,
                  conv_channels=args.conv_channels, residual_channels=args.residual_channels,
                  skip_channels=args.skip_channels, end_channels= args.end_channels,
                  seq_length=args.seq_in_len, in_dim=args.in_dim, out_dim=args.seq_out_len,
                  layers=args.layers, propalpha=args.propalpha, tanhalpha=args.tanhalpha, layer_norm_affline=False)
model = model.to(device)
print(args)
print('The recpetive field size is', model.receptive_field)
nParams = sum([p.nelement() for p in model.parameters()])
print('Number of model parameters is', nParams)
# Trainer wraps the optimizer, gradient clipping and curriculum learning.
engine = Trainer(model, args.learning_rate, args.weight_decay, args.clip, args.step_size1, args.seq_out_len, device, args.cl)
print("start training...",flush=True)
his_loss =[]          # per-epoch mean validation loss
val_time = []         # per-epoch validation wall time
train_time = []       # per-epoch training wall time
minl = 1e5            # best (lowest) validation loss seen so far
# pdb.set_trace()
# Main training loop: for each epoch, train on shuffled batches (optionally
# split over node subsets), then evaluate on the validation set and
# checkpoint whenever validation loss improves.
for i in range(1,args.epochs+1):
    train_loss = []
    train_mape = []
    train_rmse = []
    t1 = time.time()
    data_dict['train_loader'].shuffle()
    for iter, (x, y) in enumerate(data_dict['train_loader'].get_iterator()):
        trainx = torch.Tensor(x).to(device)
        trainx= trainx.transpose(1, 3)
        trainy = torch.Tensor(y).to(device)
        trainy = trainy.transpose(1, 3)
        # pdb.set_trace()
        # Re-draw the node permutation every step_size2 iterations so that
        # node sub-graphs change over the course of training.
        if iter%args.step_size2==0:
            perm = np.random.permutation(range(args.num_nodes))
        num_sub = int(args.num_nodes/args.num_split)
        for j in range(args.num_split):
            # Select the j-th node split; the last split takes the remainder.
            if j != args.num_split-1:
                id = perm[j * num_sub:(j + 1) * num_sub]
            else:
                id = perm[j * num_sub:]
            id = torch.tensor(id).to(device)
            tx = trainx[:, :, id, :]
            ty = trainy[:, :, id, :]
            metrics = engine.train(tx, ty[:,0,:,:],id)
            train_loss.append(metrics[0])
            train_mape.append(metrics[1])
            train_rmse.append(metrics[2])
        if iter % args.print_every == 0 :
            log = 'Iter: {:03d}, Train Loss: {:.4f}, Train MAPE: {:.4f}, Train RMSE: {:.4f}'
            print(log.format(iter, train_loss[-1], train_mape[-1], train_rmse[-1]),flush=True)
    t2 = time.time()
    train_time.append(t2-t1)
    #validation
    valid_loss = []
    valid_mape = []
    valid_rmse = []
    s1 = time.time()
    for iter, (x, y) in enumerate(data_dict['val_loader'].get_iterator()):
        testx = torch.Tensor(x).to(device)
        testx = testx.transpose(1, 3)
        testy = torch.Tensor(y).to(device)
        testy = testy.transpose(1, 3)
        metrics = engine.eval(testx, testy[:,0,:,:])
        valid_loss.append(metrics[0])
        valid_mape.append(metrics[1])
        valid_rmse.append(metrics[2])
    s2 = time.time()
    log = 'Epoch: {:03d}, Inference Time: {:.4f} secs'
    print(log.format(i,(s2-s1)))
    val_time.append(s2-s1)
    mtrain_loss = np.mean(train_loss)
    mtrain_mape = np.mean(train_mape)
    mtrain_rmse = np.mean(train_rmse)
    mvalid_loss = np.mean(valid_loss)
    mvalid_mape = np.mean(valid_mape)
    mvalid_rmse = np.mean(valid_rmse)
    his_loss.append(mvalid_loss)
    log = 'Epoch: {:03d}, Train Loss: {:.4f}, Train MAPE: {:.4f}, Train RMSE: {:.4f}, Valid Loss: {:.4f}, Valid MAPE: {:.4f}, Valid RMSE: {:.4f}, Training Time: {:.4f}/epoch'
    print(log.format(i, mtrain_loss, mtrain_mape, mtrain_rmse, mvalid_loss, mvalid_mape, mvalid_rmse, (t2 - t1)),flush=True)
    # Checkpoint whenever the mean validation loss improves.
    if mvalid_loss<minl:
        torch.save(engine.model.state_dict(), args.save + "/exp" + str(args.expid) + '_' + str(i) +'_' + ".pth")
        minl = mvalid_loss
print("Average Training Time: {:.4f} secs/epoch".format(np.mean(train_time)))
print("Average Inference Time: {:.4f} secs".format(np.mean(val_time)))
# Restore the checkpoint with the lowest validation loss (epochs are 1-based,
# his_loss indices are 0-based, hence bestid+1 in the file name).
bestid = np.argmin(his_loss)
engine.model.load_state_dict(torch.load(args.save + "/exp" + str(args.expid) + '_' + str(bestid+1) +'_' + ".pth"))
print("Training finished")
print("The valid loss on best model is", str(round(his_loss[bestid],4)))
# #valid data
# outputs = []
# realy = torch.Tensor(data_dict['y_val']).to(device)
# realy = realy.transpose(1,3)[:,0,:,:]
# for iter, (x, y) in enumerate(data_dict['val_loader'].get_iterator()):
#     testx = torch.Tensor(x).to(device)
#     testx = testx.transpose(1,3)
#     with torch.no_grad():
#         preds = engine.model(testx)
#         preds = preds.transpose(1,3)
#     outputs.append(preds.squeeze())
# yhat = torch.cat(outputs,dim=0)
# yhat = yhat[:realy.size(0),...]
# pred = yhat
# vmae, vmape, vrmse = metric(pred,realy)
#test data
# Evaluate the best checkpoint on the test set.
outputs = []
realy = torch.Tensor(data_dict['y_test']).to(device)
realy = realy.transpose(1, 3)[:, 0, :, :]
for iter, (x, y) in enumerate(data_dict['test_loader'].get_iterator()):
    testx = torch.Tensor(x).to(device)
    testx = testx.transpose(1, 3)
    with torch.no_grad():
        preds = engine.model(testx)
        preds = preds.transpose(1, 3)
    outputs.append(preds.squeeze())
# Undo normalization so metrics are computed in the original value space.
y_truth = uctb_data_loader.normalizer.inverse_transform(uctb_data_loader.test_y)
yhat = torch.cat(outputs, dim=0).cpu().numpy()
# The last batch may be padded by the iterator; trim to the true length.
yhat = yhat[:realy.size(0), ...]
yhat = uctb_data_loader.normalizer.inverse_transform(yhat)
yhat = yhat[..., np.newaxis]
from UCTB.evaluation.metric import rmse, mape, mae
print('Test RMSE', rmse(yhat, y_truth, 1))
print('Test MAE', mae(yhat, y_truth, 1))
print('Test MAPE', mape(yhat, y_truth, 1))
# Bug fix: the output file names were hard-coded to 'MTGNN_Bike_Chicago_*'
# regardless of the --dataset/--city arguments; derive them from args so
# runs on different datasets do not overwrite each other.
np.save('MTGNN_{}_{}_test_pred'.format(args.dataset, args.city), yhat)
np.save('MTGNN_{}_{}_test_truth'.format(args.dataset, args.city), y_truth)


================================================
FILE: Experiments/MTGNN/Runner.py
================================================


================================================
FILE: Experiments/MultiStepPredict/Code/DirRec_ARIMA.py
================================================
import time
import numpy as np
import pandas as pd
from sklearn.metrics import mean_absolute_error as MAE

from UCTB.model import ARIMA
from UCTB.dataset import NodeTrafficLoader
from UCTB.evaluation import metric


# params
dataset_name = "Bike_NYC"
model_name = "DirRec_ARIMA"
output_path = "../Outputs/"+model_name+"-"+dataset_name

# number of future steps to forecast
n_pred = 12

data_loader = NodeTrafficLoader(dataset='Bike', city='NYC', closeness_len=24, period_len=0, trend_len=0, target_length=n_pred, with_lm=False, normalize=False)

start = time.time()

# Fit one ARIMA model per station on that station's training closeness
# series, then forecast n_pred steps for each of its test windows.
test_prediction_collector = []
for i in range(data_loader.station_number):
    try:
        model_obj = ARIMA(time_sequence=data_loader.train_closeness[:, i, -1, 0],
                          order=[6, 0, 1], seasonal_order=[0, 0, 0, 0])
        test_prediction = model_obj.predict(time_sequences=data_loader.test_closeness[:, i, :, 0],
                                            forecast_step=n_pred)
    except Exception as e:
        # Best-effort fallback when ARIMA fails to converge: use the last
        # observed closeness value as the prediction.
        # NOTE(review): this fallback slice has a trailing singleton step
        # dimension while a successful forecast presumably has n_pred steps —
        # confirm the shapes stay consistent across stations.
        print('Converge failed with error', e)
        print('Using last as prediction')
        test_prediction = data_loader.test_closeness[:, i, -1:, :]
    test_prediction_collector.append(test_prediction)
    print('Station', i, 'finished')

# [station, time, step] -> [time, station, step]
predict_list = np.array(test_prediction_collector)
predict_list = predict_list.transpose([1, 0, 2])

print('Total time cost is %.3f' % float(time.time()-start))
prediction = predict_list
# Clip negative forecasts to zero (demand counts cannot be negative).
prediction = np.where(prediction>0, prediction, 0)
target = data_loader.test_y
# Evaluate each prediction horizon separately (rows indexed 1..n_pred).
evaluation_result = pd.DataFrame(columns=["MAE", "RMSE", "MAPE"], index=range(1, n_pred+1))
for i in range(n_pred):
    # reshape
    cur_prediction = prediction[:,:,i]
    cur_target = target[:,:,i]
    # result
    mae = MAE(cur_prediction, cur_target)
    rmse = metric.rmse(cur_prediction, cur_target)
    mape = metric.mape(cur_prediction, cur_target, threshold=0.1)
    # save
    evaluation_result.loc[i+1, "MAE"] = mae
    evaluation_result.loc[i+1, "RMSE"] = rmse
    evaluation_result.loc[i+1, "MAPE"] = mape
    # print
    print("Step %02d, MAE: %.4f, RMSE: %.4f, MAPE:%.4f" % (i+1, mae, rmse, mape))

# save
np.save(output_path + '-prediction.npy', prediction)
np.save(output_path + '-target.npy', target)
evaluation_result.to_csv(output_path + '-evaluation.csv', float_format="%.4f")

================================================
FILE: Experiments/MultiStepPredict/Code/DirRec_DCRNN.py
================================================
import time
import numpy as np
import pandas as pd
from sklearn.metrics import mean_absolute_error as MAE

from UCTB.dataset import NodeTrafficLoader
from UCTB.model import DCRNN
from UCTB.evaluation import metric
from UCTB.preprocess.GraphGenerator import GraphGenerator

class my_data_loader(NodeTrafficLoader):
    """NodeTrafficLoader that also builds graph matrices for DCRNN.

    On construction it generates the adjacency (``AM``) and Laplacian
    (``LM``) matrices for the graph named by the ``graph`` keyword argument.
    """

    def __init__(self, **kwargs):

        super(my_data_loader, self).__init__(**kwargs)

        # generate LM
        graph_obj = GraphGenerator(graph=kwargs['graph'], data_loader=self)
        self.AM = graph_obj.AM
        self.LM = graph_obj.LM

    def diffusion_matrix(self, filter_type='random_walk'):
        """Return DCRNN diffusion matrices built from the single adjacency
        matrix in ``self.AM``.

        ``random_walk`` yields one (transposed) random-walk transition
        matrix; ``dual_random_walk`` adds the reverse-direction one as well.
        """
        def random_walk(adjacent_mx):
            # D^-1 * A, with zero-degree rows mapped to all-zero rows.
            degree = np.array(adjacent_mx.sum(1))
            inv_degree = np.power(degree, -1).flatten()
            inv_degree[np.isinf(inv_degree)] = 0.
            return np.diag(inv_degree).dot(adjacent_mx)

        assert len(self.AM) == 1

        matrices = []
        if filter_type == "random_walk":
            matrices.append(random_walk(self.AM[0]).T)
        elif filter_type == "dual_random_walk":
            matrices.append(random_walk(self.AM[0]).T)
            matrices.append(random_walk(self.AM[0].T).T)
        return np.array(matrices, dtype=np.float32)


# params
dataset_name = "Bike_NYC"
model_name = "DirRec_DCRNN"
output_path = "../Outputs/"+model_name+"-"+dataset_name
model_dir = "../Outputs/model_dir"
code_version = model_name+"-"+dataset_name
batch_size = 64
# number of future steps to forecast (DirRec trains one model per step)
n_pred = 12
gpu_device = '0'

data_loader = my_data_loader(dataset='Bike', city='NYC', train_data_length='365',
                             closeness_len=6, period_len=7, trend_len=4, target_length=n_pred, graph='Correlation', normalize=True)

start = time.time()

diffusion_matrix = data_loader.diffusion_matrix()

# define n_pred model to train
# DirRec strategy: model i predicts step i; its input is the base
# trend/period/closeness features (stacked along the sequence axis)
# concatenated with the in-sample predictions of models 0..i-1.
model_list = []
temp_predict = []
temp_trainX = np.concatenate((data_loader.train_trend.transpose([0, 2, 1, 3]), data_loader.train_period.transpose([0, 2, 1, 3]), data_loader.train_closeness.transpose([0, 2, 1, 3])), axis=1)
for i in range(n_pred):
    # the input sequence grows by one with each appended prediction
    temp_input_len = data_loader.closeness_len + data_loader.period_len + data_loader.trend_len + i
    temp_code_version = code_version + "-Step_"+ str(i+1)
    if i != 0:
        temp_trainX = np.concatenate((temp_trainX,temp_predict), axis=1)
    temp_model = DCRNN(num_node=data_loader.station_number,
        num_diffusion_matrix=diffusion_matrix.shape[0],
        num_rnn_units=64,
        num_rnn_layers=1,
        max_diffusion_step=2,
        seq_len=temp_input_len,
        use_curriculum_learning=False,
        input_dim=1,
        output_dim=1,
        cl_decay_steps=1000,
        target_len=1,
        lr=1e-4,
        epsilon=1e-3,
        optimizer_name='Adam',
        code_version=temp_code_version,
        model_dir=model_dir,
        gpu_device=gpu_device)
    # tf-graph
    temp_model.build()
    # training
    temp_model.fit(inputs=temp_trainX,
        diffusion_matrix=diffusion_matrix,
        target=data_loader.train_y[:, :, i].reshape([-1, 1, data_loader.station_number, 1]),
        batch_size=batch_size,
        sequence_length=data_loader.train_sequence_len,
        early_stop_length=100,
        max_epoch=1000)
    # save
    model_list.append(temp_model)
    # prediction (in-sample, used as extra input for the next step's model)
    temp_predict = temp_model.predict(
        inputs=temp_trainX,
        diffusion_matrix=diffusion_matrix,
        target=data_loader.train_y[:, :, i].reshape([-1, 1, data_loader.station_number, 1]),
        sequence_length=data_loader.train_sequence_len,
        output_names=['prediction']
    )
    # predict shape is [train_sequence_len, output_dim, station_number]
    temp_predict = temp_predict['prediction']
    temp_predict = temp_predict.reshape((temp_predict.shape[0], temp_predict.shape[1], temp_predict.shape[2], 1))

# use n_pred model to predict n_pred step
predict_list = []
temp_predict = []
temp_testX = np.concatenate((data_loader.test_trend.transpose([0, 2, 1, 3]), data_loader.test_period.transpose([0, 2, 1, 3]), data_loader.test_closeness.transpose([0, 2, 1, 3])), axis=1)
for i in range(n_pred):
    temp_model = model_list[i]
    # append the previous step's prediction to the input sequence
    if i != 0:
        temp_testX = np.concatenate((temp_testX, temp_predict), axis=1)
    temp_predict = temp_model.predict(
        inputs=temp_testX,
        diffusion_matrix=diffusion_matrix,
        target=data_loader.test_y[:, :, i].reshape([-1, 1, data_loader.station_number, 1]),
        sequence_length=data_loader.test_sequence_len,
        output_names=['prediction']
    )
    temp_predict = temp_predict['prediction']
    predict_list.append(temp_predict)
    temp_predict = temp_predict.reshape((temp_predict.shape[0], temp_predict.shape[1], temp_predict.shape[2], 1))

print('Total time cost is %.3f' % float(time.time()-start))

# Evaluation
# Reorder to [time, station, step] and undo min-max normalization.
predict_list = np.array(predict_list)
predict_list = predict_list.transpose([1, 3, 0, 2])
predict_list = predict_list.reshape((predict_list.shape[0], predict_list.shape[1], predict_list.shape[2]))
prediction = data_loader.normalizer.min_max_denormal(predict_list)
# Clip negative forecasts to zero (demand counts cannot be negative).
prediction = np.where(prediction>0, prediction, 0)
target = data_loader.normalizer.min_max_denormal(data_loader.test_y)

# Evaluate each prediction horizon separately (rows indexed 1..n_pred).
evaluation_result = pd.DataFrame(columns=["MAE", "RMSE", "MAPE"], index=range(1, n_pred+1))
for i in range(n_pred):
    # reshape
    cur_prediction = prediction[:,:,i]
    cur_target = target[:,:,i]
    # result
    mae = MAE(cur_prediction, cur_target)
    rmse = metric.rmse(cur_prediction, cur_target)
    mape = metric.mape(cur_prediction, cur_target, threshold=0.1)
    # save
    evaluation_result.loc[i+1, "MAE"] = mae
    evaluation_result.loc[i+1, "RMSE"] = rmse
    evaluation_result.loc[i+1, "MAPE"] = mape
    # print
    print("Step %02d, MAE: %.4f, RMSE: %.4f, MAPE:%.4f" % (i+1, mae, rmse, mape))

# save
np.save(output_path + '-prediction.npy', prediction)
np.save(output_path + '-target.npy', target)
evaluation_result.to_csv(output_path + '-evaluation.csv', float_format="%.4f")

================================================
FILE: Experiments/MultiStepPredict/Code/DirRec_STMeta.py
================================================
import time
import numpy as np
import pandas as pd
from sklearn.metrics import mean_absolute_error as MAE

from UCTB.dataset import NodeTrafficLoader
from UCTB.model import STMeta
from UCTB.evaluation import metric
from UCTB.preprocess.GraphGenerator import GraphGenerator


# params
dataset_name = "Bike_NYC"
model_name = "DirRec_STMeta"
output_path = "../Outputs/"+model_name+"-"+dataset_name
model_dir = "../Outputs/model_dir"
code_version = model_name+"-"+dataset_name
# number of future steps to forecast (DirRec trains one model per step)
n_pred = 12
gpu_device = '1'

# Config data loader
data_loader = NodeTrafficLoader(dataset='Bike', city='NYC', closeness_len=6, period_len=7, trend_len=4, target_length=n_pred, normalize=True)

start = time.time()

# Build Graph
graph_obj = GraphGenerator(graph='Correlation', data_loader=data_loader)

# define n_pred model to train
# DirRec strategy: model i predicts step i; its closeness input is the
# original closeness window extended with the in-sample predictions of
# models 0..i-1 (hence closeness_len grows by one each step).
model_list = []
temp_predict = []
temp_closeness_feature = data_loader.train_closeness
for i in range(n_pred):
    temp_closeness_len = data_loader.closeness_len + i
    temp_code_version = code_version + "-Step_"+ str(i+1)
    if i != 0:
        temp_closeness_feature = np.concatenate((temp_closeness_feature, temp_predict), axis=2)
    temp_model = STMeta(closeness_len=temp_closeness_len,
                    period_len=data_loader.period_len,
                    trend_len=data_loader.trend_len,
                    num_node=data_loader.station_number,
                    num_graph=graph_obj.LM.shape[0],
                    external_dim=data_loader.external_dim,
                    code_version=temp_code_version,
                    model_dir=model_dir,
                    gpu_device=gpu_device)
    # Build tf-graph
    temp_model.build()
    # Training
    temp_model.fit(closeness_feature=temp_closeness_feature,
                period_feature=data_loader.train_period,
                trend_feature=data_loader.train_trend,
                laplace_matrix=graph_obj.LM,
                target=data_loader.train_y[:,:,i].reshape((-1, data_loader.station_number, 1)),
                external_feature=data_loader.train_ef,
                sequence_length=data_loader.train_sequence_len,
                auto_load_model = False)
    # save
    model_list.append(temp_model)
    # prediction (in-sample, used as extra input for the next step's model)
    temp_predict = temp_model.predict(closeness_feature=temp_closeness_feature,
                period_feature=data_loader.train_period,
                trend_feature=data_loader.train_trend,
                laplace_matrix=graph_obj.LM,
                target=data_loader.train_y[:,:,i].reshape((-1, data_loader.station_number, 1)),
                external_feature=data_loader.train_ef,
                output_names=['prediction'],
                sequence_length=data_loader.train_sequence_len)
    # predict shape is [sequence_len, station_num, 1]
    temp_predict = temp_predict['prediction']
    temp_predict = temp_predict.reshape((temp_predict.shape[0], temp_predict.shape[1], 1, 1))

# use n_pred model to predict n_pred step
predict_list = []
temp_predict = []
temp_closeness_feature = data_loader.test_closeness
for i in range(n_pred):
    temp_model = model_list[i]
    # append the previous step's prediction to the closeness window
    if i != 0:
        temp_closeness_feature = np.concatenate((temp_closeness_feature, temp_predict), axis=2)
    # prediction
    temp_predict = temp_model.predict(closeness_feature=temp_closeness_feature,
                period_feature=data_loader.test_period,
                trend_feature=data_loader.test_trend,
                laplace_matrix=graph_obj.LM,
                target=data_loader.test_y[:,:,i].reshape((-1, data_loader.station_number, 1)),
                external_feature=data_loader.test_ef,
                output_names=['prediction'],
                sequence_length=data_loader.test_sequence_len)
    temp_predict = temp_predict['prediction']
    # predict shape is [sequence_len, station_num, 1]
    predict_list.append(temp_predict)
    temp_predict = temp_predict.reshape((temp_predict.shape[0], temp_predict.shape[1], 1, 1))

print('Total time cost is %.3f' % float(time.time()-start))

# Evaluation
# Stack per-step predictions into [time, station, step], then undo
# min-max normalization.
predict_list = np.concatenate(predict_list, axis=2)
prediction = data_loader.normalizer.min_max_denormal(predict_list)
# Clip negative forecasts to zero (demand counts cannot be negative).
prediction = np.where(prediction>0, prediction, 0)
target = data_loader.normalizer.min_max_denormal(data_loader.test_y)

# Evaluate each prediction horizon separately (rows indexed 1..n_pred).
evaluation_result = pd.DataFrame(columns=["MAE", "RMSE", "MAPE"], index=range(1, n_pred+1))
for i in range(n_pred):
    # reshape
    cur_prediction = prediction[:,:,i]
    cur_target = target[:,:,i]
    # result
    mae = MAE(cur_prediction, cur_target)
    rmse = metric.rmse(cur_prediction, cur_target)
    mape = metric.mape(cur_prediction, cur_target, threshold=0.1)
    # save
    evaluation_result.loc[i+1, "MAE"] = mae
    evaluation_result.loc[i+1, "RMSE"] = rmse
    evaluation_result.loc[i+1, "MAPE"] = mape
    # print
    print("Step %02d, MAE: %.4f, RMSE: %.4f, MAPE:%.4f" % (i+1, mae, rmse, mape))

# save
np.save(output_path + '-prediction.npy', prediction)
np.save(output_path + '-target.npy', target)
evaluation_result.to_csv(output_path + '-evaluation.csv', float_format="%.4f")
    


================================================
FILE: Experiments/MultiStepPredict/Code/DirRec_XGBoost.py
================================================
import time
import numpy as np
import pandas as pd
from sklearn.metrics import mean_absolute_error as MAE

from UCTB.dataset import NodeTrafficLoader
from UCTB.model import XGBoost
from UCTB.evaluation import metric


# params
dataset_name = "Bike_NYC"
model_name = "DirRec_XGBoost"
output_path = "../Outputs/"+model_name+"-"+dataset_name
model_dir = "../Outputs/model_dir"
code_version = model_name+"-"+dataset_name
batch_size = 64
# number of future steps to forecast (DirRec trains one model per step)
n_pred = 12
gpu_device = '0'

data_loader = NodeTrafficLoader(dataset='Bike', city='NYC', closeness_len=6, period_len=7, trend_len=4, target_length=n_pred, with_lm=False, normalize=False)

start = time.time()

# define (station_number * n_pred) model to train
# DirRec strategy: one XGBoost model per (step, station) pair; the input for
# step i is the base trend/period/closeness features concatenated with the
# in-sample predictions of steps 0..i-1.
model_list = []
temp_predict = []
node_predict = []
temp_trainX = np.concatenate((data_loader.train_trend, data_loader.train_period, data_loader.train_closeness), axis = 2)
for i in range(n_pred):
    model_list.append([])
    if i != 0:
        temp_trainX = np.concatenate((temp_trainX,temp_predict), axis=2)
    temp_predict = []
    for j in range(data_loader.station_number):
        print('Step %d, Station %d' % (i, j))
        # define
        temp_model = XGBoost(n_estimators=100, max_depth=3, objective='reg:squarederror')
        # train
        temp_model.fit(temp_trainX[:, j, :, 0], data_loader.train_y[:, j, i])
        # save
        model_list[i].append(temp_model)
        # predict (in-sample, used as extra input for the next step's models)
        node_predict = temp_model.predict(temp_trainX[:, j, :, 0])
        temp_predict.append(node_predict.reshape((-1, 1, 1)))
    # temp_predict shape is [sequence_len, station_number, 1]
    temp_predict = np.concatenate(temp_predict, axis=1)
    temp_predict = temp_predict.reshape((temp_predict.shape[0], temp_predict.shape[1], 1, 1))


# Use the n_pred trained model groups to predict the next n_pred steps
# (translated from original comment "使用H个模型预测得到未来H步").
predict_list = []
temp_predict = []
node_predict = []
temp_testX = np.concatenate((data_loader.test_trend, data_loader.test_period, data_loader.test_closeness), axis = 2)
for i in range(n_pred):
    # append the previous step's prediction to the feature window
    if i != 0:
        temp_testX = np.concatenate((temp_testX,temp_predict), axis=2)
    temp_predict = []
    for j in range(data_loader.station_number):
        temp_model = model_list[i][j]
        node_predict = temp_model.predict(temp_testX[:, j, :, 0])
        temp_predict.append(node_predict.reshape((-1, 1, 1)))
    # temp_predict shape is [sequence_len, station_number, 1]
    temp_predict = np.concatenate(temp_predict, axis=1)
    predict_list.append(temp_predict)
    temp_predict = temp_predict.reshape((temp_predict.shape[0], temp_predict.shape[1], 1, 1))

print('Total time cost is %.3f' % float(time.time()-start))
# Evaluation
# Stack per-step predictions into [time, station, step] and clip negatives.
predict_list = np.concatenate(predict_list, axis=2)
prediction = np.where(predict_list>0, predict_list, 0)
target = data_loader.test_y
# Evaluate each prediction horizon separately (rows indexed 1..n_pred).
evaluation_result = pd.DataFrame(columns=["MAE", "RMSE", "MAPE"], index=range(1, n_pred+1))
for i in range(n_pred):
    # reshape
    cur_prediction = prediction[:,:,i]
    cur_target = target[:,:,i]
    # result
    mae = MAE(cur_prediction, cur_target)
    rmse = metric.rmse(cur_prediction, cur_target)
    mape = metric.mape(cur_prediction, cur_target, threshold=0.1)
    # save
    evaluation_result.loc[i+1, "MAE"] = mae
    evaluation_result.loc[i+1, "RMSE"] = rmse
    evaluation_result.loc[i+1, "MAPE"] = mape
    # print
    print("Step %02d, MAE: %.4f, RMSE: %.4f, MAPE:%.4f" % (i+1, mae, rmse, mape))

# save
np.save(output_path + '-prediction.npy', prediction)
np.save(output_path + '-target.npy', target)
evaluation_result.to_csv(output_path + '-evaluation.csv', float_format="%.4f")

================================================
FILE: Experiments/MultiStepPredict/Code/viz.py
================================================
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np

def readMetric(metric_name, eva_dir, horizon_num):
    """Read per-horizon evaluation results from multiple CSV files.

    Each CSV file has one row per prediction horizon (time step) and one
    column per evaluation metric (MAE/RMSE/MAPE).

    Args:
        metric_name: list of metric column names, e.g. ["MAE", "RMSE", "MAPE"].
        eva_dir: list of CSV file paths, one per model.
        horizon_num: number of prediction horizons (rows) to read.

    Returns:
        np.ndarray of shape (eva_num, horizon_num, model_num).
        (The original docstring claimed (eva_num, model_num, horizon_num),
        which did not match the actual layout consumed by ``plot``.)
    """
    eva_num = len(metric_name)
    model_num = len(eva_dir)
    result = np.zeros((eva_num, horizon_num, model_num))
    for m, path in enumerate(eva_dir):
        # cur_eva is (horizon, metric); transpose into (metric, horizon).
        cur_eva = pd.read_csv(path, header=0, index_col=0).values
        result[:, :, m] = cur_eva[:horizon_num, :eva_num].T
    return result


def plot(eva_metric, metric_name, model_name, dataset_name, horizon_num):
    """Draw one line chart per metric comparing all models across horizons.

    Each figure is saved to ``../Figure/<dataset>_<metric>.png``.
    """
    num_metrics = len(metric_name)
    num_models = len(model_name)
    horizons = list(range(1, horizon_num + 1))
    color_list = ['cornflowerblue', 'mediumorchid', 'forestgreen', 'cyan', 'darkorange', 'chocolate', 'red']
    marker_list = ['^', 'o', 'v', '<', '>', '*', 's']

    for e in range(num_metrics):
        fig = plt.figure(0, dpi=300, figsize=(8, 5))
        plt.title("multi step " + metric_name[e])
        plt.xlabel('Horizon')
        plt.ylabel(metric_name[e] + ' on ' + dataset_name)
        # NOTE(review): styles are picked with a negative index (-m), so model 0
        # takes the first color/marker and later models count backwards from the
        # end of the lists — confirm this ordering is intentional.
        for m in range(num_models):
            plt.plot(horizons, eva_metric[e, :, m], marker=marker_list[-m], color=color_list[-m], markersize=8)
        plt.legend(model_name)
        plt.savefig('../Figure/' + dataset_name + '_' + metric_name[e] + '.png')
        plt.close(0)


if __name__=="__main__":
    # Experiment configuration: 12-step-ahead forecasts on Bike_NYC,
    # comparing the DirRec variants of four base models.
    horizon_num = 12
    dataset_name = "Bike_NYC"
    metric_name = ["MAE", "RMSE", "MAPE"]
    # model_name = ["DirRec_ARIMA","DirRec_XGBoost","DirRec_DCRNN","DirRec_STMeta"]
    model_name = ["DirRec_ARIMA","DirRec_XGBoost","DirRec_DCRNN_mini","DirRec_STMeta_mini"]
    # One evaluation CSV per model, written by the experiment scripts.
    eva_dir = ["../Outputs/" + m + "-" + dataset_name + "-evaluation.csv" for m in model_name]
    # Read the tables and render the comparison figures.
    eva_metric = readMetric(metric_name, eva_dir, horizon_num)
    plot(eva_metric, metric_name, model_name, dataset_name, horizon_num)


================================================
FILE: Experiments/MultiStepPredict/README.md
================================================
## Method

The implementation of multi-step prediction refers to the following survey.

> An N H, Anh D T. Comparison of strategies for multi-step-ahead prediction of time series using neural network[C]//2015 International Conference on Advanced Computing and Applications (ACOMP). IEEE, 2015: 142-149.

Specifically, the paper shows that the multi-step prediction based on the `DirRec` strategy achieves the best results on multiple datasets and time steps. The core strategy of `DirRec` is to use a different model to predict each time step in the future, which augments the input of the next prediction by adding the prediction value of the previous step. The formula description is as follows:
$$
\hat{y}_{N+h}=
\begin{cases}
\hat{f}_{h}(y_{N},...,y_{N-d+1})\ \ &if \ \ h=1 \\
\hat{f}_{h}(\hat{y}_{N+h-1},...,\hat{y}_{N+1},y_{N},...,y_{N-d+1}) & if \ \ h\in \{2,...,H\}
\end{cases}
$$
The architecture is shown below:

<img src="Figure/DirRec_Strategy.png" alt="DirRec_Strategy" style="zoom:70%;" />

## Changes to the original code

### UCTB/Dataset/data_loader

The original `target_length` defaults to 1, but the `target_length` parameter does not take effect when arguments are passed down: a constant `1` is always forwarded instead. Therefore, the original lines 106~109 are modified as follows:

```python
        # init move sample obj
        self.st_move_sample = ST_MoveSample(closeness_len=closeness_len,
                                            period_len=period_len,
                                            trend_len=trend_len, target_length=target_length, daily_slots=self.daily_slots)
```

### UCTB/model/ARIMA

The modification of ARIMA is concentrated in the prediction part. The shape returned by the original prediction method in multi-step prediction is `[math.ceil(len(time_sequences) / forecast_step), forecast_step]`, but the output shape we need is `[len(time_sequences), forecast_step]`. Therefore, the `predict` function in the `ARIMA` class under `UCTB.model` has been modified as follows:

```python
    def predict(self, time_sequences, forecast_step=1):
        '''
        Argues:
            time_sequences: The input time_series features.
            forecast_step: The number of predicted future steps. Default: 1
        
        :return: Prediction results with shape of (len(time_sequence)/forecast_step,forecast_step=,1).
        :type: np.ndarray
        '''
        result = []
        """ origin predict method, output shape is [math.ceil(len(time_sequences) / forecast_step), forecast_step]
        for i in range(0, len(time_sequences), forecast_step):
            fs = forecast_step if ((i + forecast_step) < len(time_sequences)) else (len(time_sequences) - i)
            model = sm.tsa.SARIMAX(time_sequences[i], order=self.order, seasonal_order=self.seasonal_order)
            model_res = model.filter(self.model_res.params)
            p = model_res.forecast(fs).reshape([-1, 1])
            result.append(p)
        """
        # new predict method, output shape is [len(time_sequences), forecast_step]
        for i in range(len(time_sequences)):
            model = sm.tsa.SARIMAX(time_sequences[i], order=self.order, seasonal_order=self.seasonal_order)
            model_res = model.filter(self.model_res.params)
            p = model_res.forecast(forecast_step)
            p = p.reshape([-1, forecast_step])
            result.append(p)
        if forecast_step != 1:
            result = np.concatenate(result, axis=0)
        return np.array(result, dtype=np.float32)
```

### UCTB/model/XGBoost

The original parameter name contains spaces, and an error will be reported at runtime. The specific error message is as follows:

> xgboost.core.XGBoostError: [12:34:15] ../src/learner.cc:553: Invalid parameter "verbosity " contains whitespace.

Specifically, modify the original lines 23 to 28 as follows:

```python
        self.param = {
            'max_depth': max_depth,
            'verbosity': verbosity,
            'objective': objective,
            'eval_metric': eval_metric
        }
```

## Run

Set the parameters. Each model takes different input parameters; see the code for details. What all models share is the prediction-step setting, which is uniformly set to 12 in the current experiments.

```python
n_pred = 12
gpu_device = '0'
```

Switch directory

```shell
cd Experiments/MultiStepPredict/Code
```

Run

```Shell
nohup python -u DirRec_STMeta.py >DirRec_STMeta.log 2>&1 &
```

After the execution, it will output `DirRec_STMeta.log` in the current folder, and output `DirRec_STMeta-Bike_NYC-evaluation.csv`, `DirRec_STMeta-Bike_NYC-prediction.npy`, `DirRec_STMeta-Bike_NYC-target.npy` in the `Outputs` directory.

In addition, a visualization method is provided to easily read the evaluation results of different models and visualize them together for comparison. Execute the following command, and the visualization results `Bike_NYC_MAE.png`, `Bike_NYC_RMSE.png`, and `Bike_NYC_MAPE.png` will be generated in the `Figure` directory.

```shell
python viz.py
```

================================================
FILE: Experiments/ParameterSearch/ARIMA.py
================================================
import numpy as np

from UCTB.model import ARIMA
from UCTB.dataset import NodeTrafficLoader
from UCTB.evaluation import metric

data_loader = NodeTrafficLoader(dataset='ChargeStation', city='Beijing')

# Per-station predictions, concatenated along the last axis at the end.
prediction = []

for i in range(data_loader.station_number):
    print('*************************************************************')
    print('Station', i)

    try:
        # Fit a (30, 0, 2)-order ARIMA on this station's training series.
        model_obj = ARIMA(data_loader.train_data[:, i], [30, 0, 2])
        p = model_obj.predict(data_loader.test_x[:, :, i, 0])
    except Exception as err:
        # Fall back to an all-zero forecast when the model fails to converge.
        print('Converge failed with error', err)
        print('Using zero as prediction')
        p = np.zeros([data_loader.test_x[:, :, i, 0].shape[0], 1, 1])

    prediction.append(p)
    # Progress indicator: shape of everything predicted so far.
    print(np.concatenate(prediction, axis=-1).shape)

prediction = np.concatenate(prediction, axis=-1)

print('RMSE', metric.rmse(prediction, data_loader.test_y))

================================================
FILE: Experiments/ParameterSearch/CPT_GBRT.py
================================================
import numpy as np
from UCTB.dataset import NodeTrafficLoader
from sklearn.ensemble import GradientBoostingRegressor
from UCTB.evaluation import metric

dataset = 'ChargeStation'
city = 'Beijing'

data_loader = NodeTrafficLoader(dataset=dataset, city=city, with_lm=False,
                                closeness_len=5, period_len=3, trend_len=4, test_ratio=0.1, normalize=False)

# One prediction array per station, merged on the node axis afterwards.
prediction = []

for i in range(data_loader.station_number):
    print('*************************************************************')
    print('Station', i)

    # One GBRT per station, fed closeness + period + trend features.
    model = GradientBoostingRegressor(n_estimators=540, max_depth=3)

    train_parts = [data_loader.train_closeness[:, 0, i, :],
                   data_loader.train_period[:, 0, i, :],
                   data_loader.train_trend[:, 0, i, :]]
    test_parts = [data_loader.test_closeness[:, 0, i, :],
                  data_loader.test_period[:, 0, i, :],
                  data_loader.test_trend[:, 0, i, :]]

    train_x = np.concatenate(train_parts, axis=-1)
    test_x = np.concatenate(test_parts, axis=-1)

    model.fit(train_x, data_loader.train_y[:, i])

    # Reshape to (samples, 1 node, 1 feature) for the node-axis concat below.
    prediction.append(model.predict(test_x).reshape([-1, 1, 1]))

prediction = np.concatenate(prediction, axis=-2)

print(dataset, city, 'RMSE', metric.rmse(prediction, data_loader.test_y))
print(dataset, city, 'MAPE', metric.mape(prediction, data_loader.test_y, threshold=0))


def show_prediction(prediction, target, station_index, start=0, end=-1):
    """Overlay predicted (blue) and true (red) series for one station.

    Also prints the RMSE and the min/max of each slice for a quick sanity
    check before showing the figure.
    """
    import matplotlib.pyplot as plt

    pred_slice = prediction[start:end, station_index]
    true_slice = target[start:end, station_index]

    plt.plot(pred_slice, 'b')
    plt.plot(true_slice, 'r')

    print(metric.rmse(pred_slice, true_slice))

    print(pred_slice.max(), true_slice.max())
    print(pred_slice.min(), true_slice.min())

    plt.show()

print('Debug')

================================================
FILE: Experiments/ParameterSearch/CPT_HM.py
================================================
import nni

from UCTB.dataset import NodeTrafficLoader
from UCTB.model import HM
from UCTB.evaluation import metric

params = nni.get_next_parameter()

data_loader = NodeTrafficLoader(dataset=params['Dataset'], city=params['City'], with_lm=False, normalize=False, test_ratio=0.1)

# The last `num_test` slots form the test split; the slice of equal length
# right before it serves as the validation split.
num_test = data_loader.test_data.shape[0]
test_start_index = data_loader.traffic_data.shape[0] - num_test
val_start_index = data_loader.traffic_data.shape[0] - num_test * 2

hm_obj = HM(c=int(params['CT']), p=int(params['PT']), t=int(params['TT']))

# Validation forecasts only see data before the test split.
val_prediction = hm_obj.predict(val_start_index, data_loader.traffic_data[:test_start_index],
                                time_fitness=data_loader.dataset.time_fitness)
test_prediction = hm_obj.predict(test_start_index, data_loader.traffic_data, time_fitness=data_loader.dataset.time_fitness)

val_rmse = metric.rmse(val_prediction, data_loader.traffic_data[val_start_index: test_start_index])
test_rmse = metric.rmse(test_prediction, data_loader.test_data)

print(val_rmse, test_rmse)

# Validation RMSE is the tuning objective; test RMSE is reported for reference.
nni.report_final_result({
    'default': val_rmse,
    'test-rmse': test_rmse,
})

================================================
FILE: Experiments/ParameterSearch/CPT_STMeta_Obj.py
================================================
import os
import nni
import numpy as np

from UCTB.dataset import NodeTrafficLoader
from UCTB.model import STMeta_V1
from UCTB.evaluation import metric
from UCTB.model_unit import GraphBuilder
from UCTB.preprocess import is_work_day_china

model_dir_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'model_dir')


def cpt_stmeta_param_parser():
    """Build the argparse parser holding every CPT-STMeta hyper-parameter.

    Defaults mirror the DiDi-Xian configuration.  Defaults are given as
    strings; argparse applies the ``type`` conversion to string defaults, so
    numeric options come back as int/float after ``parse_args``.
    """
    import argparse

    # (flag, default, type) — type None keeps the value as a raw string.
    spec = [
        # data source
        ('--Dataset', 'DiDi', None),
        ('--City', 'Xian', None),
        # network parameters
        ('--CT', '6', int),
        ('--PT', '7', int),
        ('--TT', '4', int),
        ('--K', '1', int),
        ('--L', '1', int),
        ('--Graph', 'Distance-Interaction-Correlation', None),
        ('--GLL', '1', int),
        ('--LSTMUnits', '64', int),
        ('--GALUnits', '64', int),
        ('--GALHeads', '2', int),
        ('--DenseUnits', '32', int),
        ('--Normalize', 'True', str),
        # training data parameters
        ('--DataRange', 'All', None),
        ('--TrainDays', 'All', None),
        # graph thresholds
        ('--TC', '0.7', float),
        ('--TD', '3000', float),
        ('--TI', '100', float),
        # training parameters
        ('--Epoch', '5000', int),
        ('--Train', 'True', str),
        ('--lr', '1e-4', float),
        ('--ESlength', '200', int),
        ('--patience', '0.1', float),
        ('--BatchSize', '128', int),
        # device parameter
        ('--Device', '0,1', str),
        # version control
        ('--Group', 'Xian', None),
        ('--CodeVersion', 'ParamTuner', None),
    ]

    parser = argparse.ArgumentParser(description="Argument Parser")
    for flag, default, cast in spec:
        if cast is None:
            parser.add_argument(flag, default=default)
        else:
            parser.add_argument(flag, default=default, type=cast)
    return parser


parser = cpt_stmeta_param_parser()
args = vars(parser.parse_args())

# Hyper-parameters chosen by the NNI tuner override the CLI defaults.
args.update(nni.get_next_parameter())

model_dir = os.path.join(model_dir_path, args['Group'])
# Bug fix: nni.get_sequence_id() returns an int, so it must be stringified
# before concatenation — the original `args['CodeVersion'] +
# nni.get_sequence_id()` raised TypeError (str + int).
code_version = 'CPT_STMeta_{}_K{}L{}_{}'.format(''.join([e[0] for e in args['Graph'].split('-')]),
                                                args['K'], args['L'],
                                                args['CodeVersion'] + str(nni.get_sequence_id()))

# Config data loader
data_loader = NodeTrafficLoader(dataset=args['Dataset'], city=args['City'],
                                data_range=args['DataRange'], train_data_length=args['TrainDays'],
                                test_ratio=0.1,
                                C_T=int(args['CT']), P_T=int(args['PT']), T_T=int(args['TT']),
                                TI=args['TI'], TD=args['TD'], TC=args['TC'],
                                normalize=True if args['Normalize'] == 'True' else False,
                                graph=args['Graph'], with_lm=True)

# De-normalizer applied at evaluation time (None when training on raw values).
de_normalizer = None if args['Normalize'] == 'False' else data_loader.normalizer.min_max_denormal

CPT_STMeta_Obj = STMeta_V1(num_node=data_loader.station_number,
                           num_graph=data_loader.LM.shape[0],
                           external_dim=data_loader.external_dim,
                           C_T=int(args['CT']), P_T=int(args['PT']), T_T=int(args['TT']),
                           GCN_K=int(args['K']),
                           GCN_layers=int(args['L']),
                           GCLSTM_layers=int(args['GLL']),
                           gal_units=int(args['GALUnits']),
                           gal_num_heads=int(args['GALHeads']),
                           num_hidden_units=int(args['LSTMUnits']),
                           num_filter_conv1x1=int(args['DenseUnits']),
                           lr=float(args['lr']),
                           code_version=code_version,
                           model_dir=model_dir,
                           GPU_DEVICE=args['Device'])

CPT_STMeta_Obj.build()

print(args['Dataset'], args['City'], code_version)
print('Number of trainable variables', CPT_STMeta_Obj.trainable_vars)

# Training (skippable via --Train False so a saved checkpoint can be re-evaluated)
if args['Train'] == 'True':
    CPT_STMeta_Obj.fit(closeness_feature=data_loader.train_closeness,
                       period_feature=data_loader.train_period,
                       trend_feature=data_loader.train_trend,
                       laplace_matrix=data_loader.LM,
                       target=data_loader.train_y,
                       external_feature=data_loader.train_ef,
                       early_stop_method='t-test',
                       early_stop_length=int(args['ESlength']),
                       early_stop_patience=float(args['patience']),
                       batch_size=int(args['BatchSize']),
                       max_epoch=int(args['Epoch']))

# Restore the saved checkpoint before evaluating.
CPT_STMeta_Obj.load(code_version)

# Evaluate
test_error = CPT_STMeta_Obj.evaluate(closeness_feature=data_loader.test_closeness,
                                     period_feature=data_loader.test_period,
                                     trend_feature=data_loader.test_trend,
                                     laplace_matrix=data_loader.LM,
                                     target=data_loader.test_y,
                                     external_feature=data_loader.test_ef,
                                     cache_volume=int(args['BatchSize']),
                                     metrics=[metric.rmse, metric.mape],
                                     de_normalizer=de_normalizer,
                                     threshold=0)

print('Test result', test_error)

# The best validation loss is the tuning objective reported to NNI.
val_loss = CPT_STMeta_Obj.load_event_scalar('val_loss')

best_val_loss = min([e[-1] for e in val_loss])

nni.report_final_result({
    'default': best_val_loss,
    'test-rmse': test_error[0],
    'test-mape': test_error[1]
})


================================================
FILE: Experiments/ParameterSearch/CPT_XGBoost.py
================================================
import nni
import numpy as np

from UCTB.dataset import NodeTrafficLoader_CPT, NodeTrafficLoader
from UCTB.model import XGBoost
from UCTB.evaluation import metric

params = {
    'Dataset': 'Bike',
    'City': 'NYC',
    'CT': 6,
    'PT': 0,
    'TT': 0,
    'max_depth': 10,
    'num_boost_round': 150
}

# params.update(nni.get_next_parameter())

data_loader = NodeTrafficLoader_CPT(dataset=params['Dataset'], city=params['City'],
                                    with_lm=False, test_ratio=0.1, normalize=False,
                                    C_T=int(params['CT']), P_T=int(params['PT']), T_T=int(params['TT']))

test_prediction = []
val_prediction = []

for i in range(data_loader.station_number):
    print('*************************************************************')
    print('Station', i)

    model = XGBoost(max_depth=int(params['max_depth']))

    # Assemble features only for the temporal components that are enabled.
    train_parts = []
    test_parts = []

    if int(params['CT']) > 0:
        train_parts.append(data_loader.train_closeness[:, 0, i, :])
        test_parts.append(data_loader.test_closeness[:, 0, i, :])
    if int(params['PT']) > 0:
        train_parts.append(data_loader.train_period[:, :, i, -1])
        test_parts.append(data_loader.test_period[:, :, i, -1])
    if int(params['TT']) > 0:
        train_parts.append(data_loader.train_trend[:, :, i, -1])
        test_parts.append(data_loader.test_trend[:, :, i, -1])

    train = np.concatenate(train_parts, axis=-1)
    test_x = np.concatenate(test_parts, axis=-1)

    # Hold out a validation slice (same length as test) from the training tail.
    split = len(test_x)
    train_x, val_x = train[:-split], train[-split:]
    train_y, val_y = data_loader.train_y[:-split, i], data_loader.train_y[-split:, i]

    model.fit(train_x, train_y, num_boost_round=int(params['num_boost_round']))

    # Reshape to (samples, 1 node, 1 feature) for the node-axis concat below.
    test_p = model.predict(test_x).reshape([-1, 1, 1])
    val_p = model.predict(val_x).reshape([-1, 1, 1])

    test_prediction.append(test_p)
    val_prediction.append(val_p)

test_prediction = np.concatenate(test_prediction, axis=-2)
val_prediction = np.concatenate(val_prediction, axis=-2)

val_rmse = metric.rmse(val_prediction, data_loader.train_y[-len(data_loader.test_y):])
test_rmse = metric.rmse(test_prediction, data_loader.test_y)

# nni.report_final_result({
#     'default': val_rmse,
#     'test-rmse': test_rmse,
# })

================================================
FILE: Experiments/ParameterSearch/config.yml
================================================
authorName: DiChai
experimentName: parameter_search
trialConcurrency: 2
maxExecDuration: 24h
maxTrialNum: 50
trainingServicePlatform: local
# The path to Search Space
searchSpacePath: search_space.json
useAnnotation: false
tuner:
  builtinTunerName: TPE
# The path and the running command of trial
trial:
  command: python STMeta_V0_Obj.py
  codeDir: .
  gpuNum: 1


================================================
FILE: Experiments/ParameterSearch/hm_config.yml
================================================
authorName: DiChai
experimentName: hm_parameter_search
trialConcurrency: 8
maxExecDuration: 24h
maxTrialNum: 200
trainingServicePlatform: local
# The path to Search Space
searchSpacePath: hm_search_space.json
useAnnotation: false
tuner:
  builtinTunerName: TPE
# The path and the running command of trial
trial:
  command: python CPT_HM.py
  codeDir: .
  gpuNum: 0

================================================
FILE: Experiments/ParameterSearch/hm_search_space.json
================================================
{

    "Dataset": {"_type": "choice", "_value": ["Bike"]},
    "City": {"_type": "choice", "_value": ["DC"]},

    "CT": {"_type":"randint","_value":[0, 6]},
    "PT": {"_type":"randint","_value":[0, 7]},
    "TT": {"_type":"randint","_value":[0, 4]}
}

================================================
FILE: Experiments/ParameterSearch/plot_paper.ipynb
================================================
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "import matplotlib.pyplot as plt"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
    "with open('FedMF-Full.txt', 'r', encoding='utf-8') as f:\n",
    "    fedmf_full = f.readlines()\n",
    "with open('FedMF-Part.txt', 'r', encoding='utf-8') as f:\n",
    "    fedmf_part = f.readlines()\n",
    "with open('MF.txt', 'r', encoding='utf-8') as f:\n",
    "    mf = f.readlines()\n",
    "    \n",
    "fedmf_full = [e.strip('\\n') for e in fedmf_full if e.startswith('loss')]\n",
    "fedmf_full = [float(e.split(' ')[-1]) for e in fedmf_full]\n",
    "\n",
    "fedmf_part = [e.strip('\\n') for e in fedmf_part if e.startswith('loss')]\n",
    "fedmf_part = [float(e.split(' ')[-1]) for e in fedmf_part]\n",
    "\n",
    "mf = [e.strip('\\n') for e in mf if e.startswith('loss')]\n",
    "mf = [float(e.split(' ')[-1]) for e in mf]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 47,
   "metadata": {},
   "outputs": [],
   "source": [
    "fig, axs = plt.subplots()\n",
    "\n",
    "axs.plot(fedmf_full, 'b+-', label='FedMF-Full', linewidth=0.5)\n",
    "axs.plot(fedmf_part, 'g.-', label='FedMF-Part', linewidth=0.5)\n",
    "axs.plot(mf, 'r-', label='Regular-MF', linewidth=0.5)\n",
    "\n",
    "axs.grid()\n",
    "axs.legend(fontsize=15)\n",
    "\n",
    "axs.set_xlabel('Epochs', fontsize=15)\n",
    "axs.set_ylabel('Train Loss', fontsize=15)\n",
    "\n",
    "fig.set_size_inches(10, 5)\n",
    "fig.savefig('%s.png' % 'comparison', dpi=100)\n",
    "plt.close()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.5.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}


================================================
FILE: Experiments/ParameterSearch/results/HM_Bike_NYC.json
================================================
{
    "experimentParameters": {
        "id": "rU6J0i83",
        "revision": 287,
        "execDuration": 854,
        "logDir": "/Users/chaidi/nni/experiments/rU6J0i83",
        "maxSequenceId": 199,
        "params": {
            "authorName": "DiChai",
            "experimentName": "hm_parameter_search",
            "trialConcurrency": 8,
            "maxExecDuration": 86400,
            "maxTrialNum": 200,
            "searchSpace": {
                "Dataset": {
                    "_type": "choice",
                    "_value": [
                        "Bike"
                    ]
                },
                "City": {
                    "_type": "choice",
                    "_value": [
                        "NYC"
                    ]
                },
                "CT": {
                    "_type": "randint",
                    "_value": [
                        0,
                        6
                    ]
                },
                "PT": {
                    "_type": "randint",
                    "_value": [
                        0,
                        7
                    ]
                },
                "TT": {
                    "_type": "randint",
                    "_value": [
                        0,
                        4
                    ]
                }
            },
            "trainingServicePlatform": "local",
            "tuner": {
                "builtinTunerName": "TPE",
                "className": "TPE",
                "checkpointDir": "/Users/chaidi/nni/experiments/rU6J0i83/checkpoint"
            },
            "versionCheck": true,
            "clusterMetaData": [
                {
                    "key": "codeDir",
                    "value": "/Users/chaidi/Documents/UCTB-Package-0.0.5/Experiments/ParameterSearch/."
                },
                {
                    "key": "command",
                    "value": "python CPT_HM.py"
                }
            ]
        },
        "startTime": 1560834878927,
        "endTime": 1560835756258
    },
    "trialMessage": [
        {
            "id": "TVG5A",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 0,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 4,
                    "PT": 7,
                    "TT": 2
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/TVG5A",
            "startTime": 1560834888962,
            "sequenceId": 0,
            "endTime": 1560834916000,
            "finalMetricData": [
                {
                    "timestamp": 1560834914905,
                    "trialJobId": "TVG5A",
                    "parameterId": "0",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.3573408463670296,\"test-rmse\":4.4766049838814865}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834914905,
                    "trialJobId": "TVG5A",
                    "parameterId": "0",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.3573408463670296,\"test-rmse\":4.4766049838814865}"
                }
            ]
        },
        {
            "id": "VRzdk",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 1,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 1,
                    "PT": 6,
                    "TT": 2
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/VRzdk",
            "startTime": 1560834888984,
            "sequenceId": 1,
            "endTime": 1560834915000,
            "finalMetricData": [
                {
                    "timestamp": 1560834914328,
                    "trialJobId": "VRzdk",
                    "parameterId": "1",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.3343664050554174,\"test-rmse\":4.36618297262181}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834914328,
                    "trialJobId": "VRzdk",
                    "parameterId": "1",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.3343664050554174,\"test-rmse\":4.36618297262181}"
                }
            ]
        },
        {
            "id": "lOmsf",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 2,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 4,
                    "PT": 4,
                    "TT": 1
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/lOmsf",
            "startTime": 1560834889005,
            "sequenceId": 2,
            "endTime": 1560834915000,
            "finalMetricData": [
                {
                    "timestamp": 1560834914658,
                    "trialJobId": "lOmsf",
                    "parameterId": "2",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.572369188547026,\"test-rmse\":4.910039558096488}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834914658,
                    "trialJobId": "lOmsf",
                    "parameterId": "2",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.572369188547026,\"test-rmse\":4.910039558096488}"
                }
            ]
        },
        {
            "id": "KMdKS",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 3,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 6,
                    "PT": 4,
                    "TT": 1
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/KMdKS",
            "startTime": 1560834889027,
            "sequenceId": 3,
            "endTime": 1560834916000,
            "finalMetricData": [
                {
                    "timestamp": 1560834915208,
                    "trialJobId": "KMdKS",
                    "parameterId": "3",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.7734189500775583,\"test-rmse\":5.260572896761711}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834915208,
                    "trialJobId": "KMdKS",
                    "parameterId": "3",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.7734189500775583,\"test-rmse\":5.260572896761711}"
                }
            ]
        },
        {
            "id": "nlasg",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 4,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 2,
                    "PT": 4,
                    "TT": 3
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/nlasg",
            "startTime": 1560834889049,
            "sequenceId": 4,
            "endTime": 1560834916000,
            "finalMetricData": [
                {
                    "timestamp": 1560834915414,
                    "trialJobId": "nlasg",
                    "parameterId": "4",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.2491156358278035,\"test-rmse\":4.294810041672708}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834915414,
                    "trialJobId": "nlasg",
                    "parameterId": "4",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.2491156358278035,\"test-rmse\":4.294810041672708}"
                }
            ]
        },
        {
            "id": "kiJSj",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 5,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 1,
                    "PT": 5,
                    "TT": 1
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/kiJSj",
            "startTime": 1560834889076,
            "sequenceId": 5,
            "endTime": 1560834916000,
            "finalMetricData": [
                {
                    "timestamp": 1560834915122,
                    "trialJobId": "kiJSj",
                    "parameterId": "5",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.466012234112674,\"test-rmse\":4.607805766719761}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834915122,
                    "trialJobId": "kiJSj",
                    "parameterId": "5",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.466012234112674,\"test-rmse\":4.607805766719761}"
                }
            ]
        },
        {
            "id": "gLmN4",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 6,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 3,
                    "PT": 7,
                    "TT": 2
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/gLmN4",
            "startTime": 1560834889111,
            "sequenceId": 6,
            "endTime": 1560834916000,
            "finalMetricData": [
                {
                    "timestamp": 1560834915509,
                    "trialJobId": "gLmN4",
                    "parameterId": "6",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.3004494216603684,\"test-rmse\":4.361940866064909}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834915509,
                    "trialJobId": "gLmN4",
                    "parameterId": "6",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.3004494216603684,\"test-rmse\":4.361940866064909}"
                }
            ]
        },
        {
            "id": "dq52J",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 7,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 4,
                    "PT": 0,
                    "TT": 3
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/dq52J",
            "startTime": 1560834889149,
            "sequenceId": 7,
            "endTime": 1560834916000,
            "finalMetricData": [
                {
                    "timestamp": 1560834915194,
                    "trialJobId": "dq52J",
                    "parameterId": "7",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.538208791457419,\"test-rmse\":4.822044041480414}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834915194,
                    "trialJobId": "dq52J",
                    "parameterId": "7",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.538208791457419,\"test-rmse\":4.822044041480414}"
                }
            ]
        },
        {
            "id": "EJ12u",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 8,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 3,
                    "PT": 0,
                    "TT": 2
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/EJ12u",
            "startTime": 1560834929194,
            "sequenceId": 8,
            "endTime": 1560834959000,
            "finalMetricData": [
                {
                    "timestamp": 1560834958287,
                    "trialJobId": "EJ12u",
                    "parameterId": "8",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.5191010428127005,\"test-rmse\":4.81101214229983}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834958287,
                    "trialJobId": "EJ12u",
                    "parameterId": "8",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.5191010428127005,\"test-rmse\":4.81101214229983}"
                }
            ]
        },
        {
            "id": "IS6ox",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 9,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 5,
                    "PT": 2,
                    "TT": 3
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/IS6ox",
            "startTime": 1560834929213,
            "sequenceId": 9,
            "endTime": 1560834959000,
            "finalMetricData": [
                {
                    "timestamp": 1560834958697,
                    "trialJobId": "IS6ox",
                    "parameterId": "9",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.4802802810775693,\"test-rmse\":4.767764316499411}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834958697,
                    "trialJobId": "IS6ox",
                    "parameterId": "9",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.4802802810775693,\"test-rmse\":4.767764316499411}"
                }
            ]
        },
        {
            "id": "j7ezD",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 10,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 6,
                    "PT": 0,
                    "TT": 2
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/j7ezD",
            "startTime": 1560834929233,
            "sequenceId": 10,
            "endTime": 1560834959000,
            "finalMetricData": [
                {
                    "timestamp": 1560834958473,
                    "trialJobId": "j7ezD",
                    "parameterId": "10",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.9931763530716755,\"test-rmse\":5.645536720983396}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834958473,
                    "trialJobId": "j7ezD",
                    "parameterId": "10",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.9931763530716755,\"test-rmse\":5.645536720983396}"
                }
            ]
        },
        {
            "id": "bqWYD",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 11,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 3,
                    "PT": 6,
                    "TT": 1
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/bqWYD",
            "startTime": 1560834929254,
            "sequenceId": 11,
            "endTime": 1560834959000,
            "finalMetricData": [
                {
                    "timestamp": 1560834958536,
                    "trialJobId": "bqWYD",
                    "parameterId": "11",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.4135064227648066,\"test-rmse\":4.593027637181522}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834958536,
                    "trialJobId": "bqWYD",
                    "parameterId": "11",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.4135064227648066,\"test-rmse\":4.593027637181522}"
                }
            ]
        },
        {
            "id": "HUF36",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 12,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 3,
                    "PT": 7,
                    "TT": 3
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/HUF36",
            "startTime": 1560834929277,
            "sequenceId": 12,
            "endTime": 1560834959000,
            "finalMetricData": [
                {
                    "timestamp": 1560834958902,
                    "trialJobId": "HUF36",
                    "parameterId": "12",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.2535923832312394,\"test-rmse\":4.283320320090695}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834958902,
                    "trialJobId": "HUF36",
                    "parameterId": "12",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.2535923832312394,\"test-rmse\":4.283320320090695}"
                }
            ]
        },
        {
            "id": "yF9e5",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 13,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 0,
                    "PT": 0,
                    "TT": 2
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/yF9e5",
            "startTime": 1560834929301,
            "sequenceId": 13,
            "endTime": 1560834958000,
            "finalMetricData": [
                {
                    "timestamp": 1560834957395,
                    "trialJobId": "yF9e5",
                    "parameterId": "13",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.75877939072391,\"test-rmse\":4.573542776546816}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834957395,
                    "trialJobId": "yF9e5",
                    "parameterId": "13",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.75877939072391,\"test-rmse\":4.573542776546816}"
                }
            ]
        },
        {
            "id": "RVjef",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 14,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 3,
                    "PT": 4,
                    "TT": 4
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/RVjef",
            "startTime": 1560834929332,
            "sequenceId": 14,
            "endTime": 1560834959000,
            "finalMetricData": [
                {
                    "timestamp": 1560834958810,
                    "trialJobId": "RVjef",
                    "parameterId": "14",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.2600343679445145,\"test-rmse\":4.31781077879459}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834958810,
                    "trialJobId": "RVjef",
                    "parameterId": "14",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.2600343679445145,\"test-rmse\":4.31781077879459}"
                }
            ]
        },
        {
            "id": "JhLvu",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 15,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 5,
                    "PT": 6,
                    "TT": 2
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/JhLvu",
            "startTime": 1560834929360,
            "sequenceId": 15,
            "endTime": 1560834959000,
            "finalMetricData": [
                {
                    "timestamp": 1560834958937,
                    "trialJobId": "JhLvu",
                    "parameterId": "15",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.4830314962014395,\"test-rmse\":4.72193178462408}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834958937,
                    "trialJobId": "JhLvu",
                    "parameterId": "15",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.4830314962014395,\"test-rmse\":4.72193178462408}"
                }
            ]
        },
        {
            "id": "sJsVd",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 16,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 1,
                    "PT": 1,
                    "TT": 1
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/sJsVd",
            "startTime": 1560834969417,
            "sequenceId": 16,
            "endTime": 1560834998000,
            "finalMetricData": [
                {
                    "timestamp": 1560834997694,
                    "trialJobId": "sJsVd",
                    "parameterId": "16",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.174530800023379,\"test-rmse\":4.231832613503912}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834997694,
                    "trialJobId": "sJsVd",
                    "parameterId": "16",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.174530800023379,\"test-rmse\":4.231832613503912}"
                }
            ]
        },
        {
            "id": "gWvRN",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 17,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 3,
                    "PT": 2,
                    "TT": 3
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/gWvRN",
            "startTime": 1560834969437,
            "sequenceId": 17,
            "endTime": 1560834998000,
            "finalMetricData": [
                {
                    "timestamp": 1560834998068,
                    "trialJobId": "gWvRN",
                    "parameterId": "17",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.2394669627302033,\"test-rmse\":4.3386627034250544}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834998068,
                    "trialJobId": "gWvRN",
                    "parameterId": "17",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.2394669627302033,\"test-rmse\":4.3386627034250544}"
                }
            ]
        },
        {
            "id": "ycNyp",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 18,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 2,
                    "PT": 2,
                    "TT": 2
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/ycNyp",
            "startTime": 1560834969462,
            "sequenceId": 18,
            "endTime": 1560834998000,
            "finalMetricData": [
                {
                    "timestamp": 1560834997900,
                    "trialJobId": "ycNyp",
                    "parameterId": "18",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.198500385778617,\"test-rmse\":4.269447689696311}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834997900,
                    "trialJobId": "ycNyp",
                    "parameterId": "18",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.198500385778617,\"test-rmse\":4.269447689696311}"
                }
            ]
        },
        {
            "id": "EannN",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 19,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 5,
                    "PT": 1,
                    "TT": 4
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/EannN",
            "startTime": 1560834969483,
            "sequenceId": 19,
            "endTime": 1560834998000,
            "finalMetricData": [
                {
                    "timestamp": 1560834997552,
                    "trialJobId": "EannN",
                    "parameterId": "19",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.4458378991034095,\"test-rmse\":4.681977149225176}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834997552,
                    "trialJobId": "EannN",
                    "parameterId": "19",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.4458378991034095,\"test-rmse\":4.681977149225176}"
                }
            ]
        },
        {
            "id": "rcJZn",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 20,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 4,
                    "PT": 1,
                    "TT": 2
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/rcJZn",
            "startTime": 1560834969505,
            "sequenceId": 20,
            "endTime": 1560834995000,
            "finalMetricData": [
                {
                    "timestamp": 1560834995399,
                    "trialJobId": "rcJZn",
                    "parameterId": "20",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.504996080310306,\"test-rmse\":4.8344830263132454}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834995399,
                    "trialJobId": "rcJZn",
                    "parameterId": "20",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.504996080310306,\"test-rmse\":4.8344830263132454}"
                }
            ]
        },
        {
            "id": "ubfna",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 21,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 2,
                    "PT": 6,
                    "TT": 1
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/ubfna",
            "startTime": 1560834969533,
            "sequenceId": 21,
            "endTime": 1560834995000,
            "finalMetricData": [
                {
                    "timestamp": 1560834995465,
                    "trialJobId": "ubfna",
                    "parameterId": "21",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.374924096159413,\"test-rmse\":4.499867384060287}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834995465,
                    "trialJobId": "ubfna",
                    "parameterId": "21",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.374924096159413,\"test-rmse\":4.499867384060287}"
                }
            ]
        },
        {
            "id": "hur1q",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 22,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 1,
                    "PT": 6,
                    "TT": 1
                }
            },
            "logPath": "file://localhost:/Users/chaidi/nni/experiments/rU6J0i83/trials/hur1q",
            "startTime": 1560834969561,
            "sequenceId": 22,
            "endTime": 1560834996000,
            "finalMetricData": [
                {
                    "timestamp": 1560834995984,
                    "trialJobId": "hur1q",
                    "parameterId": "22",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.4155997077622158,\"test-rmse\":4.5115602918049245}"
                }
            ],
            "intermediate": [
                {
                    "timestamp": 1560834995984,
                    "trialJobId": "hur1q",
                    "parameterId": "22",
                    "type": "FINAL",
                    "sequence": 0,
                    "data": "{\"default\":3.4155997077622158,\"test-rmse\":4.5115602918049245}"
                }
            ]
        },
        {
            "id": "jITeW",
            "status": "SUCCEEDED",
            "hyperParameters": {
                "parameter_id": 23,
                "parameter_source": "algorithm",
                "parameters": {
                    "Dataset": "Bike",
                    "City": "NYC",
                    "CT": 4,
                    "PT": 1,
                    "TT": 4
                }
            },
            "logPath": "file://localhost:/Users/chai
Download .txt
gitextract_poyacrh5/

├── .gitignore
├── Experiments/
│   ├── AGCRN/
│   │   ├── AGCRN.py
│   │   ├── Runner.py
│   │   └── params.conf
│   ├── ARIMA/
│   │   ├── ARIMA.py
│   │   ├── ARIMA_Parallel.py
│   │   ├── RunnerARIMA.py
│   │   └── trials.py
│   ├── ASTGCN/
│   │   ├── ASTGCN.py
│   │   ├── Runner.py
│   │   └── configurations/
│   │       ├── METR_LA_astgcn.conf
│   │       ├── PEMS04_astgcn.conf
│   │       └── PEMS08_astgcn.conf
│   ├── CustomizedDemo/
│   │   ├── Runner_topk.py
│   │   ├── STMeta_Obj_topk.py
│   │   ├── STMeta_v0.model.yml
│   │   ├── STMeta_v1.model.yml
│   │   ├── STMeta_v2.model.yml
│   │   ├── STMeta_v3.model.yml
│   │   ├── metro_shanghai.data.yml
│   │   └── topKGraph.py
│   ├── DCRNN/
│   │   ├── DCRNN.py
│   │   ├── bike_trial.py
│   │   ├── cs_trial.py
│   │   ├── didi_trial.py
│   │   ├── metr_trial.py
│   │   ├── metro_trial.py
│   │   ├── pems_trial.py
│   │   └── street_didi_trial.py
│   ├── DeepST/
│   │   ├── DeepST.py
│   │   ├── param_search.yml
│   │   └── search_space.json
│   ├── GBRT/
│   │   ├── GBRT.py
│   │   ├── gbrt_config.yml
│   │   └── gbrt_search_space.json
│   ├── GMAN/
│   │   ├── GMAN.py
│   │   └── Runner.py
│   ├── GraphWaveNet/
│   │   ├── GraphWaveNet.py
│   │   └── Runner.py
│   ├── HM/
│   │   ├── HM.py
│   │   ├── hm_closeness_search_space.json
│   │   ├── hm_config.yml
│   │   └── hm_search_space.json
│   ├── HMM/
│   │   ├── HMM.py
│   │   └── trials.py
│   ├── MTGNN/
│   │   ├── MTGNN.py
│   │   └── Runner.py
│   ├── MultiStepPredict/
│   │   ├── Code/
│   │   │   ├── DirRec_ARIMA.py
│   │   │   ├── DirRec_DCRNN.py
│   │   │   ├── DirRec_STMeta.py
│   │   │   ├── DirRec_XGBoost.py
│   │   │   └── viz.py
│   │   └── README.md
│   ├── ParameterSearch/
│   │   ├── ARIMA.py
│   │   ├── CPT_GBRT.py
│   │   ├── CPT_HM.py
│   │   ├── CPT_STMeta_Obj.py
│   │   ├── CPT_XGBoost.py
│   │   ├── config.yml
│   │   ├── hm_config.yml
│   │   ├── hm_search_space.json
│   │   ├── plot_paper.ipynb
│   │   ├── results/
│   │   │   └── HM_Bike_NYC.json
│   │   ├── search_space.json
│   │   ├── xgboost_config.yml
│   │   └── xgboost_search_space.json
│   ├── RegionGeneration/
│   │   └── region_generation.py
│   ├── STGCN/
│   │   ├── Runner.py
│   │   └── STGCN.py
│   ├── STMeta/
│   │   ├── RunnerCPTtrial.py
│   │   ├── RunnerLSTM.py
│   │   ├── RunnerStreetDiDi.py
│   │   ├── RunnerWWW.py
│   │   ├── Runner_GRU.py
│   │   ├── Runner_M1_0.py
│   │   ├── Runner_M1_1.py
│   │   ├── Runner_M2_0.py
│   │   ├── Runner_M2_1.py
│   │   ├── Runner_Main.py
│   │   ├── Runner_PS_Chicago.py
│   │   ├── Runner_PS_NYC.py
│   │   ├── Runner_PS_Shanghai.py
│   │   ├── Runner_singleGraph.py
│   │   ├── Runner_temporalAblation.py
│   │   ├── Runner_v3.py
│   │   ├── STMeta_Obj.py
│   │   ├── STMeta_Obj_time.py
│   │   ├── STMeta_v0.model.yml
│   │   ├── STMeta_v1.model.yml
│   │   ├── STMeta_v2.model.yml
│   │   ├── STMeta_v3.model.yml
│   │   ├── bike_chicago.data.yml
│   │   ├── bike_dc.data.yml
│   │   ├── bike_nyc.data.yml
│   │   ├── chargestation_beijing.data.yml
│   │   ├── didi_chengdu.data.yml
│   │   ├── didi_chengdu_street.data.yml
│   │   ├── didi_xian.data.yml
│   │   ├── didi_xian_street.data.yml
│   │   ├── gc_search.json
│   │   ├── lstm_search.json
│   │   ├── metr_la.data.yml
│   │   ├── metro_chongqing.data.yml
│   │   ├── metro_shanghai.data.yml
│   │   ├── param_search.yml
│   │   └── pems_bay.data.yml
│   ├── STMeta_Transfer/
│   │   ├── Runner.py
│   │   ├── STMeta_Pretrain.py
│   │   ├── STMeta_Transfer.py
│   │   ├── STMeta_Transfer_Dynamic.py
│   │   ├── STMeta_Transfer_Test.py
│   │   ├── STMeta_v1.model.yml
│   │   ├── STMeta_v2.model.yml
│   │   ├── STMeta_v3.model.yml
│   │   ├── STMeta_v4.model.yml
│   │   ├── bike_chicago.data.yml
│   │   ├── bike_dc.data.yml
│   │   ├── bike_nyc.data.yml
│   │   ├── chargestation_beijing.data.yml
│   │   ├── didi_chengdu.data.yml
│   │   ├── didi_xian.data.yml
│   │   ├── metro_chongqing.data.yml
│   │   ├── metro_shanghai.data.yml
│   │   ├── network_search.json
│   │   ├── param_search.yml
│   │   ├── transfer_record.md
│   │   ├── transfer_record_bk.md
│   │   ├── transfer_result_overall.md
│   │   └── 使用流量匹配的结果.md
│   ├── STSGCN/
│   │   ├── Runner.py
│   │   ├── STSGCN.py
│   │   └── config/
│   │       ├── PEMS03/
│   │       │   ├── STMeta_emb.json
│   │       │   ├── STMeta_emb_1.json
│   │       │   ├── individual_GLU_mask_emb.json
│   │       │   ├── individual_GLU_nomask_emb.json
│   │       │   ├── individual_GLU_nomask_noemb.json
│   │       │   ├── individual_relu_nomask_noemb.json
│   │       │   └── sharing_relu_nomask_noemb.json
│   │       ├── PEMS04/
│   │       │   ├── individual_GLU.json
│   │       │   ├── individual_GLU_mask_emb.json
│   │       │   ├── individual_relu.json
│   │       │   ├── sharing_GLU.json
│   │       │   └── sharing_relu.json
│   │       ├── PEMS07/
│   │       │   └── individual_GLU_mask_emb.json
│   │       └── PEMS08/
│   │           └── individual_GLU_mask_emb.json
│   ├── ST_MGCN/
│   │   ├── ST_MGCN_Obj.py
│   │   ├── bike_trial.py
│   │   ├── cs_trial.py
│   │   ├── didi_trial.py
│   │   ├── metr_trial.py
│   │   ├── metro_trial.py
│   │   ├── param_search.yml
│   │   ├── params_search.json
│   │   ├── pems_trial.py
│   │   └── street_didi_trial.py
│   ├── ST_ResNet/
│   │   ├── ST_ResNet.py
│   │   ├── param_search.yml
│   │   └── search_space.json
│   ├── StabilityTest/
│   │   ├── CPT_AMulti_GCLSTM_Obj.py
│   │   ├── CPT_AMulti_GCLSTM_Simplify_Obj.py
│   │   ├── Master_CS_0.py
│   │   ├── Master_DiDi_0.py
│   │   └── Master_Metro_0.py
│   ├── V3_GACN/
│   │   ├── GACN_Master.py
│   │   └── GACN_Obj.py
│   └── XGBoost/
│       ├── XGBoost.py
│       ├── xgboost_config.yml
│       └── xgboost_search_space.json
├── LICENSE
├── QuickStarts/
│   ├── ARIMA.py
│   ├── DCRNN.py
│   ├── DeepST.py
│   ├── GBRT.py
│   ├── GeoMAN.py
│   ├── GraphWaveNet.py
│   ├── HM.py
│   ├── HMM.py
│   ├── STMeta.py
│   ├── ST_ResNet.py
│   ├── Visualization.py
│   ├── XGBoost.py
│   └── XGBoost_Validate.py
├── README.md
├── UCTB/
│   ├── __init__.py
│   ├── dataset/
│   │   ├── __init__.py
│   │   ├── context_loader.py
│   │   ├── data_loader.py
│   │   └── dataset.py
│   ├── evaluation/
│   │   ├── __init__.py
│   │   └── metric.py
│   ├── model/
│   │   ├── AGCRN.py
│   │   ├── ARIMA.py
│   │   ├── ASTGCN.py
│   │   ├── DCRNN.py
│   │   ├── DeepST.py
│   │   ├── GMAN.py
│   │   ├── GeoMAN.py
│   │   ├── GraphWaveNet.py
│   │   ├── HM.py
│   │   ├── HMM.py
│   │   ├── MCSTGCN.py
│   │   ├── MTGNN.py
│   │   ├── STGCN.py
│   │   ├── STMeta.py
│   │   ├── STSGCN.py
│   │   ├── ST_MGCN.py
│   │   ├── ST_ResNet.py
│   │   ├── XGBoost.py
│   │   └── __init__.py
│   ├── model_unit/
│   │   ├── BaseModel.py
│   │   ├── DCRNN_CELL.py
│   │   ├── GraphModelLayers.py
│   │   ├── ST_RNN.py
│   │   └── __init__.py
│   ├── preprocess/
│   │   ├── GraphGenerator.py
│   │   ├── RegionGenerator.py
│   │   ├── __init__.py
│   │   ├── dataset_helper.py
│   │   ├── preprocessor.py
│   │   └── time_utils.py
│   ├── train/
│   │   ├── EarlyStopping.py
│   │   ├── LossFunction.py
│   │   ├── MiniBatchTrain.py
│   │   └── __init__.py
│   └── utils/
│       ├── __init__.py
│       ├── make_predict_dataset.py
│       ├── multi_threads.py
│       ├── utils_AGCRN.py
│       ├── utils_ASTGCN.py
│       ├── utils_GMAN.py
│       ├── utils_GraphWaveNet.py
│       ├── utils_MTGNN.py
│       ├── utils_STGCN.py
│       └── utils_STSGCN.py
├── __init__.py
├── build.py
├── build_install.py
├── dist/
│   └── UCTB-0.3.5-py3-none-any.whl
├── docs/
│   ├── .buildinfo
│   ├── .doctrees/
│   │   ├── APIReference.doctree
│   │   ├── UCTB.dataset.doctree
│   │   ├── UCTB.doctree
│   │   ├── UCTB.evaluation.doctree
│   │   ├── UCTB.model.doctree
│   │   ├── UCTB.model_unit.doctree
│   │   ├── UCTB.preprocess.doctree
│   │   ├── UCTB.train.doctree
│   │   ├── UCTB.utils.doctree
│   │   ├── environment.pickle
│   │   ├── index.doctree
│   │   ├── md_file/
│   │   │   ├── all_results.doctree
│   │   │   ├── index.doctree
│   │   │   ├── installation.doctree
│   │   │   ├── introduction.doctree
│   │   │   ├── predictive_tool.doctree
│   │   │   ├── quickstart.doctree
│   │   │   ├── src/
│   │   │   │   └── image/
│   │   │   │       └── README.doctree
│   │   │   ├── static/
│   │   │   │   ├── MakeDatasetDiDi.doctree
│   │   │   │   ├── MakeDatasetDiDi_TTI.doctree
│   │   │   │   ├── current_supported_models.doctree
│   │   │   │   ├── experiment_on_bike.doctree
│   │   │   │   ├── experiment_on_chargestation.doctree
│   │   │   │   ├── experiment_on_didi.doctree
│   │   │   │   ├── experiment_on_metro.doctree
│   │   │   │   ├── parameter_search.doctree
│   │   │   │   ├── preprocess_api.doctree
│   │   │   │   ├── quick_start.doctree
│   │   │   │   ├── stable_test.doctree
│   │   │   │   ├── stmeta.doctree
│   │   │   │   └── transfer_record.doctree
│   │   │   ├── tutorial.doctree
│   │   │   ├── uctb_group.doctree
│   │   │   ├── urban_dataset.doctree
│   │   │   └── visualization_tool.doctree
│   │   ├── modules.doctree
│   │   └── update_guide.doctree
│   ├── .nojekyll
│   ├── APIReference.html
│   ├── UCTB.dataset.html
│   ├── UCTB.evaluation.html
│   ├── UCTB.html
│   ├── UCTB.model.html
│   ├── UCTB.model_unit.html
│   ├── UCTB.preprocess.html
│   ├── UCTB.train.html
│   ├── UCTB.utils.html
│   ├── _modules/
│   │   ├── UCTB/
│   │   │   ├── dataset/
│   │   │   │   ├── data_loader.html
│   │   │   │   └── dataset.html
│   │   │   ├── evaluation/
│   │   │   │   └── metric.html
│   │   │   ├── model/
│   │   │   │   ├── ARIMA.html
│   │   │   │   ├── DCRNN.html
│   │   │   │   ├── DeepST.html
│   │   │   │   ├── GeoMAN.html
│   │   │   │   ├── HM.html
│   │   │   │   ├── HMM.html
│   │   │   │   ├── STMeta.html
│   │   │   │   ├── ST_MGCN.html
│   │   │   │   ├── ST_ResNet.html
│   │   │   │   └── XGBoost.html
│   │   │   ├── model_unit/
│   │   │   │   ├── BaseModel.html
│   │   │   │   ├── DCRNN_CELL.html
│   │   │   │   ├── GraphModelLayers.html
│   │   │   │   └── ST_RNN.html
│   │   │   ├── preprocess/
│   │   │   │   ├── preprocessor.html
│   │   │   │   └── time_utils.html
│   │   │   ├── train/
│   │   │   │   ├── EarlyStopping.html
│   │   │   │   └── MiniBatchTrain.html
│   │   │   └── utils/
│   │   │       ├── multi_threads.html
│   │   │       └── st_map.html
│   │   ├── index.html
│   │   └── tensorflow/
│   │       └── python/
│   │           └── keras/
│   │               └── utils/
│   │                   └── tf_utils.html
│   ├── _sources/
│   │   ├── APIReference.rst.txt
│   │   ├── UCTB.dataset.rst.txt
│   │   ├── UCTB.evaluation.rst.txt
│   │   ├── UCTB.model.rst.txt
│   │   ├── UCTB.model_unit.rst.txt
│   │   ├── UCTB.preprocess.rst.txt
│   │   ├── UCTB.rst.txt
│   │   ├── UCTB.train.rst.txt
│   │   ├── UCTB.utils.rst.txt
│   │   ├── index.rst.txt
│   │   ├── md_file/
│   │   │   ├── all_results.md.txt
│   │   │   ├── all_results_setting.md.txt
│   │   │   ├── index.md.txt
│   │   │   ├── installation.md.txt
│   │   │   ├── introduction.md.txt
│   │   │   ├── predictive_tool.md.txt
│   │   │   ├── quickstart.md.txt
│   │   │   ├── src/
│   │   │   │   └── image/
│   │   │   │       └── README.md.txt
│   │   │   ├── static/
│   │   │   │   ├── MakeDatasetDiDi.md.txt
│   │   │   │   ├── MakeDatasetDiDi_TTI.md.txt
│   │   │   │   ├── all_results.md.txt
│   │   │   │   ├── amulti_gclstm.md.txt
│   │   │   │   ├── current_supported_models.md.txt
│   │   │   │   ├── experiment_on_bike.md.txt
│   │   │   │   ├── experiment_on_chargestation.md.txt
│   │   │   │   ├── experiment_on_didi.md.txt
│   │   │   │   ├── experiment_on_metro.md.txt
│   │   │   │   ├── parameter_search.md.txt
│   │   │   │   ├── preprocess_api.md.txt
│   │   │   │   ├── quick_start.md.txt
│   │   │   │   ├── stable_test.md.txt
│   │   │   │   ├── stmeta.md.txt
│   │   │   │   ├── transfer_record.md.txt
│   │   │   │   └── tutorial.md.txt
│   │   │   ├── tutorial.md.txt
│   │   │   ├── uctb_group.md.txt
│   │   │   ├── urban_dataset.md.txt
│   │   │   └── visualization_tool.md.txt
│   │   ├── modules.rst.txt
│   │   └── update_guide.txt
│   ├── _static/
│   │   ├── _sphinx_javascript_frameworks_compat.js
│   │   ├── alabaster.css
│   │   ├── basic.css
│   │   ├── css/
│   │   │   ├── badge_only.css
│   │   │   └── theme.css
│   │   ├── custom.css
│   │   ├── doctools.js
│   │   ├── documentation_options.js
│   │   ├── epub.css
│   │   ├── ie6.css
│   │   ├── jquery-3.1.0.js
│   │   ├── jquery-3.2.1.js
│   │   ├── jquery-3.4.1.js
│   │   ├── jquery-3.5.1.js
│   │   ├── jquery-3.6.0.js
│   │   ├── jquery.js
│   │   ├── js/
│   │   │   └── theme.js
│   │   ├── language_data.js
│   │   ├── misty-light-windows.css
│   │   ├── nature.css
│   │   ├── pygments.css
│   │   ├── pyramid.css
│   │   ├── searchtools.js
│   │   ├── sphinx_highlight.js
│   │   ├── sphinxdoc.css
│   │   ├── underscore-1.13.1.js
│   │   ├── underscore-1.3.1.js
│   │   ├── underscore.js
│   │   └── websupport.js
│   ├── genindex.html
│   ├── index.html
│   ├── md_file/
│   │   ├── all_results.html
│   │   ├── installation.html
│   │   ├── introduction.html
│   │   ├── predictive_tool.html
│   │   ├── src/
│   │   │   └── image/
│   │   │       └── README.html
│   │   ├── static/
│   │   │   ├── stable_test.html
│   │   │   └── transfer_record.html
│   │   ├── uctb_group.html
│   │   ├── urban_dataset.html
│   │   └── visualization_tool.html
│   ├── modules.html
│   ├── objects.inv
│   ├── py-modindex.html
│   ├── search.html
│   ├── searchindex.js
│   ├── sphinx/
│   │   ├── APIReference.rst
│   │   ├── Makefile
│   │   ├── UCTB.dataset.rst
│   │   ├── UCTB.evaluation.rst
│   │   ├── UCTB.model.rst
│   │   ├── UCTB.model_unit.rst
│   │   ├── UCTB.preprocess.rst
│   │   ├── UCTB.rst
│   │   ├── UCTB.train.rst
│   │   ├── UCTB.utils.rst
│   │   ├── _build/
│   │   │   ├── .buildinfo
│   │   │   ├── .doctrees/
│   │   │   │   ├── APIReference.doctree
│   │   │   │   ├── UCTB.dataset.doctree
│   │   │   │   ├── UCTB.doctree
│   │   │   │   ├── UCTB.evaluation.doctree
│   │   │   │   ├── UCTB.model.doctree
│   │   │   │   ├── UCTB.model_unit.doctree
│   │   │   │   ├── UCTB.preprocess.doctree
│   │   │   │   ├── UCTB.train.doctree
│   │   │   │   ├── UCTB.utils.doctree
│   │   │   │   ├── environment.pickle
│   │   │   │   ├── index.doctree
│   │   │   │   ├── md_file/
│   │   │   │   │   ├── all_results.doctree
│   │   │   │   │   ├── installation.doctree
│   │   │   │   │   ├── introduction.doctree
│   │   │   │   │   ├── predictive_tool.doctree
│   │   │   │   │   ├── src/
│   │   │   │   │   │   └── image/
│   │   │   │   │   │       └── README.doctree
│   │   │   │   │   ├── static/
│   │   │   │   │   │   ├── stable_test.doctree
│   │   │   │   │   │   └── transfer_record.doctree
│   │   │   │   │   ├── uctb_group.doctree
│   │   │   │   │   ├── urban_dataset.doctree
│   │   │   │   │   └── visualization_tool.doctree
│   │   │   │   ├── modules.doctree
│   │   │   │   └── update_guide.doctree
│   │   │   ├── .nojekyll
│   │   │   ├── APIReference.html
│   │   │   ├── UCTB.dataset.html
│   │   │   ├── UCTB.evaluation.html
│   │   │   ├── UCTB.html
│   │   │   ├── UCTB.model.html
│   │   │   ├── UCTB.model_unit.html
│   │   │   ├── UCTB.preprocess.html
│   │   │   ├── UCTB.train.html
│   │   │   ├── UCTB.utils.html
│   │   │   ├── _sources/
│   │   │   │   ├── APIReference.rst.txt
│   │   │   │   ├── UCTB.dataset.rst.txt
│   │   │   │   ├── UCTB.evaluation.rst.txt
│   │   │   │   ├── UCTB.model.rst.txt
│   │   │   │   ├── UCTB.model_unit.rst.txt
│   │   │   │   ├── UCTB.preprocess.rst.txt
│   │   │   │   ├── UCTB.rst.txt
│   │   │   │   ├── UCTB.train.rst.txt
│   │   │   │   ├── UCTB.utils.rst.txt
│   │   │   │   ├── index.rst.txt
│   │   │   │   ├── md_file/
│   │   │   │   │   ├── all_results.md.txt
│   │   │   │   │   ├── installation.md.txt
│   │   │   │   │   ├── introduction.md.txt
│   │   │   │   │   ├── predictive_tool.md.txt
│   │   │   │   │   ├── src/
│   │   │   │   │   │   └── image/
│   │   │   │   │   │       └── README.md.txt
│   │   │   │   │   ├── static/
│   │   │   │   │   │   ├── stable_test.md.txt
│   │   │   │   │   │   └── transfer_record.md.txt
│   │   │   │   │   ├── uctb_group.md.txt
│   │   │   │   │   ├── urban_dataset.md.txt
│   │   │   │   │   └── visualization_tool.md.txt
│   │   │   │   ├── modules.rst.txt
│   │   │   │   └── update_guide.txt
│   │   │   ├── _static/
│   │   │   │   ├── _sphinx_javascript_frameworks_compat.js
│   │   │   │   ├── basic.css
│   │   │   │   ├── css/
│   │   │   │   │   ├── badge_only.css
│   │   │   │   │   └── theme.css
│   │   │   │   ├── doctools.js
│   │   │   │   ├── documentation_options.js
│   │   │   │   ├── jquery-3.1.0.js
│   │   │   │   ├── jquery-3.6.0.js
│   │   │   │   ├── jquery.js
│   │   │   │   ├── js/
│   │   │   │   │   └── theme.js
│   │   │   │   ├── language_data.js
│   │   │   │   ├── pygments.css
│   │   │   │   ├── searchtools.js
│   │   │   │   ├── underscore-1.13.1.js
│   │   │   │   ├── underscore-1.3.1.js
│   │   │   │   ├── underscore.js
│   │   │   │   └── websupport.js
│   │   │   ├── genindex.html
│   │   │   ├── index.html
│   │   │   ├── md_file/
│   │   │   │   ├── all_results.html
│   │   │   │   ├── installation.html
│   │   │   │   ├── introduction.html
│   │   │   │   ├── predictive_tool.html
│   │   │   │   ├── src/
│   │   │   │   │   └── image/
│   │   │   │   │       └── README.html
│   │   │   │   ├── static/
│   │   │   │   │   ├── stable_test.html
│   │   │   │   │   └── transfer_record.html
│   │   │   │   ├── uctb_group.html
│   │   │   │   ├── update_guide.html
│   │   │   │   ├── urban_dataset.html
│   │   │   │   └── visualization_tool.html
│   │   │   ├── modules.html
│   │   │   ├── objects.inv
│   │   │   ├── py-modindex.html
│   │   │   ├── search.html
│   │   │   ├── searchindex.js
│   │   │   └── update_guide.html
│   │   ├── conf.py
│   │   ├── index.rst
│   │   ├── make.bat
│   │   ├── md_file/
│   │   │   ├── .gitignore
│   │   │   ├── all_results.md
│   │   │   ├── installation.md
│   │   │   ├── introduction.md
│   │   │   ├── predictive_tool.md
│   │   │   ├── src/
│   │   │   │   └── image/
│   │   │   │       └── README.md
│   │   │   ├── static/
│   │   │   │   ├── stable_test.md
│   │   │   │   └── transfer_record.md
│   │   │   ├── uctb_group.md
│   │   │   ├── urban_dataset.md
│   │   │   └── visualization_tool.md
│   │   ├── modules.rst
│   │   └── update_guide.txt
│   └── update_guide.html
├── environment.yaml
└── setup.py
Download .txt
SYMBOL INDEX (1453 symbols across 89 files)

FILE: Experiments/ARIMA/ARIMA_Parallel.py
  function task (line 38) | def task(share_queue, locker, data, parameters):
  function reduce_fn (line 74) | def reduce_fn(a, b):

FILE: Experiments/ARIMA/trials.py
  function task_func (line 6) | def task_func(share_queue, locker, data, parameters):

FILE: Experiments/CustomizedDemo/topKGraph.py
  class topKGraph (line 6) | class topKGraph(GraphGenerator):  # Init NodeTrafficLoader
    method __init__ (line 8) | def __init__(self,**kwargs):
    method neighbour_adjacent (line 31) | def neighbour_adjacent(self, lat_lng_list, threshold):

FILE: Experiments/DCRNN/DCRNN.py
  class my_data_loader (line 10) | class my_data_loader(NodeTrafficLoader):
    method __init__ (line 12) | def __init__(self, **kwargs):
    method diffusion_matrix (line 21) | def diffusion_matrix(self, filter_type='random_walk'):
  function param_parser (line 39) | def param_parser():

FILE: Experiments/HMM/trials.py
  function task_func (line 6) | def task_func(share_queue, locker, data, parameters):

FILE: Experiments/MTGNN/MTGNN.py
  function str_to_bool (line 12) | def str_to_bool(value):

FILE: Experiments/MultiStepPredict/Code/DirRec_DCRNN.py
  class my_data_loader (line 11) | class my_data_loader(NodeTrafficLoader):
    method __init__ (line 13) | def __init__(self, **kwargs):
    method diffusion_matrix (line 22) | def diffusion_matrix(self, filter_type='random_walk'):

FILE: Experiments/MultiStepPredict/Code/viz.py
  function readMetric (line 5) | def readMetric(metric_name, eva_dir, horizon_num):
  function plot (line 21) | def plot(eva_metric, metric_name, model_name, dataset_name, horizon_num):

FILE: Experiments/ParameterSearch/CPT_GBRT.py
  function show_prediction (line 41) | def show_prediction(prediction, target, station_index, start=0, end=-1):

FILE: Experiments/ParameterSearch/CPT_STMeta_Obj.py
  function cpt_stmeta_param_parser (line 14) | def cpt_stmeta_param_parser():

FILE: Experiments/STMeta_Transfer/STMeta_Pretrain.py
  function show_prediction (line 38) | def show_prediction(pretrain, finetune, transfer, target, station_index,...

FILE: Experiments/STMeta_Transfer/STMeta_Transfer.py
  function show_prediction (line 39) | def show_prediction(pretrain, finetune, transfer, target, station_index,...

FILE: Experiments/STMeta_Transfer/STMeta_Transfer_Dynamic.py
  function show_prediction (line 40) | def show_prediction(pretrain, finetune, transfer, target, station_index,...
  function callback_updating_fm (line 288) | def callback_updating_fm():

FILE: Experiments/STMeta_Transfer/STMeta_Transfer_Test.py
  function show_prediction (line 52) | def show_prediction(pretrain, finetune, transfer, target, station_index,...
  function dynamic_fm (line 355) | def dynamic_fm():

FILE: Experiments/ST_MGCN/ST_MGCN_Obj.py
  function stmeta_param_parser (line 11) | def stmeta_param_parser():

FILE: Experiments/StabilityTest/CPT_AMulti_GCLSTM_Obj.py
  function cpt_stmeta_param_parser (line 13) | def cpt_stmeta_param_parser():
  class SubwayTrafficLoader (line 54) | class SubwayTrafficLoader(NodeTrafficLoader_CPT):
    method __init__ (line 56) | def __init__(self,

FILE: Experiments/StabilityTest/CPT_AMulti_GCLSTM_Simplify_Obj.py
  function cpt_stmeta_param_parser (line 13) | def cpt_stmeta_param_parser():
  class SubwayTrafficLoader (line 54) | class SubwayTrafficLoader(NodeTrafficLoader_CPT):
    method __init__ (line 56) | def __init__(self,

FILE: Experiments/V3_GACN/GACN_Obj.py
  function gacn_param_parser (line 10) | def gacn_param_parser():

FILE: QuickStarts/DCRNN.py
  class my_data_loader (line 9) | class my_data_loader(NodeTrafficLoader):
    method __init__ (line 11) | def __init__(self, **kwargs):
    method diffusion_matrix (line 20) | def diffusion_matrix(self, filter_type='random_walk'):

FILE: QuickStarts/GeoMAN.py
  class GeoMAN_DataLoader (line 9) | class GeoMAN_DataLoader(NodeTrafficLoader):
    method __init__ (line 10) | def __init__(self, input_steps=12, output_steps=1, **kwargs):
    method process_data (line 46) | def process_data(self, trend, period, closeness, ef, y):

FILE: UCTB/dataset/context_loader.py
  class TemporalContextLoader (line 3) | class TemporalContextLoader(ABC):
    method __init__ (line 5) | def __init__(self, traffic_dataloader):
    method get_weather (line 13) | def get_weather(self, arg):
    method get_holiday (line 68) | def get_holiday(sellf, arg):
    method get_temporal_position (line 83) | def get_temporal_position(self, arg):
  class SpatialContextLoader (line 88) | class SpatialContextLoader(ABC):
    method __init__ (line 90) | def __init__(self, traffic_dataloader):
    method get_poi (line 94) | def get_poi(sellf, arg):

FILE: UCTB/dataset/data_loader.py
  class GridTrafficLoader (line 13) | class GridTrafficLoader(object):
    method __init__ (line 15) | def __init__(self,
  class NodeTrafficLoader (line 134) | class NodeTrafficLoader(object):
    method __init__ (line 181) | def __init__(self,
    method st_map (line 352) | def st_map(self, zoom=11, style='mapbox://styles/rmetfc/ck1manozn0edb1...
    method make_concat (line 411) | def make_concat(self, node='all', is_train=True):
  class TransferDataLoader (line 456) | class TransferDataLoader(object):
    method __init__ (line 458) | def __init__(self, sd_params, td_params, model_params, td_data_length=...
    method traffic_sim (line 469) | def traffic_sim(self):
    method traffic_sim_fake (line 494) | def traffic_sim_fake(self):
    method checkin_sim (line 520) | def checkin_sim(self):
    method checkin_sim_sd (line 548) | def checkin_sim_sd(self):
    method poi_sim (line 558) | def poi_sim(self):

FILE: UCTB/dataset/dataset.py
  class DataSet (line 8) | class DataSet(object):
    method __init__ (line 37) | def __init__(self, dataset, MergeIndex, MergeWay, city=None, data_dir=...
    method merge_data (line 100) | def merge_data(self, data, dataType):

FILE: UCTB/evaluation/metric.py
  function rmse (line 4) | def rmse(prediction, target, threshold=None):
  function trunc_rmse (line 22) | def trunc_rmse(prediction, target, threshold=0):
  function mape (line 45) | def mape(prediction, target, threshold=0):
  function mae (line 59) | def mae(prediction, target, threshold=None):
  function trunc_mae (line 75) | def trunc_mae(prediction, target, threshold=0):
  function smape (line 97) | def smape(prediction, target,threshold=0):
  function trunc_smape (line 115) | def trunc_smape(prediction, target, threshold=0):

FILE: UCTB/model/AGCRN.py
  class AVWGCN (line 6) | class AVWGCN(nn.Module):
    method __init__ (line 7) | def __init__(self, dim_in, dim_out, cheb_k, embed_dim):
    method forward (line 12) | def forward(self, x, node_embeddings):
  class AVWDCRNN (line 29) | class AVWDCRNN(nn.Module):
    method __init__ (line 30) | def __init__(self, node_num, dim_in, dim_out, cheb_k, embed_dim, num_l...
    method forward (line 41) | def forward(self, x, init_state, node_embeddings):
    method init_hidden (line 62) | def init_hidden(self, batch_size):
  class AGCRN (line 68) | class AGCRN(nn.Module):
    method __init__ (line 88) | def __init__(self, num_node,input_dim,hidden_dim,output_dim,pred_step,...
    method forward (line 108) | def forward(self, source, targets, teacher_forcing_ratio=0.5):
  class AGCRNCell (line 124) | class AGCRNCell(nn.Module):
    method __init__ (line 125) | def __init__(self, node_num, dim_in, dim_out, cheb_k, embed_dim):
    method forward (line 132) | def forward(self, x, state, node_embeddings):
    method init_hidden_state (line 144) | def init_hidden_state(self, batch_size):

FILE: UCTB/model/ARIMA.py
  class ARIMA (line 9) | class ARIMA(object):
    method __init__ (line 35) | def __init__(self, time_sequence, order=None, seasonal_order=(0, 0, 0,...
    method get_order (line 44) | def get_order(self, series, order=None, max_ar=6, max_ma=2, max_d=2):
    method adf_test (line 72) | def adf_test(time_series, max_lags=None, verbose=True):
    method predict (line 96) | def predict(self, time_sequences, forecast_step=1):

FILE: UCTB/model/ASTGCN.py
  class Spatial_Attention_layer (line 9) | class Spatial_Attention_layer(nn.Module):
    method __init__ (line 13) | def __init__(self, DEVICE, in_channels, num_of_vertices, num_of_timest...
    method forward (line 22) | def forward(self, x):
  class cheb_conv_withSAt (line 41) | class cheb_conv_withSAt(nn.Module):
    method __init__ (line 46) | def __init__(self, K, cheb_polynomials, in_channels, out_channels):
    method forward (line 60) | def forward(self, x, spatial_attention):
  class Temporal_Attention_layer (line 94) | class Temporal_Attention_layer(nn.Module):
    method __init__ (line 95) | def __init__(self, DEVICE, in_channels, num_of_vertices, num_of_timest...
    method forward (line 103) | def forward(self, x):
  class cheb_conv (line 126) | class cheb_conv(nn.Module):
    method __init__ (line 131) | def __init__(self, K, cheb_polynomials, in_channels, out_channels):
    method forward (line 145) | def forward(self, x):
  class ASTGCN_block (line 177) | class ASTGCN_block(nn.Module):
    method __init__ (line 179) | def __init__(self, DEVICE, in_channels, K, nb_chev_filter, nb_time_fil...
    method forward (line 188) | def forward(self, x):
  class ASTGCN_submodule (line 219) | class ASTGCN_submodule(nn.Module):
    method __init__ (line 242) | def __init__(self, DEVICE, num_blocks, in_channels, K, num_chev_filter...
    method forward (line 256) | def forward(self, x):
  function make_model (line 270) | def make_model(DEVICE, nb_block, in_channels, K, nb_chev_filter, nb_time...
  function cheb_polynomial (line 298) | def cheb_polynomial(L_tilde, K):

FILE: UCTB/model/DCRNN.py
  class DCRNN (line 9) | class DCRNN(BaseModel):
    method __init__ (line 38) | def __init__(self,
    method build (line 75) | def build(self, init_vars=True, max_to_keep=5):
    method _get_feed_dict (line 171) | def _get_feed_dict(self,

FILE: UCTB/model/DeepST.py
  class DeepST (line 7) | class DeepST(BaseModel):
    method __init__ (line 37) | def __init__(self,
    method build (line 83) | def build(self):
    method _get_feed_dict (line 141) | def _get_feed_dict(self, closeness_feature=None, period_feature=None, ...

FILE: UCTB/model/GMAN.py
  class Graph (line 6) | class Graph():
    method __init__ (line 7) | def __init__(self, nx_G, is_directed, p, q):
    method node2vec_walk (line 13) | def node2vec_walk(self, walk_length, start_node):
    method simulate_walks (line 39) | def simulate_walks(self, num_walks, walk_length):
    method get_alias_edge (line 54) | def get_alias_edge(self, src, dst):
    method preprocess_transition_probs (line 74) | def preprocess_transition_probs(self):
  function alias_setup (line 106) | def alias_setup(probs):
  function alias_draw (line 138) | def alias_draw(J, q):
  function conv2d (line 151) | def conv2d(x, output_dims, kernel_size, stride = [1, 1],
  function batch_norm (line 171) | def batch_norm(x, is_training, bn_decay):
  function dropout (line 201) | def dropout(x, drop, is_training):
  function placeholder (line 209) | def placeholder(P, Q, N):
  function FC (line 217) | def FC(x, units, activations, bn, bn_decay, is_training, use_bias=True):
  function STEmbedding (line 233) | def STEmbedding(SE, TE, T, D, bn, bn_decay, is_training):
  function spatialAttention (line 259) | def spatialAttention(X, STE, K, d, bn, bn_decay, is_training):
  function temporalAttention (line 298) | def temporalAttention(X, STE, K, d, bn, bn_decay, is_training, mask=True):
  function gatedFusion (line 357) | def gatedFusion(HS, HT, D, bn, bn_decay, is_training):
  function STAttBlock (line 382) | def STAttBlock(X, STE, K, d, bn, bn_decay, is_training, mask=False):
  function transformAttention (line 389) | def transformAttention(X, STE_P, STE_Q, K, d, bn, bn_decay, is_training):
  function GMAN (line 439) | def GMAN(X, TE, SE, P, Q, T, L, K, d, bn, bn_decay, is_training):

FILE: UCTB/model/GeoMAN.py
  class GeoMAN (line 6) | class GeoMAN(BaseModel):
    method __init__ (line 41) | def __init__(self,
    method build (line 78) | def build(self, init_vars=True, max_to_keep=5):
    method _get_feed_dict (line 348) | def _get_feed_dict(self,
  function input_transform (line 379) | def input_transform(local_features,
  function split_timesteps (line 392) | def split_timesteps(inputs):

FILE: UCTB/model/GraphWaveNet.py
  class nconv (line 5) | class nconv(nn.Module):
    method __init__ (line 6) | def __init__(self):
    method forward (line 9) | def forward(self,x, A):
  class linear (line 13) | class linear(nn.Module):
    method __init__ (line 14) | def __init__(self,c_in,c_out):
    method forward (line 18) | def forward(self,x):
  class gcn (line 21) | class gcn(nn.Module):
    method __init__ (line 22) | def __init__(self,c_in,c_out,dropout,support_len=3,order=2):
    method forward (line 30) | def forward(self,x,support):
  class gwnet (line 46) | class gwnet(nn.Module):
    method __init__ (line 73) | def __init__(self, device, num_node, dropout=0.3, supports=None, gcn_b...
    method forward (line 164) | def forward(self, input):

FILE: UCTB/model/HM.py
  class HM (line 7) | class HM(object):
    method __init__ (line 18) | def __init__(self, c, p, t):
    method predict (line 27) | def predict(self, closeness_feature, period_feature, trend_feature):

FILE: UCTB/model/HMM.py
  class HMM (line 5) | class HMM(object):
    method __init__ (line 6) | def __init__(self, num_components, n_iter, hmm_kernal=hmm.GaussianHMM):
    method fit (line 13) | def fit(self, x):
    method predict (line 18) | def predict(self, x, length):

FILE: UCTB/model/MCSTGCN.py
  class MCSTGCN (line 10) | class MCSTGCN(BaseModel):
    method __init__ (line 54) | def __init__(self,
    method build (line 115) | def build(self, init_vars=True, max_to_keep=5):
    method _get_feed_dict (line 385) | def _get_feed_dict(self,

FILE: UCTB/model/MTGNN.py
  class nconv (line 9) | class nconv(nn.Module):
    method __init__ (line 10) | def __init__(self):
    method forward (line 13) | def forward(self,x, A):
  class dy_nconv (line 17) | class dy_nconv(nn.Module):
    method __init__ (line 18) | def __init__(self):
    method forward (line 21) | def forward(self,x, A):
  class linear (line 25) | class linear(nn.Module):
    method __init__ (line 26) | def __init__(self,c_in,c_out,bias=True):
    method forward (line 30) | def forward(self,x):
  class prop (line 34) | class prop(nn.Module):
    method __init__ (line 35) | def __init__(self,c_in,c_out,gdep,dropout,alpha):
    method forward (line 43) | def forward(self,x,adj):
  class mixprop (line 55) | class mixprop(nn.Module):
    method __init__ (line 56) | def __init__(self,c_in,c_out,gdep,dropout,alpha):
    method forward (line 65) | def forward(self,x,adj):
  class dy_mixprop (line 78) | class dy_mixprop(nn.Module):
    method __init__ (line 79) | def __init__(self,c_in,c_out,gdep,dropout,alpha):
    method forward (line 92) | def forward(self,x):
  class dilated_1D (line 122) | class dilated_1D(nn.Module):
    method __init__ (line 123) | def __init__(self, cin, cout, dilation_factor=2):
    method forward (line 129) | def forward(self,input):
  class dilated_inception (line 133) | class dilated_inception(nn.Module):
    method __init__ (line 134) | def __init__(self, cin, cout, dilation_factor=2):
    method forward (line 142) | def forward(self,input):
  class graph_constructor (line 152) | class graph_constructor(nn.Module):
    method __init__ (line 153) | def __init__(self, nnodes, k, dim, device, alpha=3, static_feat=None):
    method forward (line 172) | def forward(self, idx):
    method fullA (line 192) | def fullA(self, idx):
  class graph_global (line 207) | class graph_global(nn.Module):
    method __init__ (line 208) | def __init__(self, nnodes, k, dim, device, alpha=3, static_feat=None):
    method forward (line 213) | def forward(self, idx):
  class graph_undirected (line 217) | class graph_undirected(nn.Module):
    method __init__ (line 218) | def __init__(self, nnodes, k, dim, device, alpha=3, static_feat=None):
    method forward (line 234) | def forward(self, idx):
  class graph_directed (line 256) | class graph_directed(nn.Module):
    method __init__ (line 257) | def __init__(self, nnodes, k, dim, device, alpha=3, static_feat=None):
    method forward (line 276) | def forward(self, idx):
  class LayerNorm (line 297) | class LayerNorm(nn.Module):
    method __init__ (line 299) | def __init__(self, normalized_shape, eps=1e-5, elementwise_affine=True):
    method reset_parameters (line 315) | def reset_parameters(self):
    method forward (line 320) | def forward(self, input, idx):
    method extra_repr (line 326) | def extra_repr(self):
  class gtnet (line 330) | class gtnet(nn.Module):
    method __init__ (line 331) | def __init__(self, gcn_true, buildA_true, gcn_depth, num_nodes, device...
    method forward (line 415) | def forward(self, input, idx=None):

FILE: UCTB/model/STGCN.py
  function build_model (line 5) | def build_model(inputs, n_his, Ks, Kt, blocks, keep_prob):
  function gen_batch (line 45) | def gen_batch(inputs, batch_size, dynamic_batch=False, shuffle=False):
  function cheb_poly_approx (line 76) | def cheb_poly_approx(L, Ks, n):
  function gconv (line 104) | def gconv(x, theta, Ks, c_in, c_out):
  function layer_norm (line 129) | def layer_norm(x, scope):
  function temporal_conv_layer (line 149) | def temporal_conv_layer(x, Kt, c_in, c_out, act_func='relu'):
  function spatio_conv_layer (line 202) | def spatio_conv_layer(x, Ks, c_in, c_out):
  function st_conv_block (line 240) | def st_conv_block(x, Ks, Kt, channels, scope, keep_prob, act_func='GLU'):
  function fully_con_layer (line 267) | def fully_con_layer(x, n, channel, scope):
  function output_layer (line 285) | def output_layer(x, T, scope, act_func='GLU'):
  function variable_summaries (line 311) | def variable_summaries(var, v_name):

FILE: UCTB/model/STMeta.py
  class STMeta (line 10) | class STMeta(BaseModel):
    method __init__ (line 55) | def __init__(self,
    method build (line 119) | def build(self, init_vars=True, max_to_keep=5):
    method _get_feed_dict (line 293) | def _get_feed_dict(self,

FILE: UCTB/model/STSGCN.py
  function position_embedding (line 6) | def position_embedding(data,
  function gcn_operation (line 58) | def gcn_operation(data, adj,
  function stsgcm (line 118) | def stsgcm(data, adj,
  function stsgcl (line 172) | def stsgcl(data, adj,
  function sthgcn_layer_individual (line 222) | def sthgcn_layer_individual(data, adj,
  function sthgcn_layer_sharing (line 288) | def sthgcn_layer_sharing(data, adj,
  function output_layer (line 354) | def output_layer(data, num_of_vertices, input_length, num_of_features,
  function stsgcn (line 409) | def stsgcn(data, adj, label,
  function construct_model (line 480) | def construct_model(config, AM):
  function get_adjacency_matrix (line 538) | def get_adjacency_matrix(distance_df_filename, num_of_vertices,
  function construct_adj (line 594) | def construct_adj(A, steps):

FILE: UCTB/model/ST_MGCN.py
  class ST_MGCN (line 7) | class ST_MGCN(BaseModel):
    method __init__ (line 30) | def __init__(self,
    method build (line 58) | def build(self, init_vars=True, max_to_keep=5):
    method _get_feed_dict (line 133) | def _get_feed_dict(self, traffic_flow, laplace_matrix, target=None, ex...

FILE: UCTB/model/ST_ResNet.py
  class ST_ResNet (line 6) | class ST_ResNet(BaseModel):
    method __init__ (line 33) | def __init__(self,
    method build (line 61) | def build(self):
    method _get_feed_dict (line 136) | def _get_feed_dict(self, closeness_feature=None, period_feature=None, ...

FILE: UCTB/model/XGBoost.py
  class XGBoost (line 5) | class XGBoost():
    method __init__ (line 21) | def __init__(self, n_estimators=10, max_depth=5, verbosity=0, objectiv...
    method fit (line 31) | def fit(self, X, y):
    method predict (line 42) | def predict(self, X):

FILE: UCTB/model_unit/BaseModel.py
  class BaseModel (line 13) | class BaseModel(object):
    method __init__ (line 22) | def __init__(self, code_version, model_dir, gpu_device):
    method build (line 53) | def build(self, init_vars=True, max_to_keep=5):
    method add_summary (line 71) | def add_summary(self, name, value, global_step):
    method _summary_histogram (line 75) | def _summary_histogram(self):
    method _run (line 82) | def _run(self, feed_dict, output_names, op_names):
    method _get_feed_dict (line 95) | def _get_feed_dict(self, **kwargs):
    method fit (line 98) | def fit(self, sequence_length, output_names=('loss', ), op_names=('tra...
    method predict (line 228) | def predict(self, sequence_length, output_names=('prediction', ), cach...
    method manual_summary (line 259) | def manual_summary(self, global_step=None):
    method _log (line 263) | def _log(self, text):
    method _get_log (line 270) | def _get_log(self):
    method save (line 278) | def save(self, subscript, global_step):
    method load (line 294) | def load(self, subscript):
    method close (line 316) | def close(self):
    method load_event_scalar (line 322) | def load_event_scalar(self, scalar_name='val_loss'):

FILE: UCTB/model_unit/DCRNN_CELL.py
  class DCGRUCell (line 11) | class DCGRUCell(RNNCell):
    method call (line 15) | def call(self, inputs, **kwargs):
    method compute_output_shape (line 18) | def compute_output_shape(self, input_shape):
    method __init__ (line 21) | def __init__(self, num_units, input_dim, num_graphs, supports, max_dif...
    method state_size (line 50) | def state_size(self):
    method output_size (line 54) | def output_size(self):
    method __call__ (line 60) | def __call__(self, inputs, state, scope=None):
    method _concat (line 95) | def _concat(x, x_):
    method _fc (line 99) | def _fc(self, inputs, state, output_size, bias_start=0.0):
    method _gconv (line 115) | def _gconv(self, inputs, state, output_size, bias_start=0.0):

FILE: UCTB/model_unit/GraphModelLayers.py
  class GAL (line 9) | class GAL(object):
    method attention_merge_weight (line 14) | def attention_merge_weight(inputs, units, num_head, activation=tf.nn.l...
    method add_ga_layer_matrix (line 43) | def add_ga_layer_matrix(inputs, units, num_head, activation=tf.nn.tanh):
    method add_residual_ga_layer (line 92) | def add_residual_ga_layer(inputs, units, num_head, activation=tf.nn.ta...
  class GCL (line 105) | class GCL(object):
    method add_gc_layer (line 110) | def add_gc_layer(inputs,
    method add_multi_gc_layers (line 179) | def add_multi_gc_layers(inputs, gcn_k, gcn_l, output_size, laplacian_m...

FILE: UCTB/model_unit/ST_RNN.py
  function _generate_dropout_mask (line 9) | def _generate_dropout_mask(ones, rate, training=None, count=1):
  class GCLSTMCell (line 21) | class GCLSTMCell(tf.keras.layers.LSTMCell):
    method __init__ (line 38) | def __init__(self, units, num_node, laplacian_matrix, gcn_k=1, gcn_l=1...
    method build (line 49) | def build(self, input_shape):
    method kth_cheby_ploy (line 65) | def kth_cheby_ploy(self, k, tk1=None, tk2=None):
    method call (line 73) | def call(self, inputs, states, training=None):

FILE: UCTB/preprocess/GraphGenerator.py
  class GraphGenerator (line 11) | class GraphGenerator():
    method __init__ (line 36) | def __init__(self,
    method build_graph (line 66) | def build_graph(self, graph_name):
    method haversine (line 110) | def haversine(lat1, lon1, lat2, lon2):
    method correlation_adjacent (line 127) | def correlation_adjacent(traffic_data, threshold):
    method distance_adjacent (line 144) | def distance_adjacent(self, lat_lng_list, threshold):
    method interaction_adjacent (line 163) | def interaction_adjacent(interaction_matrix, threshold):
    method adjacent_to_laplacian (line 178) | def adjacent_to_laplacian(adjacent_matrix):
  function scaled_Laplacian_ASTGCN (line 192) | def scaled_Laplacian_ASTGCN(W):
  function scaled_laplacian_STGCN (line 217) | def scaled_laplacian_STGCN(W):

FILE: UCTB/preprocess/RegionGenerator.py
  function grid_partition (line 3) | def grid_partition():
  function hexagon_partition (line 7) | def hexagon_partition():
  function roadnetwork_partition (line 11) | def roadnetwork_partition():
  function location_bind (line 15) | def location_bind():
  function async_fluid (line 19) | def async_fluid():
  function node_swapping (line 23) | def node_swapping():
  class RegionGenerator (line 27) | class RegionGenerator():
    method __init__ (line 50) | def __init__(self,spatial_range,area_limit) -> None:
    method partition (line 58) | def partition(self,method,**params) -> any:
    method bind (line 64) | def bind(self,df,method,**params) -> any:
    method aggregate (line 71) | def aggregate(self,cluster_method,merge_way='sum',**params) -> any:

FILE: UCTB/preprocess/dataset_helper.py
  function print_dic_info (line 6) | def print_dic_info(dic, dic_name, tag=''):
  function get_timedelta (line 23) | def get_timedelta(dic):
  function build_uctb_dataset (line 27) | def build_uctb_dataset(traffic_node, time_fitness, node_station_info, ti...
  function convert_uctb_data (line 80) | def convert_uctb_data():

FILE: UCTB/preprocess/preprocessor.py
  class Normalizer (line 5) | class Normalizer(ABC):
    method __init__ (line 13) | def __init__(self, X):
    method transform (line 16) | def transform(self, X_in):
    method inverse_transform (line 19) | def inverse_transform(self, X_in):
  class MaxMinNormalizer (line 24) | class MaxMinNormalizer(Normalizer):
    method __init__ (line 32) | def __init__(self, X,method='all'):
    method transform (line 39) | def transform(self, X):
    method inverse_transform (line 53) | def inverse_transform(self, X):
  class WhiteNormalizer (line 68) | class WhiteNormalizer(Normalizer):
    method __init__ (line 72) | def __init__(self, X,method='all'):
    method transform (line 75) | def transform(self, X):
    method inverse_transform (line 86) | def inverse_transform(self, X):
  class ZscoreNormalizer (line 97) | class ZscoreNormalizer(Normalizer):
    method __init__ (line 105) | def __init__(self, X,method='all'):
    method transform (line 112) | def transform(self, X):
    method inverse_transform (line 126) | def inverse_transform(self, X):
  class MoveSample (line 141) | class MoveSample(object):
    method __init__ (line 142) | def __init__(self, feature_step, feature_stride, feature_length, targe...
    method general_move_sample (line 148) | def general_move_sample(self, data):
  class ST_MoveSample (line 162) | class ST_MoveSample(object):
    method __init__ (line 177) | def __init__(self, closeness_len, period_len, trend_len, target_length...
    method move_sample (line 194) | def move_sample(self, data):
  class SplitData (line 239) | class SplitData(object):
    method split_data (line 244) | def split_data(data, ratio_list):
    method split_feed_dict (line 262) | def split_feed_dict(feed_dict, sequence_length, ratio_list):
  function chooseNormalizer (line 282) | def chooseNormalizer(in_arg,X_train):

FILE: UCTB/preprocess/time_utils.py
  function is_work_day_america (line 9) | def is_work_day_america(date, city):
  function is_work_day_china (line 32) | def is_work_day_china(date, city):
  function is_valid_date (line 46) | def is_valid_date(date_str):

FILE: UCTB/train/EarlyStopping.py
  class EarlyStopping (line 4) | class EarlyStopping(object):
    method __init__ (line 17) | def __init__(self, patience):
    method stop (line 23) | def stop(self, new_value):
  class EarlyStoppingTTest (line 47) | class EarlyStoppingTTest(object):
    method __init__ (line 64) | def __init__(self, length, p_value_threshold):
    method stop (line 70) | def stop(self, new_value):

FILE: UCTB/train/LossFunction.py
  function masked_mse (line 6) | def masked_mse(preds, labels, null_val=np.nan):
  function masked_rmse (line 22) | def masked_rmse(preds, labels, null_val=np.nan):
  function masked_mae (line 27) | def masked_mae(preds, labels, null_val=np.nan):
  function masked_mape (line 41) | def masked_mape(preds, labels, null_val=np.nan):
  function mae_loss (line 55) | def mae_loss(pred, label):
  function mask_np (line 72) | def mask_np(array, null_val):
  function masked_mse_np (line 82) | def masked_mse_np(y_true, y_pred, null_val=np.nan):
  function masked_mae_np (line 92) | def masked_mae_np(y_true, y_pred, null_val=np.nan):
  function masked_mape_np (line 102) | def masked_mape_np(y_true, y_pred, null_val=np.nan):
  function masked_mae_loss (line 114) | def masked_mae_loss(mask_value):
  function MAE_torch (line 123) | def MAE_torch(pred, true, mask_value=None):
  function huber_loss (line 132) | def huber_loss(data, label, rho=1):

FILE: UCTB/train/MiniBatchTrain.py
  class MiniBatchTrain (line 4) | class MiniBatchTrain():
    method __init__ (line 13) | def __init__(self, X, Y, batch_size):
    method shuffle (line 28) | def shuffle(X, Y):
    method get_batch (line 36) | def get_batch(self):
    method restart (line 53) | def restart(self):
  class MiniBatchTrainMultiData (line 61) | class MiniBatchTrainMultiData(object):
    method __init__ (line 70) | def __init__(self, data, batch_size, shuffle=True):
    method shuffle (line 85) | def shuffle(data):
    method get_batch (line 90) | def get_batch(self):
    method restart (line 105) | def restart(self):
  class MiniBatchFeedDict (line 113) | class MiniBatchFeedDict(object):
    method __init__ (line 124) | def __init__(self, feed_dict, sequence_length, batch_size, shuffle=True):
    method get_batch (line 149) | def get_batch(self):
    method shuffle (line 171) | def shuffle(data):
    method restart (line 176) | def restart(self):

FILE: UCTB/utils/make_predict_dataset.py
  function save_predict_in_dataset (line 9) | def save_predict_in_dataset(data_loader, predict_val, method):
  function save_predict_and_graph_in_tsv_and_array (line 47) | def save_predict_and_graph_in_tsv_and_array(data_loader, prediction,args...
  function save_predict_and_graph_in_tsv_and_array (line 140) | def save_predict_and_graph_in_tsv_and_array(data_loader, prediction,args...

FILE: UCTB/utils/multi_threads.py
  function multiple_process (line 8) | def multiple_process(distribute_list, partition_func, task_func, n_jobs,...

FILE: UCTB/utils/utils_AGCRN.py
  class Trainer (line 13) | class Trainer(object):
    method __init__ (line 14) | def __init__(self, model, train_loader, val_loader, test_loader,
    method val_epoch (line 61) | def val_epoch(self, epoch, val_dataloader):
    method train_epoch (line 79) | def train_epoch(self, epoch):
    method train (line 122) | def train(self):
    method save_checkpoint (line 181) | def save_checkpoint(self):
    method test (line 191) | def test(model, args, data_loader,  logger, path=None):
    method _compute_sampling_threshold (line 213) | def _compute_sampling_threshold(global_step, k):
  function get_dataloader_AGCRN (line 223) | def get_dataloader_AGCRN(data_loader, batchsize, tod=False, dow=False, w...
  function data_loader_torch (line 266) | def data_loader_torch(X, Y, batch_size, shuffle=True, drop_last=True):
  function get_logger (line 276) | def get_logger(root, name=None, debug=True):

FILE: UCTB/utils/utils_ASTGCN.py
  function load_data (line 18) | def load_data(data_loader, DEVICE, batch_size, shuffle=True):
  function train_main (line 110) | def train_main(training_config, params_path, DEVICE, net, val_loader, tr...
  function predict_main (line 249) | def predict_main(net, global_step, data_loader, data_target_tensor, para...
  function compute_val_loss_mstgcn (line 302) | def compute_val_loss_mstgcn(net, val_loader, criterion,  masked_flag,mis...

FILE: UCTB/utils/utils_GMAN.py
  function build_model (line 14) | def build_model(log, time_fitness, trainX, args, SE):
  function Train (line 55) | def Train(log, args, trainX, trainY, trainTE, valX, valTE, valY, X, TE, ...
  function Test (line 157) | def Test(log, args, testX, testTE, X, TE, is_training, sess, pred):
  function load_data (line 181) | def load_data(args, data_loader):
  function placeholder (line 268) | def placeholder(P, Q, N):
  function graph_to_adj_files (line 276) | def graph_to_adj_files(adjacent_matrix, Adj_file):
  function read_graph (line 287) | def read_graph(edgelist):
  function learn_embeddings (line 294) | def learn_embeddings(walks, dimensions, output_file, epochs):
  function seq2instance (line 303) | def seq2instance(data, P, Q):
  function log_string (line 314) | def log_string(log, string):

FILE: UCTB/utils/utils_GraphWaveNet.py
  class trainer (line 12) | class trainer():
    method __init__ (line 13) | def __init__(self, in_dim, seq_length, num_nodes, nhid , dropout, lrat...
    method train (line 20) | def train(self, input, real_val):
    method eval (line 41) | def eval(self, input, real_val):
  function Training (line 55) | def Training(args, dataloader,  device, engine):
  function Test (line 118) | def Test(args,dataloader,device,engine,epoch_id, loss_id):
  class DataLoader (line 142) | class DataLoader(object):
    method __init__ (line 143) | def __init__(self, xs, ys, batch_size, pad_with_last_sample=True):
    method shuffle (line 163) | def shuffle(self):
    method get_iterator (line 169) | def get_iterator(self):
  function load_dataset (line 185) | def load_dataset(uctb_data_loader, batch_size, valid_batch_size=None, te...

FILE: UCTB/utils/utils_MTGNN.py
  function normal_std (line 14) | def normal_std(x):
  class Trainer (line 16) | class Trainer():
    method __init__ (line 17) | def __init__(self, model, lrate, wdecay, clip, step_size, seq_out_len,...
    method train (line 29) | def train(self, input, real_val, idx=None):
    method eval (line 55) | def eval(self, input, real_val):
  class DataLoaderS (line 66) | class DataLoaderS(object):
    method __init__ (line 68) | def __init__(self, file_name, train, valid, device, horizon, window, n...
    method _normalized (line 91) | def _normalized(self, normalize):
    method _split (line 106) | def _split(self, train, valid, test):
    method _batchify (line 115) | def _batchify(self, idx_set, horizon):
    method get_batches (line 126) | def get_batches(self, inputs, targets, batch_size, shuffle=True):
  class DataLoaderM (line 143) | class DataLoaderM(object):
    method __init__ (line 144) | def __init__(self, xs, ys, batch_size, pad_with_last_sample=True):
    method shuffle (line 164) | def shuffle(self):
    method get_iterator (line 170) | def get_iterator(self):
  class StandardScaler (line 183) | class StandardScaler():
    method __init__ (line 187) | def __init__(self, mean, std):
    method transform (line 190) | def transform(self, data):
    method inverse_transform (line 192) | def inverse_transform(self, data):
  function sym_adj (line 196) | def sym_adj(adj):
  function asym_adj (line 205) | def asym_adj(adj):
  function calculate_normalized_laplacian (line 214) | def calculate_normalized_laplacian(adj):
  function calculate_scaled_laplacian (line 229) | def calculate_scaled_laplacian(adj_mx, lambda_max=2, undirected=True):
  function load_pickle (line 243) | def load_pickle(pickle_file):
  function load_adj (line 255) | def load_adj(pkl_filename):
  class Optim (line 259) | class Optim(object):
    method _makeOptimizer (line 261) | def _makeOptimizer(self):
    method __init__ (line 273) | def __init__(self, params, method, lr, clip, lr_decay=1, start_decay_a...
    method step (line 285) | def step(self):
    method updateLearningRate (line 307) | def updateLearningRate(self, ppl, epoch):
  function masked_mse (line 342) | def masked_mse(preds, labels, null_val=np.nan):
  function masked_rmse (line 355) | def masked_rmse(preds, labels, null_val=np.nan):
  function masked_mae (line 359) | def masked_mae(preds, labels, null_val=np.nan):
  function masked_mape (line 372) | def masked_mape(preds, labels, null_val=np.nan):
  function metric (line 386) | def metric(pred, real):
  function load_node_feature (line 393) | def load_node_feature(path):
  function normal_std (line 408) | def normal_std(x):
  function load_dataset (line 413) | def load_dataset(uctb_data_loader, batch_size, valid_batch_size=None, te...

FILE: UCTB/utils/utils_STGCN.py
  function model_save (line 9) | def model_save(sess, global_steps, model_name, save_path='./output/model...
  function model_train (line 24) | def model_train(inputs, blocks, args, sum_path='./output/models'):
  function model_test (line 105) | def model_test(inputs, batch_size, n_his, n_pred, inf_mode, load_path='....
  function data_gen (line 146) | def data_gen(data_loader):
  function multi_pred (line 187) | def multi_pred(sess, y_pred, seq, batch_size, n_his, n_pred, step_idx, d...
  class Dataset (line 220) | class Dataset(object):
    method __init__ (line 221) | def __init__(self, data):
    method get_data (line 225) | def get_data(self, type):
    method get_len (line 230) | def get_len(self, type):

FILE: UCTB/utils/utils_STSGCN.py
  function configData (line 11) | def configData(args, data_loader, batch_size, config, ctx):
  function training (line 112) | def training(epochs, metric, mod, train_loader, val_loader, test_loader,...

FILE: docs/_static/_sphinx_javascript_frameworks_compat.js
  function highlight (line 62) | function highlight(node, addItems) {

FILE: docs/_static/doctools.js
  function highlight (line 69) | function highlight(node) {

FILE: docs/_static/jquery-3.1.0.js
  function DOMEval (line 77) | function DOMEval( code, doc ) {
  function isArrayLike (line 528) | function isArrayLike( obj ) {
  function Sizzle (line 760) | function Sizzle( selector, context, results, seed ) {
  function createCache (line 899) | function createCache() {
  function markFunction (line 917) | function markFunction( fn ) {
  function assert (line 926) | function assert( fn ) {
  function addHandle (line 948) | function addHandle( attrs, handler ) {
  function siblingCheck (line 963) | function siblingCheck( a, b ) {
  function createInputPseudo (line 989) | function createInputPseudo( type ) {
  function createButtonPseudo (line 1000) | function createButtonPseudo( type ) {
  function createDisabledPseudo (line 1011) | function createDisabledPseudo( disabled ) {
  function createPositionalPseudo (line 1039) | function createPositionalPseudo( fn ) {
  function testContext (line 1062) | function testContext( context ) {
  function setFilters (line 2118) | function setFilters() {}
  function toSelector (line 2189) | function toSelector( tokens ) {
  function addCombinator (line 2199) | function addCombinator( matcher, combinator, base ) {
  function elementMatcher (line 2261) | function elementMatcher( matchers ) {
  function multipleContexts (line 2275) | function multipleContexts( selector, contexts, results ) {
  function condense (line 2284) | function condense( unmatched, map, filter, context, xml ) {
  function setMatcher (line 2305) | function setMatcher( preFilter, selector, matcher, postFilter, postFinde...
  function matcherFromTokens (line 2398) | function matcherFromTokens( tokens ) {
  function matcherFromGroupMatchers (line 2456) | function matcherFromGroupMatchers( elementMatchers, setMatchers ) {
  function winnow (line 2798) | function winnow( elements, qualifier, not ) {
  function sibling (line 3094) | function sibling( cur, dir ) {
  function createOptions (line 3170) | function createOptions( options ) {
  function Identity (line 3395) | function Identity( v ) {
  function Thrower (line 3398) | function Thrower( ex ) {
  function adoptValue (line 3402) | function adoptValue( value, resolve, reject ) {
  function resolve (line 3494) | function resolve( depth, deferred, handler, special ) {
  function completed (line 3860) | function completed() {
  function Data (line 3959) | function Data() {
  function dataAttr (line 4128) | function dataAttr( elem, key, data ) {
  function adjustCSS (line 4448) | function adjustCSS( elem, prop, valueParts, tween ) {
  function getDefaultDisplay (line 4513) | function getDefaultDisplay( elem ) {
  function showHide (line 4536) | function showHide( elements, show ) {
  function getAll (line 4637) | function getAll( context, tag ) {
  function setGlobalEval (line 4654) | function setGlobalEval( elems, refElements ) {
  function buildFragment (line 4670) | function buildFragment( elems, context, scripts, selection, ignored ) {
  function returnTrue (line 4793) | function returnTrue() {
  function returnFalse (line 4797) | function returnFalse() {
  function safeActiveElement (line 4803) | function safeActiveElement() {
  function on (line 4809) | function on( elem, types, selector, data, fn, one ) {
  function manipulationTarget (line 5518) | function manipulationTarget( elem, content ) {
  function disableScript (line 5529) | function disableScript( elem ) {
  function restoreScript (line 5533) | function restoreScript( elem ) {
  function cloneCopyEvent (line 5545) | function cloneCopyEvent( src, dest ) {
  function fixInput (line 5580) | function fixInput( src, dest ) {
  function domManip (line 5593) | function domManip( collection, args, callback, ignored ) {
  function remove (line 5683) | function remove( elem, selector, keepData ) {
  function computeStyleTests (line 5976) | function computeStyleTests() {
  function curCSS (line 6050) | function curCSS( elem, name, computed ) {
  function addGetHookIf (line 6097) | function addGetHookIf( conditionFn, hookFn ) {
  function vendorPropName (line 6133) | function vendorPropName( name ) {
  function setPositiveNumber (line 6152) | function setPositiveNumber( elem, value, subtract ) {
  function augmentWidthOrHeight (line 6164) | function augmentWidthOrHeight( elem, name, extra, isBorderBox, styles ) {
  function getWidthOrHeight (line 6208) | function getWidthOrHeight( elem, name, extra ) {
  function Tween (line 6516) | function Tween( elem, options, prop, end, easing ) {
  function raf (line 6639) | function raf() {
  function createFxNow (line 6647) | function createFxNow() {
  function genFx (line 6655) | function genFx( type, includeWidth ) {
  function createTween (line 6675) | function createTween( value, prop, animation ) {
  function defaultPrefilter (line 6689) | function defaultPrefilter( elem, props, opts ) {
  function propFilter (line 6860) | function propFilter( props, specialEasing ) {
  function Animation (line 6897) | function Animation( elem, properties, options ) {
  function getClass (line 7588) | function getClass( elem ) {
  function buildParams (line 8213) | function buildParams( prefix, obj, traditional, add ) {
  function addToPrefiltersOrTransports (line 8359) | function addToPrefiltersOrTransports( structure ) {
  function inspectPrefiltersOrTransports (line 8393) | function inspectPrefiltersOrTransports( structure, options, originalOpti...
  function ajaxExtend (line 8422) | function ajaxExtend( target, src ) {
  function ajaxHandleResponses (line 8442) | function ajaxHandleResponses( s, jqXHR, responses ) {
  function ajaxConvert (line 8500) | function ajaxConvert( s, response, jqXHR, isSuccess ) {
  function done (line 9013) | function done( status, nativeStatusText, responses, headers ) {
  function getWindow (line 9738) | function getWindow( elem ) {

FILE: docs/_static/jquery-3.2.1.js
  function DOMEval (line 76) | function DOMEval( code, doc ) {
  function isArrayLike (line 522) | function isArrayLike( obj ) {
  function Sizzle (line 754) | function Sizzle( selector, context, results, seed ) {
  function createCache (line 893) | function createCache() {
  function markFunction (line 911) | function markFunction( fn ) {
  function assert (line 920) | function assert( fn ) {
  function addHandle (line 942) | function addHandle( attrs, handler ) {
  function siblingCheck (line 957) | function siblingCheck( a, b ) {
  function createInputPseudo (line 983) | function createInputPseudo( type ) {
  function createButtonPseudo (line 994) | function createButtonPseudo( type ) {
  function createDisabledPseudo (line 1005) | function createDisabledPseudo( disabled ) {
  function createPositionalPseudo (line 1061) | function createPositionalPseudo( fn ) {
  function testContext (line 1084) | function testContext( context ) {
  function setFilters (line 2166) | function setFilters() {}
  function toSelector (line 2237) | function toSelector( tokens ) {
  function addCombinator (line 2247) | function addCombinator( matcher, combinator, base ) {
  function elementMatcher (line 2311) | function elementMatcher( matchers ) {
  function multipleContexts (line 2325) | function multipleContexts( selector, contexts, results ) {
  function condense (line 2334) | function condense( unmatched, map, filter, context, xml ) {
  function setMatcher (line 2355) | function setMatcher( preFilter, selector, matcher, postFilter, postFinde...
  function matcherFromTokens (line 2448) | function matcherFromTokens( tokens ) {
  function matcherFromGroupMatchers (line 2506) | function matcherFromGroupMatchers( elementMatchers, setMatchers ) {
  function nodeName (line 2842) | function nodeName( elem, name ) {
  function winnow (line 2854) | function winnow( elements, qualifier, not ) {
  function sibling (line 3157) | function sibling( cur, dir ) {
  function createOptions (line 3244) | function createOptions( options ) {
  function Identity (line 3469) | function Identity( v ) {
  function Thrower (line 3472) | function Thrower( ex ) {
  function adoptValue (line 3476) | function adoptValue( value, resolve, reject, noValue ) {
  function resolve (line 3569) | function resolve( depth, deferred, handler, special ) {
  function completed (line 3927) | function completed() {
  function Data (line 4029) | function Data() {
  function getData (line 4198) | function getData( data ) {
  function dataAttr (line 4223) | function dataAttr( elem, key, data ) {
  function adjustCSS (line 4536) | function adjustCSS( elem, prop, valueParts, tween ) {
  function getDefaultDisplay (line 4601) | function getDefaultDisplay( elem ) {
  function showHide (line 4624) | function showHide( elements, show ) {
  function getAll (line 4725) | function getAll( context, tag ) {
  function setGlobalEval (line 4750) | function setGlobalEval( elems, refElements ) {
  function buildFragment (line 4766) | function buildFragment( elems, context, scripts, selection, ignored ) {
  function returnTrue (line 4889) | function returnTrue() {
  function returnFalse (line 4893) | function returnFalse() {
  function safeActiveElement (line 4899) | function safeActiveElement() {
  function on (line 4905) | function on( elem, types, selector, data, fn, one ) {
  function manipulationTarget (line 5634) | function manipulationTarget( elem, content ) {
  function disableScript (line 5645) | function disableScript( elem ) {
  function restoreScript (line 5649) | function restoreScript( elem ) {
  function cloneCopyEvent (line 5661) | function cloneCopyEvent( src, dest ) {
  function fixInput (line 5696) | function fixInput( src, dest ) {
  function domManip (line 5709) | function domManip( collection, args, callback, ignored ) {
  function remove (line 5799) | function remove( elem, selector, keepData ) {
  function computeStyleTests (line 6092) | function computeStyleTests() {
  function curCSS (line 6166) | function curCSS( elem, name, computed ) {
  function addGetHookIf (line 6219) | function addGetHookIf( conditionFn, hookFn ) {
  function vendorPropName (line 6256) | function vendorPropName( name ) {
  function finalPropName (line 6277) | function finalPropName( name ) {
  function setPositiveNumber (line 6285) | function setPositiveNumber( elem, value, subtract ) {
  function augmentWidthOrHeight (line 6297) | function augmentWidthOrHeight( elem, name, extra, isBorderBox, styles ) {
  function getWidthOrHeight (line 6343) | function getWidthOrHeight( elem, name, extra ) {
  function Tween (line 6652) | function Tween( elem, options, prop, end, easing ) {
  function schedule (line 6775) | function schedule() {
  function createFxNow (line 6788) | function createFxNow() {
  function genFx (line 6796) | function genFx( type, includeWidth ) {
  function createTween (line 6816) | function createTween( value, prop, animation ) {
  function defaultPrefilter (line 6830) | function defaultPrefilter( elem, props, opts ) {
  function propFilter (line 7001) | function propFilter( props, specialEasing ) {
  function Animation (line 7038) | function Animation( elem, properties, options ) {
  function stripAndCollapse (line 7753) | function stripAndCollapse( value ) {
  function getClass (line 7759) | function getClass( elem ) {
  function buildParams (line 8383) | function buildParams( prefix, obj, traditional, add ) {
  function addToPrefiltersOrTransports (line 8533) | function addToPrefiltersOrTransports( structure ) {
  function inspectPrefiltersOrTransports (line 8567) | function inspectPrefiltersOrTransports( structure, options, originalOpti...
  function ajaxExtend (line 8596) | function ajaxExtend( target, src ) {
  function ajaxHandleResponses (line 8616) | function ajaxHandleResponses( s, jqXHR, responses ) {
  function ajaxConvert (line 8674) | function ajaxConvert( s, response, jqXHR, isSuccess ) {
  function done (line 9187) | function done( status, nativeStatusText, responses, headers ) {

FILE: docs/_static/jquery-3.4.1.js
  function DOMEval (line 98) | function DOMEval( code, node, doc ) {
  function toType (line 128) | function toType( obj ) {
  function isArrayLike (line 496) | function isArrayLike( obj ) {
  function Sizzle (line 729) | function Sizzle( selector, context, results, seed ) {
  function createCache (line 871) | function createCache() {
  function markFunction (line 889) | function markFunction( fn ) {
  function assert (line 898) | function assert( fn ) {
  function addHandle (line 920) | function addHandle( attrs, handler ) {
  function siblingCheck (line 935) | function siblingCheck( a, b ) {
  function createInputPseudo (line 961) | function createInputPseudo( type ) {
  function createButtonPseudo (line 972) | function createButtonPseudo( type ) {
  function createDisabledPseudo (line 983) | function createDisabledPseudo( disabled ) {
  function createPositionalPseudo (line 1039) | function createPositionalPseudo( fn ) {
  function testContext (line 1062) | function testContext( context ) {
  function setFilters (line 2150) | function setFilters() {}
  function toSelector (line 2221) | function toSelector( tokens ) {
  function addCombinator (line 2231) | function addCombinator( matcher, combinator, base ) {
  function elementMatcher (line 2295) | function elementMatcher( matchers ) {
  function multipleContexts (line 2309) | function multipleContexts( selector, contexts, results ) {
  function condense (line 2318) | function condense( unmatched, map, filter, context, xml ) {
  function setMatcher (line 2339) | function setMatcher( preFilter, selector, matcher, postFilter, postFinde...
  function matcherFromTokens (line 2432) | function matcherFromTokens( tokens ) {
  function matcherFromGroupMatchers (line 2490) | function matcherFromGroupMatchers( elementMatchers, setMatchers ) {
  function nodeName (line 2826) | function nodeName( elem, name ) {
  function winnow (line 2836) | function winnow( elements, qualifier, not ) {
  function sibling (line 3131) | function sibling( cur, dir ) {
  function createOptions (line 3218) | function createOptions( options ) {
  function Identity (line 3443) | function Identity( v ) {
  function Thrower (line 3446) | function Thrower( ex ) {
  function adoptValue (line 3450) | function adoptValue( value, resolve, reject, noValue ) {
  function resolve (line 3543) | function resolve( depth, deferred, handler, special ) {
  function completed (line 3908) | function completed() {
  function fcamelCase (line 4003) | function fcamelCase( all, letter ) {
  function camelCase (line 4010) | function camelCase( string ) {
  function Data (line 4027) | function Data() {
  function getData (line 4196) | function getData( data ) {
  function dataAttr (line 4221) | function dataAttr( elem, key, data ) {
  function adjustCSS (line 4554) | function adjustCSS( elem, prop, valueParts, tween ) {
  function getDefaultDisplay (line 4622) | function getDefaultDisplay( elem ) {
  function showHide (line 4645) | function showHide( elements, show ) {
  function getAll (line 4746) | function getAll( context, tag ) {
  function setGlobalEval (line 4771) | function setGlobalEval( elems, refElements ) {
  function buildFragment (line 4787) | function buildFragment( elems, context, scripts, selection, ignored ) {
  function returnTrue (line 4908) | function returnTrue() {
  function returnFalse (line 4912) | function returnFalse() {
  function expectSync (line 4922) | function expectSync( elem, type ) {
  function safeActiveElement (line 4929) | function safeActiveElement() {
  function on (line 4935) | function on( elem, types, selector, data, fn, one ) {
  function leverageNative (line 5420) | function leverageNative( el, type, expectSync ) {
  function manipulationTarget (line 5791) | function manipulationTarget( elem, content ) {
  function disableScript (line 5802) | function disableScript( elem ) {
  function restoreScript (line 5806) | function restoreScript( elem ) {
  function cloneCopyEvent (line 5816) | function cloneCopyEvent( src, dest ) {
  function fixInput (line 5851) | function fixInput( src, dest ) {
  function domManip (line 5864) | function domManip( collection, args, callback, ignored ) {
  function remove (line 5956) | function remove( elem, selector, keepData ) {
  function computeStyleTests (line 6249) | function computeStyleTests() {
  function roundPixelMeasures (line 6293) | function roundPixelMeasures( measure ) {
  function curCSS (line 6338) | function curCSS( elem, name, computed ) {
  function addGetHookIf (line 6391) | function addGetHookIf( conditionFn, hookFn ) {
  function vendorPropName (line 6416) | function vendorPropName( name ) {
  function finalPropName (line 6431) | function finalPropName( name ) {
  function setPositiveNumber (line 6457) | function setPositiveNumber( elem, value, subtract ) {
  function boxModelAdjustment (line 6469) | function boxModelAdjustment( elem, dimension, box, isBorderBox, styles, ...
  function getWidthOrHeight (line 6537) | function getWidthOrHeight( elem, dimension, extra ) {
  function Tween (line 6904) | function Tween( elem, options, prop, end, easing ) {
  function schedule (line 7027) | function schedule() {
  function createFxNow (line 7040) | function createFxNow() {
  function genFx (line 7048) | function genFx( type, includeWidth ) {
  function createTween (line 7068) | function createTween( value, prop, animation ) {
  function defaultPrefilter (line 7082) | function defaultPrefilter( elem, props, opts ) {
  function propFilter (line 7254) | function propFilter( props, specialEasing ) {
  function Animation (line 7291) | function Animation( elem, properties, options ) {
  function stripAndCollapse (line 8006) | function stripAndCollapse( value ) {
  function getClass (line 8012) | function getClass( elem ) {
  function classesToArray (line 8016) | function classesToArray( value ) {
  function buildParams (line 8638) | function buildParams( prefix, obj, traditional, add ) {
  function addToPrefiltersOrTransports (line 8792) | function addToPrefiltersOrTransports( structure ) {
  function inspectPrefiltersOrTransports (line 8826) | function inspectPrefiltersOrTransports( structure, options, originalOpti...
  function ajaxExtend (line 8855) | function ajaxExtend( target, src ) {
  function ajaxHandleResponses (line 8875) | function ajaxHandleResponses( s, jqXHR, responses ) {
  function ajaxConvert (line 8933) | function ajaxConvert( s, response, jqXHR, isSuccess ) {
  function done (line 9448) | function done( status, nativeStatusText, responses, headers ) {

FILE: docs/_static/jquery-3.5.1.js
  function DOMEval (line 103) | function DOMEval( code, node, doc ) {
  function toType (line 133) | function toType( obj ) {
  function isArrayLike (line 503) | function isArrayLike( obj ) {
  function Sizzle (line 755) | function Sizzle( selector, context, results, seed ) {
  function createCache (line 903) | function createCache() {
  function markFunction (line 923) | function markFunction( fn ) {
  function assert (line 932) | function assert( fn ) {
  function addHandle (line 956) | function addHandle( attrs, handler ) {
  function siblingCheck (line 971) | function siblingCheck( a, b ) {
  function createInputPseudo (line 997) | function createInputPseudo( type ) {
  function createButtonPseudo (line 1008) | function createButtonPseudo( type ) {
  function createDisabledPseudo (line 1019) | function createDisabledPseudo( disabled ) {
  function createPositionalPseudo (line 1075) | function createPositionalPseudo( fn ) {
  function testContext (line 1098) | function testContext( context ) {
  function setFilters (line 2309) | function setFilters() {}
  function toSelector (line 2383) | function toSelector( tokens ) {
  function addCombinator (line 2393) | function addCombinator( matcher, combinator, base ) {
  function elementMatcher (line 2460) | function elementMatcher( matchers ) {
  function multipleContexts (line 2474) | function multipleContexts( selector, contexts, results ) {
  function condense (line 2483) | function condense( unmatched, map, filter, context, xml ) {
  function setMatcher (line 2504) | function setMatcher( preFilter, selector, matcher, postFilter, postFinde...
  function matcherFromTokens (line 2604) | function matcherFromTokens( tokens ) {
  function matcherFromGroupMatchers (line 2667) | function matcherFromGroupMatchers( elementMatchers, setMatchers ) {
  function nodeName (line 3025) | function nodeName( elem, name ) {
  function winnow (line 3035) | function winnow( elements, qualifier, not ) {
  function sibling (line 3330) | function sibling( cur, dir ) {
  function createOptions (line 3423) | function createOptions( options ) {
  function Identity (line 3648) | function Identity( v ) {
  function Thrower (line 3651) | function Thrower( ex ) {
  function adoptValue (line 3655) | function adoptValue( value, resolve, reject, noValue ) {
  function resolve (line 3748) | function resolve( depth, deferred, handler, special ) {
  function completed (line 4113) | function completed() {
  function fcamelCase (line 4208) | function fcamelCase( _all, letter ) {
  function camelCase (line 4215) | function camelCase( string ) {
  function Data (line 4232) | function Data() {
  function getData (line 4401) | function getData( data ) {
  function dataAttr (line 4426) | function dataAttr( elem, key, data ) {
  function adjustCSS (line 4738) | function adjustCSS( elem, prop, valueParts, tween ) {
  function getDefaultDisplay (line 4806) | function getDefaultDisplay( elem ) {
  function showHide (line 4829) | function showHide( elements, show ) {
  function getAll (line 4961) | function getAll( context, tag ) {
  function setGlobalEval (line 4986) | function setGlobalEval( elems, refElements ) {
  function buildFragment (line 5002) | function buildFragment( elems, context, scripts, selection, ignored ) {
  function returnTrue (line 5097) | function returnTrue() {
  function returnFalse (line 5101) | function returnFalse() {
  function expectSync (line 5111) | function expectSync( elem, type ) {
  function safeActiveElement (line 5118) | function safeActiveElement() {
  function on (line 5124) | function on( elem, types, selector, data, fn, one ) {
  function leverageNative (line 5612) | function leverageNative( el, type, expectSync ) {
  function manipulationTarget (line 5976) | function manipulationTarget( elem, content ) {
  function disableScript (line 5987) | function disableScript( elem ) {
  function restoreScript (line 5991) | function restoreScript( elem ) {
  function cloneCopyEvent (line 6001) | function cloneCopyEvent( src, dest ) {
  function fixInput (line 6034) | function fixInput( src, dest ) {
  function domManip (line 6047) | function domManip( collection, args, callback, ignored ) {
  function remove (line 6139) | function remove( elem, selector, keepData ) {
  function computeStyleTests (line 6453) | function computeStyleTests() {
  function roundPixelMeasures (line 6497) | function roundPixelMeasures( measure ) {
  function curCSS (line 6571) | function curCSS( elem, name, computed ) {
  function addGetHookIf (line 6624) | function addGetHookIf( conditionFn, hookFn ) {
  function vendorPropName (line 6649) | function vendorPropName( name ) {
  function finalPropName (line 6664) | function finalPropName( name ) {
  function setPositiveNumber (line 6690) | function setPositiveNumber( _elem, value, subtract ) {
  function boxModelAdjustment (line 6702) | function boxModelAdjustment( elem, dimension, box, isBorderBox, styles, ...
  function getWidthOrHeight (line 6770) | function getWidthOrHeight( elem, dimension, extra ) {
  function Tween (line 7146) | function Tween( elem, options, prop, end, easing ) {
  function schedule (line 7269) | function schedule() {
  function createFxNow (line 7282) | function createFxNow() {
  function genFx (line 7290) | function genFx( type, includeWidth ) {
  function createTween (line 7310) | function createTween( value, prop, animation ) {
  function defaultPrefilter (line 7324) | function defaultPrefilter( elem, props, opts ) {
  function propFilter (line 7496) | function propFilter( props, specialEasing ) {
  function Animation (line 7533) | function Animation( elem, properties, options ) {
  function stripAndCollapse (line 8248) | function stripAndCollapse( value ) {
  function getClass (line 8254) | function getClass( elem ) {
  function classesToArray (line 8258) | function classesToArray( value ) {
  function buildParams (line 8885) | function buildParams( prefix, obj, traditional, add ) {
  function addToPrefiltersOrTransports (line 9039) | function addToPrefiltersOrTransports( structure ) {
  function inspectPrefiltersOrTransports (line 9073) | function inspectPrefiltersOrTransports( structure, options, originalOpti...
  function ajaxExtend (line 9102) | function ajaxExtend( target, src ) {
  function ajaxHandleResponses (line 9122) | function ajaxHandleResponses( s, jqXHR, responses ) {
  function ajaxConvert (line 9180) | function ajaxConvert( s, response, jqXHR, isSuccess ) {
  function done (line 9696) | function done( status, nativeStatusText, responses, headers ) {

FILE: docs/_static/jquery-3.6.0.js
  function DOMEval (line 107) | function DOMEval( code, node, doc ) {
  function toType (line 137) | function toType( obj ) {
  function isArrayLike (line 507) | function isArrayLike( obj ) {
  function Sizzle (line 759) | function Sizzle( selector, context, results, seed ) {
  function createCache (line 907) | function createCache() {
  function markFunction (line 927) | function markFunction( fn ) {
  function assert (line 936) | function assert( fn ) {
  function addHandle (line 960) | function addHandle( attrs, handler ) {
  function siblingCheck (line 975) | function siblingCheck( a, b ) {
  function createInputPseudo (line 1001) | function createInputPseudo( type ) {
  function createButtonPseudo (line 1012) | function createButtonPseudo( type ) {
  function createDisabledPseudo (line 1023) | function createDisabledPseudo( disabled ) {
  function createPositionalPseudo (line 1079) | function createPositionalPseudo( fn ) {
  function testContext (line 1102) | function testContext( context ) {
  function setFilters (line 2313) | function setFilters() {}
  function toSelector (line 2387) | function toSelector( tokens ) {
  function addCombinator (line 2397) | function addCombinator( matcher, combinator, base ) {
  function elementMatcher (line 2464) | function elementMatcher( matchers ) {
  function multipleContexts (line 2478) | function multipleContexts( selector, contexts, results ) {
  function condense (line 2487) | function condense( unmatched, map, filter, context, xml ) {
  function setMatcher (line 2508) | function setMatcher( preFilter, selector, matcher, postFilter, postFinde...
  function matcherFromTokens (line 2608) | function matcherFromTokens( tokens ) {
  function matcherFromGroupMatchers (line 2671) | function matcherFromGroupMatchers( elementMatchers, setMatchers ) {
  function nodeName (line 3029) | function nodeName( elem, name ) {
  function winnow (line 3039) | function winnow( elements, qualifier, not ) {
  function sibling (line 3334) | function sibling( cur, dir ) {
  function createOptions (line 3427) | function createOptions( options ) {
  function Identity (line 3652) | function Identity( v ) {
  function Thrower (line 3655) | function Thrower( ex ) {
  function adoptValue (line 3659) | function adoptValue( value, resolve, reject, noValue ) {
  function resolve (line 3752) | function resolve( depth, deferred, handler, special ) {
  function completed (line 4117) | function completed() {
  function fcamelCase (line 4212) | function fcamelCase( _all, letter ) {
  function camelCase (line 4219) | function camelCase( string ) {
  function Data (line 4236) | function Data() {
  function getData (line 4405) | function getData( data ) {
  function dataAttr (line 4430) | function dataAttr( elem, key, data ) {
  function adjustCSS (line 4742) | function adjustCSS( elem, prop, valueParts, tween ) {
  function getDefaultDisplay (line 4810) | function getDefaultDisplay( elem ) {
  function showHide (line 4833) | function showHide( elements, show ) {
  function getAll (line 4965) | function getAll( context, tag ) {
  function setGlobalEval (line 4990) | function setGlobalEval( elems, refElements ) {
  function buildFragment (line 5006) | function buildFragment( elems, context, scripts, selection, ignored ) {
  function returnTrue (line 5098) | function returnTrue() {
  function returnFalse (line 5102) | function returnFalse() {
  function expectSync (line 5112) | function expectSync( elem, type ) {
  function safeActiveElement (line 5119) | function safeActiveElement() {
  function on (line 5125) | function on( elem, types, selector, data, fn, one ) {
  function leverageNative (line 5613) | function leverageNative( el, type, expectSync ) {
  function manipulationTarget (line 5962) | function manipulationTarget( elem, content ) {
  function disableScript (line 5973) | function disableScript( elem ) {
  function restoreScript (line 5977) | function restoreScript( elem ) {
  function cloneCopyEvent (line 5987) | function cloneCopyEvent( src, dest ) {
  function fixInput (line 6020) | function fixInput( src, dest ) {
  function domManip (line 6033) | function domManip( collection, args, callback, ignored ) {
  function remove (line 6125) | function remove( elem, selector, keepData ) {
  function computeStyleTests (line 6439) | function computeStyleTests() {
  function roundPixelMeasures (line 6483) | function roundPixelMeasures( measure ) {
  function curCSS (line 6576) | function curCSS( elem, name, computed ) {
  function addGetHookIf (line 6629) | function addGetHookIf( conditionFn, hookFn ) {
  function vendorPropName (line 6654) | function vendorPropName( name ) {
  function finalPropName (line 6669) | function finalPropName( name ) {
  function setPositiveNumber (line 6695) | function setPositiveNumber( _elem, value, subtract ) {
  function boxModelAdjustment (line 6707) | function boxModelAdjustment( elem, dimension, box, isBorderBox, styles, ...
  function getWidthOrHeight (line 6775) | function getWidthOrHeight( elem, dimension, extra ) {
  function Tween (line 7151) | function Tween( elem, options, prop, end, easing ) {
  function schedule (line 7274) | function schedule() {
  function createFxNow (line 7287) | function createFxNow() {
  function genFx (line 7295) | function genFx( type, includeWidth ) {
  function createTween (line 7315) | function createTween( value, prop, animation ) {
  function defaultPrefilter (line 7329) | function defaultPrefilter( elem, props, opts ) {
  function propFilter (line 7501) | function propFilter( props, specialEasing ) {
  function Animation (line 7538) | function Animation( elem, properties, options ) {
  function stripAndCollapse (line 8254) | function stripAndCollapse( value ) {
  function getClass (line 8260) | function getClass( elem ) {
  function classesToArray (line 8264) | function classesToArray( value ) {
  function buildParams (line 8894) | function buildParams( prefix, obj, traditional, add ) {
  function addToPrefiltersOrTransports (line 9047) | function addToPrefiltersOrTransports( structure ) {
  function inspectPrefiltersOrTransports (line 9081) | function inspectPrefiltersOrTransports( structure, options, originalOpti...
  function ajaxExtend (line 9110) | function ajaxExtend( target, src ) {
  function ajaxHandleResponses (line 9130) | function ajaxHandleResponses( s, jqXHR, responses ) {
  function ajaxConvert (line 9188) | function ajaxConvert( s, response, jqXHR, isSuccess ) {
  function done (line 9704) | function done( status, nativeStatusText, responses, headers ) {

FILE: docs/_static/jquery.js
  function p (line 2) | function p(a,b){b=b||d;var c=b.createElement("script");c.text=a,b.head.a...
  function w (line 2) | function w(a){var b=!!a&&"length"in a&&a.length,c=r.type(a);return"funct...
  function ga (line 2) | function ga(a,b,d,e){var f,h,j,k,l,o,r,s=b&&b.ownerDocument,w=b?b.nodeTy...
  function ha (line 2) | function ha(){var a=[];function b(c,e){return a.push(c+" ")>d.cacheLengt...
  function ia (line 2) | function ia(a){return a[u]=!0,a}
  function ja (line 2) | function ja(a){var b=n.createElement("fieldset");try{return!!a(b)}catch(...
  function ka (line 2) | function ka(a,b){var c=a.split("|"),e=c.length;while(e--)d.attrHandle[c[...
  function la (line 2) | function la(a,b){var c=b&&a,d=c&&1===a.nodeType&&1===b.nodeType&&a.sourc...
  function ma (line 2) | function ma(a){return function(b){var c=b.nodeName.toLowerCase();return"...
  function na (line 2) | function na(a){return function(b){var c=b.nodeName.toLowerCase();return(...
  function oa (line 2) | function oa(a){return function(b){return"label"in b&&b.disabled===a||"fo...
  function pa (line 2) | function pa(a){return ia(function(b){return b=+b,ia(function(c,d){var e,...
  function qa (line 2) | function qa(a){return a&&"undefined"!=typeof a.getElementsByTagName&&a}
  function ra (line 2) | function ra(){}
  function sa (line 2) | function sa(a){for(var b=0,c=a.length,d="";b<c;b++)d+=a[b].value;return d}
  function ta (line 2) | function ta(a,b,c){var d=b.dir,e=b.next,f=e||d,g=c&&"parentNode"===f,h=x...
  function ua (line 2) | function ua(a){return a.length>1?function(b,c,d){var e=a.length;while(e-...
  function va (line 2) | function va(a,b,c){for(var d=0,e=b.length;d<e;d++)ga(a,b[d],c);return c}
  function wa (line 2) | function wa(a,b,c,d,e){for(var f,g=[],h=0,i=a.length,j=null!=b;h<i;h++)(...
  function xa (line 2) | function xa(a,b,c,d,e,f){return d&&!d[u]&&(d=xa(d)),e&&!e[u]&&(e=xa(e,f)...
  function ya (line 2) | function ya(a){for(var b,c,e,f=a.length,g=d.relative[a[0].type],h=g||d.r...
  function za (line 2) | function za(a,b){var c=b.length>0,e=a.length>0,f=function(f,g,h,i,k){var...
  function D (line 2) | function D(a,b,c){if(r.isFunction(b))return r.grep(a,function(a,d){retur...
  function J (line 2) | function J(a,b){while((a=a[b])&&1!==a.nodeType);return a}
  function L (line 2) | function L(a){var b={};return r.each(a.match(K)||[],function(a,c){b[c]=!...
  function M (line 2) | function M(a){return a}
  function N (line 2) | function N(a){throw a}
  function O (line 2) | function O(a,b,c){var d;try{a&&r.isFunction(d=a.promise)?d.call(a).done(...
  function g (line 2) | function g(b,c,d,e){return function(){var h=this,i=arguments,j=function(...
  function R (line 2) | function R(){d.removeEventListener("DOMContentLoaded",R),a.removeEventLi...
  function U (line 3) | function U(){this.expando=r.expando+U.uid++}
  function Z (line 3) | function Z(a,b,c){var d;if(void 0===c&&1===a.nodeType)if(d="data-"+b.rep...
  function da (line 3) | function da(a,b,c,d){var e,f=1,g=20,h=d?function(){return d.cur()}:funct...
  function fa (line 3) | function fa(a){var b,c=a.ownerDocument,d=a.nodeName,e=ea[d];return e?e:(...
  function ga (line 3) | function ga(a,b){for(var c,d,e=[],f=0,g=a.length;f<g;f++)d=a[f],d.style&...
  function la (line 3) | function la(a,b){var c="undefined"!=typeof a.getElementsByTagName?a.getE...
  function ma (line 3) | function ma(a,b){for(var c=0,d=a.length;c<d;c++)V.set(a[c],"globalEval",...
  function oa (line 3) | function oa(a,b,c,d,e){for(var f,g,h,i,j,k,l=b.createDocumentFragment(),...
  function ta (line 3) | function ta(){return!0}
  function ua (line 3) | function ua(){return!1}
  function va (line 3) | function va(){try{return d.activeElement}catch(a){}}
  function wa (line 3) | function wa(a,b,c,d,e,f){var g,h;if("object"==typeof b){"string"!=typeof...
  function Ca (line 3) | function Ca(a,b){return r.nodeName(a,"table")&&r.nodeName(11!==b.nodeTyp...
  function Da (line 3) | function Da(a){return a.type=(null!==a.getAttribute("type"))+"/"+a.type,a}
  function Ea (line 3) | function Ea(a){var b=Aa.exec(a.type);return b?a.type=b[1]:a.removeAttrib...
  function Fa (line 3) | function Fa(a,b){var c,d,e,f,g,h,i,j;if(1===b.nodeType){if(V.hasData(a)&...
  function Ga (line 3) | function Ga(a,b){var c=b.nodeName.toLowerCase();"input"===c&&ha.test(a.t...
  function Ha (line 3) | function Ha(a,b,c,d){b=g.apply([],b);var e,f,h,i,j,k,l=0,m=a.length,n=m-...
  function Ia (line 3) | function Ia(a,b,c){for(var d,e=b?r.filter(b,a):a,f=0;null!=(d=e[f]);f++)...
  function b (line 3) | function b(){if(i){i.style.cssText="box-sizing:border-box;position:relat...
  function Ma (line 3) | function Ma(a,b,c){var d,e,f,g,h=a.style;return c=c||La(a),c&&(g=c.getPr...
  function Na (line 3) | function Na(a,b){return{get:function(){return a()?void delete this.get:(...
  function Ta (line 3) | function Ta(a){if(a in Sa)return a;var b=a[0].toUpperCase()+a.slice(1),c...
  function Ua (line 3) | function Ua(a,b,c){var d=_.exec(b);return d?Math.max(0,d[2]-(c||0))+(d[3...
  function Va (line 3) | function Va(a,b,c,d,e){for(var f=c===(d?"border":"content")?4:"width"===...
  function Wa (line 3) | function Wa(a,b,c){var d,e=!0,f=La(a),g="border-box"===r.css(a,"boxSizin...
  function Xa (line 3) | function Xa(a,b,c,d,e){return new Xa.prototype.init(a,b,c,d,e)}
  function ab (line 3) | function ab(){Za&&(a.requestAnimationFrame(ab),r.fx.tick())}
  function bb (line 3) | function bb(){return a.setTimeout(function(){Ya=void 0}),Ya=r.now()}
  function cb (line 3) | function cb(a,b){var c,d=0,e={height:a};for(b=b?1:0;d<4;d+=2-b)c=aa[d],e...
  function db (line 3) | function db(a,b,c){for(var d,e=(gb.tweeners[b]||[]).concat(gb.tweeners["...
  function eb (line 3) | function eb(a,b,c){var d,e,f,g,h,i,j,k,l="width"in b||"height"in b,m=thi...
  function fb (line 3) | function fb(a,b){var c,d,e,f,g;for(c in a)if(d=r.camelCase(c),e=b[d],f=a...
  function gb (line 3) | function gb(a,b,c){var d,e,f=0,g=gb.prefilters.length,h=r.Deferred().alw...
  function mb (line 4) | function mb(a){return a.getAttribute&&a.getAttribute("class")||""}
  function xb (line 4) | function xb(a,b,c,d){var e;if(r.isArray(b))r.each(b,function(b,e){c||tb....
  function Jb (line 4) | function Jb(a){return function(b,c){"string"!=typeof b&&(c=b,b="*");var ...
  function Kb (line 4) | function Kb(a,b,c,d){var e={},f=a===Gb;function g(h){var i;return e[h]=!...
  function Lb (line 4) | function Lb(a,b){var c,d,e=r.ajaxSettings.flatOptions||{};for(c in b)voi...
  function Mb (line 4) | function Mb(a,b,c){var d,e,f,g,h=a.contents,i=a.dataTypes;while("*"===i[...
  function Nb (line 4) | function Nb(a,b,c,d){var e,f,g,h,i,j={},k=a.dataTypes.slice();if(k[1])fo...
  function A (line 4) | function A(b,c,d,h){var j,m,n,v,w,x=c;k||(k=!0,i&&a.clearTimeout(i),e=vo...
  function Sb (line 4) | function Sb(a){return r.isWindow(a)?a:9===a.nodeType&&a.defaultView}

FILE: docs/_static/js/theme.js
  function s (line 1) | function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&re...
  function ThemeNav (line 5) | function ThemeNav () {

FILE: docs/_static/searchtools.js
  function splitQuery (line 307) | function splitQuery(query) {
  function pulse (line 381) | function pulse() {
  function displayNextItem (line 513) | function displayNextItem() {

FILE: docs/_static/sphinx_highlight.js
  constant SPHINX_HIGHLIGHT_ENABLED (line 4) | const SPHINX_HIGHLIGHT_ENABLED = true

FILE: docs/_static/underscore-1.13.1.js
  function restArguments (line 64) | function restArguments(func, startIndex) {
  function isObject (line 88) | function isObject(obj) {
  function isNull (line 94) | function isNull(obj) {
  function isUndefined (line 99) | function isUndefined(obj) {
  function isBoolean (line 104) | function isBoolean(obj) {
  function isElement (line 109) | function isElement(obj) {
  function tagTester (line 114) | function tagTester(name) {
  function ie10IsDataView (line 162) | function ie10IsDataView(obj) {
  function has$1 (line 173) | function has$1(obj, key) {
  function isFinite$1 (line 192) | function isFinite$1(obj) {
  function isNaN$1 (line 197) | function isNaN$1(obj) {
  function constant (line 202) | function constant(value) {
  function createSizePropertyCheck (line 209) | function createSizePropertyCheck(getSizeProperty) {
  function shallowProperty (line 217) | function shallowProperty(key) {
  function isTypedArray (line 232) | function isTypedArray(obj) {
  function emulatedSet (line 248) | function emulatedSet(keys) {
  function collectNonEnumProps (line 263) | function collectNonEnumProps(obj, keys) {
  function keys (line 283) | function keys(obj) {
  function isEmpty (line 295) | function isEmpty(obj) {
  function isMatch (line 307) | function isMatch(object, attrs) {
  function _$1 (line 321) | function _$1(obj) {
  function toBufferView (line 344) | function toBufferView(bufferSource) {
  function eq (line 356) | function eq(a, b, aStack, bStack) {
  function deepEq (line 371) | function deepEq(a, b, aStack, bStack) {
  function isEqual (line 476) | function isEqual(a, b) {
  function allKeys (line 481) | function allKeys(obj) {
  function ie11fingerprint (line 494) | function ie11fingerprint(methods) {
  function values (line 533) | function values(obj) {
  function pairs (line 545) | function pairs(obj) {
  function invert (line 556) | function invert(obj) {
  function functions (line 566) | function functions(obj) {
  function createAssigner (line 575) | function createAssigner(keysFunc, defaults) {
  function ctor (line 605) | function ctor() {
  function baseCreate (line 610) | function baseCreate(prototype) {
  function create (line 623) | function create(prototype, props) {
  function clone (line 630) | function clone(obj) {
  function tap (line 638) | function tap(obj, interceptor) {
  function toPath$1 (line 645) | function toPath$1(path) {
  function toPath (line 652) | function toPath(path) {
  function deepGet (line 657) | function deepGet(obj, path) {
  function get (line 670) | function get(object, path, defaultValue) {
  function has (line 678) | function has(obj, path) {
  function identity (line 690) | function identity(value) {
  function matcher (line 696) | function matcher(attrs) {
  function property (line 705) | function property(path) {
  function optimizeCb (line 715) | function optimizeCb(func, context, argCount) {
  function baseIteratee (line 737) | function baseIteratee(value, context, argCount) {
  function iteratee (line 747) | function iteratee(value, context) {
  function cb (line 754) | function cb(value, context, argCount) {
  function mapObject (line 761) | function mapObject(obj, iteratee, context) {
  function noop (line 774) | function noop(){}
  function propertyOf (line 777) | function propertyOf(obj) {
  function times (line 785) | function times(n, iteratee, context) {
  function random (line 793) | function random(min, max) {
  function createEscaper (line 808) | function createEscaper(map) {
  function escapeChar (line 867) | function escapeChar(match) {
  function template (line 882) | function template(text, settings, oldSettings) {
  function result (line 950) | function result(obj, path, fallback) {
  function uniqueId (line 970) | function uniqueId(prefix) {
  function chain (line 976) | function chain(obj) {
  function executeBound (line 985) | function executeBound(sourceFunc, boundFunc, context, callingContext, ar...
  function flatten$1 (line 1030) | function flatten$1(input, depth, strict, output) {
  function memoize (line 1071) | function memoize(func, hasher) {
  function throttle (line 1099) | function throttle(func, wait, options) {
  function debounce (line 1144) | function debounce(func, wait, immediate) {
  function wrap (line 1181) | function wrap(func, wrapper) {
  function negate (line 1186) | function negate(predicate) {
  function compose (line 1194) | function compose() {
  function after (line 1206) | function after(times, func) {
  function before (line 1216) | function before(times, func) {
  function findKey (line 1232) | function findKey(obj, predicate, context) {
  function createPredicateIndexFinder (line 1242) | function createPredicateIndexFinder(dir) {
  function sortedIndex (line 1262) | function sortedIndex(array, obj, iteratee, context) {
  function createIndexFinder (line 1274) | function createIndexFinder(dir, predicateFind, sortedIndex) {
  function find (line 1309) | function find(obj, predicate, context) {
  function findWhere (line 1317) | function findWhere(obj, attrs) {
  function each (line 1325) | function each(obj, iteratee, context) {
  function map (line 1342) | function map(obj, iteratee, context) {
  function createReduce (line 1355) | function createReduce(dir) {
  function filter (line 1387) | function filter(obj, predicate, context) {
  function reject (line 1397) | function reject(obj, predicate, context) {
  function every (line 1402) | function every(obj, predicate, context) {
  function some (line 1414) | function some(obj, predicate, context) {
  function contains (line 1426) | function contains(obj, item, fromIndex, guard) {
  function pluck (line 1456) | function pluck(obj, key) {
  function where (line 1462) | function where(obj, attrs) {
  function max (line 1467) | function max(obj, iteratee, context) {
  function min (line 1492) | function min(obj, iteratee, context) {
  function sample (line 1520) | function sample(obj, n, guard) {
  function shuffle (line 1539) | function shuffle(obj) {
  function sortBy (line 1544) | function sortBy(obj, iteratee, context) {
  function group (line 1565) | function group(behavior, partition) {
  function toArray (line 1604) | function toArray(obj) {
  function size (line 1616) | function size(obj) {
  function keyInObj (line 1623) | function keyInObj(value, key, obj) {
  function initial (line 1665) | function initial(array, n, guard) {
  function first (line 1671) | function first(array, n, guard) {
  function rest (line 1680) | function rest(array, n, guard) {
  function last (line 1686) | function last(array, n, guard) {
  function compact (line 1693) | function compact(array) {
  function flatten (line 1699) | function flatten(array, depth) {
  function uniq (line 1722) | function uniq(array, isSorted, iteratee, context) {
  function intersection (line 1757) | function intersection(array) {
  function unzip (line 1774) | function unzip(array) {
  function object (line 1791) | function object(list, values) {
  function range (line 1806) | function range(start, stop, step) {
  function chunk (line 1827) | function chunk(array, count) {
  function chainResult (line 1838) | function chainResult(instance, obj) {
  function mixin (line 1843) | function mixin(obj) {

FILE: docs/_static/underscore-1.3.1.js
  function eq (line 669) | function eq(a, b, stack) {

FILE: docs/_static/underscore.js
  function q (line 8) | function q(a,c,d){if(a===c)return a!==0||1/a==1/c;if(a==null||c==null)re...

FILE: docs/_static/websupport.js
  function init (line 47) | function init() {
  function initEvents (line 52) | function initEvents() {
  function setComparator (line 107) | function setComparator() {
  function initComparator (line 127) | function initComparator() {
  function show (line 147) | function show(id) {
  function hide (line 168) | function hide(id) {
  function getComments (line 181) | function getComments(id) {
  function addComment (line 216) | function addComment(form) {
  function appendComments (line 275) | function appendComments(comments, ul) {
  function insertComment (line 290) | function insertComment(comment) {
  function acceptComment (line 320) | function acceptComment(id) {
  function deleteComment (line 335) | function deleteComment(id) {
  function showProposal (line 369) | function showProposal(id) {
  function hideProposal (line 375) | function hideProposal(id) {
  function showProposeChange (line 381) | function showProposeChange(id) {
  function hideProposeChange (line 390) | function hideProposeChange(id) {
  function toggleCommentMarkupBox (line 398) | function toggleCommentMarkupBox(id) {
  function handleReSort (line 403) | function handleReSort(link) {
  function handleVote (line 426) | function handleVote(link) {
  function openReply (line 488) | function openReply(id) {
  function closeReply (line 516) | function closeReply(id) {
  function sortComments (line 530) | function sortComments(comments) {
  function getChildren (line 542) | function getChildren(ul, recursive) {
  function createCommentDiv (line 555) | function createCommentDiv(comment) {
  function renderTemplate (line 593) | function renderTemplate(template, context) {
  function showError (line 610) | function showError(message) {

FILE: docs/sphinx/_build/_static/_sphinx_javascript_frameworks_compat.js
  function highlight (line 62) | function highlight(node, addItems) {

FILE: docs/sphinx/_build/_static/doctools.js
  function highlight (line 69) | function highlight(node) {

FILE: docs/sphinx/_build/_static/jquery-3.1.0.js
  function DOMEval (line 77) | function DOMEval( code, doc ) {
  function isArrayLike (line 528) | function isArrayLike( obj ) {
  function Sizzle (line 760) | function Sizzle( selector, context, results, seed ) {
  function createCache (line 899) | function createCache() {
  function markFunction (line 917) | function markFunction( fn ) {
  function assert (line 926) | function assert( fn ) {
  function addHandle (line 948) | function addHandle( attrs, handler ) {
  function siblingCheck (line 963) | function siblingCheck( a, b ) {
  function createInputPseudo (line 989) | function createInputPseudo( type ) {
  function createButtonPseudo (line 1000) | function createButtonPseudo( type ) {
  function createDisabledPseudo (line 1011) | function createDisabledPseudo( disabled ) {
  function createPositionalPseudo (line 1039) | function createPositionalPseudo( fn ) {
  function testContext (line 1062) | function testContext( context ) {
  function setFilters (line 2118) | function setFilters() {}
  function toSelector (line 2189) | function toSelector( tokens ) {
  function addCombinator (line 2199) | function addCombinator( matcher, combinator, base ) {
  function elementMatcher (line 2261) | function elementMatcher( matchers ) {
  function multipleContexts (line 2275) | function multipleContexts( selector, contexts, results ) {
  function condense (line 2284) | function condense( unmatched, map, filter, context, xml ) {
  function setMatcher (line 2305) | function setMatcher( preFilter, selector, matcher, postFilter, postFinde...
  function matcherFromTokens (line 2398) | function matcherFromTokens( tokens ) {
  function matcherFromGroupMatchers (line 2456) | function matcherFromGroupMatchers( elementMatchers, setMatchers ) {
  function winnow (line 2798) | function winnow( elements, qualifier, not ) {
  function sibling (line 3094) | function sibling( cur, dir ) {
  function createOptions (line 3170) | function createOptions( options ) {
  function Identity (line 3395) | function Identity( v ) {
  function Thrower (line 3398) | function Thrower( ex ) {
  function adoptValue (line 3402) | function adoptValue( value, resolve, reject ) {
  function resolve (line 3494) | function resolve( depth, deferred, handler, special ) {
  function completed (line 3860) | function completed() {
  function Data (line 3959) | function Data() {
  function dataAttr (line 4128) | function dataAttr( elem, key, data ) {
  function adjustCSS (line 4448) | function adjustCSS( elem, prop, valueParts, tween ) {
  function getDefaultDisplay (line 4513) | function getDefaultDisplay( elem ) {
  function showHide (line 4536) | function showHide( elements, show ) {
  function getAll (line 4637) | function getAll( context, tag ) {
  function setGlobalEval (line 4654) | function setGlobalEval( elems, refElements ) {
  function buildFragment (line 4670) | function buildFragment( elems, context, scripts, selection, ignored ) {
  function returnTrue (line 4793) | function returnTrue() {
  function returnFalse (line 4797) | function returnFalse() {
  function safeActiveElement (line 4803) | function safeActiveElement() {
  function on (line 4809) | function on( elem, types, selector, data, fn, one ) {
  function manipulationTarget (line 5518) | function manipulationTarget( elem, content ) {
  function disableScript (line 5529) | function disableScript( elem ) {
  function restoreScript (line 5533) | function restoreScript( elem ) {
  function cloneCopyEvent (line 5545) | function cloneCopyEvent( src, dest ) {
  function fixInput (line 5580) | function fixInput( src, dest ) {
  function domManip (line 5593) | function domManip( collection, args, callback, ignored ) {
  function remove (line 5683) | function remove( elem, selector, keepData ) {
  function computeStyleTests (line 5976) | function computeStyleTests() {
  function curCSS (line 6050) | function curCSS( elem, name, computed ) {
  function addGetHookIf (line 6097) | function addGetHookIf( conditionFn, hookFn ) {
  function vendorPropName (line 6133) | function vendorPropName( name ) {
  function setPositiveNumber (line 6152) | function setPositiveNumber( elem, value, subtract ) {
  function augmentWidthOrHeight (line 6164) | function augmentWidthOrHeight( elem, name, extra, isBorderBox, styles ) {
  function getWidthOrHeight (line 6208) | function getWidthOrHeight( elem, name, extra ) {
  function Tween (line 6516) | function Tween( elem, options, prop, end, easing ) {
  function raf (line 6639) | function raf() {
  function createFxNow (line 6647) | function createFxNow() {
  function genFx (line 6655) | function genFx( type, includeWidth ) {
  function createTween (line 6675) | function createTween( value, prop, animation ) {
  function defaultPrefilter (line 6689) | function defaultPrefilter( elem, props, opts ) {
  function propFilter (line 6860) | function propFilter( props, specialEasing ) {
  function Animation (line 6897) | function Animation( elem, properties, options ) {
  function getClass (line 7588) | function getClass( elem ) {
  function buildParams (line 8213) | function buildParams( prefix, obj, traditional, add ) {
  function addToPrefiltersOrTransports (line 8359) | function addToPrefiltersOrTransports( structure ) {
  function inspectPrefiltersOrTransports (line 8393) | function inspectPrefiltersOrTransports( structure, options, originalOpti...
  function ajaxExtend (line 8422) | function ajaxExtend( target, src ) {
  function ajaxHandleResponses (line 8442) | function ajaxHandleResponses( s, jqXHR, responses ) {
  function ajaxConvert (line 8500) | function ajaxConvert( s, response, jqXHR, isSuccess ) {
  function done (line 9013) | function done( status, nativeStatusText, responses, headers ) {
  function getWindow (line 9738) | function getWindow( elem ) {

FILE: docs/sphinx/_build/_static/jquery-3.6.0.js
  function DOMEval (line 107) | function DOMEval( code, node, doc ) {
  function toType (line 137) | function toType( obj ) {
  function isArrayLike (line 507) | function isArrayLike( obj ) {
  function Sizzle (line 759) | function Sizzle( selector, context, results, seed ) {
  function createCache (line 907) | function createCache() {
  function markFunction (line 927) | function markFunction( fn ) {
  function assert (line 936) | function assert( fn ) {
  function addHandle (line 960) | function addHandle( attrs, handler ) {
  function siblingCheck (line 975) | function siblingCheck( a, b ) {
  function createInputPseudo (line 1001) | function createInputPseudo( type ) {
  function createButtonPseudo (line 1012) | function createButtonPseudo( type ) {
  function createDisabledPseudo (line 1023) | function createDisabledPseudo( disabled ) {
  function createPositionalPseudo (line 1079) | function createPositionalPseudo( fn ) {
  function testContext (line 1102) | function testContext( context ) {
  function setFilters (line 2313) | function setFilters() {}
  function toSelector (line 2387) | function toSelector( tokens ) {
  function addCombinator (line 2397) | function addCombinator( matcher, combinator, base ) {
  function elementMatcher (line 2464) | function elementMatcher( matchers ) {
  function multipleContexts (line 2478) | function multipleContexts( selector, contexts, results ) {
  function condense (line 2487) | function condense( unmatched, map, filter, context, xml ) {
  function setMatcher (line 2508) | function setMatcher( preFilter, selector, matcher, postFilter, postFinde...
  function matcherFromTokens (line 2608) | function matcherFromTokens( tokens ) {
  function matcherFromGroupMatchers (line 2671) | function matcherFromGroupMatchers( elementMatchers, setMatchers ) {
  function nodeName (line 3029) | function nodeName( elem, name ) {
  function winnow (line 3039) | function winnow( elements, qualifier, not ) {
  function sibling (line 3334) | function sibling( cur, dir ) {
  function createOptions (line 3427) | function createOptions( options ) {
  function Identity (line 3652) | function Identity( v ) {
  function Thrower (line 3655) | function Thrower( ex ) {
  function adoptValue (line 3659) | function adoptValue( value, resolve, reject, noValue ) {
  function resolve (line 3752) | function resolve( depth, deferred, handler, special ) {
  function completed (line 4117) | function completed() {
  function fcamelCase (line 4212) | function fcamelCase( _all, letter ) {
  function camelCase (line 4219) | function camelCase( string ) {
  function Data (line 4236) | function Data() {
  function getData (line 4405) | function getData( data ) {
  function dataAttr (line 4430) | function dataAttr( elem, key, data ) {
  function adjustCSS (line 4742) | function adjustCSS( elem, prop, valueParts, tween ) {
  function getDefaultDisplay (line 4810) | function getDefaultDisplay( elem ) {
  function showHide (line 4833) | function showHide( elements, show ) {
  function getAll (line 4965) | function getAll( context, tag ) {
  function setGlobalEval (line 4990) | function setGlobalEval( elems, refElements ) {
  function buildFragment (line 5006) | function buildFragment( elems, context, scripts, selection, ignored ) {
  function returnTrue (line 5098) | function returnTrue() {
  function returnFalse (line 5102) | function returnFalse() {
  function expectSync (line 5112) | function expectSync( elem, type ) {
  function safeActiveElement (line 5119) | function safeActiveElement() {
  function on (line 5125) | function on( elem, types, selector, data, fn, one ) {
  function leverageNative (line 5613) | function leverageNative( el, type, expectSync ) {
  function manipulationTarget (line 5962) | function manipulationTarget( elem, content ) {
  function disableScript (line 5973) | function disableScript( elem ) {
  function restoreScript (line 5977) | function restoreScript( elem ) {
  function cloneCopyEvent (line 5987) | function cloneCopyEvent( src, dest ) {
  function fixInput (line 6020) | function fixInput( src, dest ) {
  function domManip (line 6033) | function domManip( collection, args, callback, ignored ) {
  function remove (line 6125) | function remove( elem, selector, keepData ) {
  function computeStyleTests (line 6439) | function computeStyleTests() {
  function roundPixelMeasures (line 6483) | function roundPixelMeasures( measure ) {
  function curCSS (line 6576) | function curCSS( elem, name, computed ) {
  function addGetHookIf (line 6629) | function addGetHookIf( conditionFn, hookFn ) {
  function vendorPropName (line 6654) | function vendorPropName( name ) {
  function finalPropName (line 6669) | function finalPropName( name ) {
  function setPositiveNumber (line 6695) | function setPositiveNumber( _elem, value, subtract ) {
  function boxModelAdjustment (line 6707) | function boxModelAdjustment( elem, dimension, box, isBorderBox, styles, ...
  function getWidthOrHeight (line 6775) | function getWidthOrHeight( elem, dimension, extra ) {
  function Tween (line 7151) | function Tween( elem, options, prop, end, easing ) {
  function schedule (line 7274) | function schedule() {
  function createFxNow (line 7287) | function createFxNow() {
  function genFx (line 7295) | function genFx( type, includeWidth ) {
  function createTween (line 7315) | function createTween( value, prop, animation ) {
  function defaultPrefilter (line 7329) | function defaultPrefilter( elem, props, opts ) {
  function propFilter (line 7501) | function propFilter( props, specialEasing ) {
  function Animation (line 7538) | function Animation( elem, properties, options ) {
  function stripAndCollapse (line 8254) | function stripAndCollapse( value ) {
  function getClass (line 8260) | function getClass( elem ) {
  function classesToArray (line 8264) | function classesToArray( value ) {
  function buildParams (line 8894) | function buildParams( prefix, obj, traditional, add ) {
  function addToPrefiltersOrTransports (line 9047) | function addToPrefiltersOrTransports( structure ) {
  function inspectPrefiltersOrTransports (line 9081) | function inspectPrefiltersOrTransports( structure, options, originalOpti...
  function ajaxExtend (line 9110) | function ajaxExtend( target, src ) {
  function ajaxHandleResponses (line 9130) | function ajaxHandleResponses( s, jqXHR, responses ) {
  function ajaxConvert (line 9188) | function ajaxConvert( s, response, jqXHR, isSuccess ) {
  function done (line 9704) | function done( status, nativeStatusText, responses, headers ) {

FILE: docs/sphinx/_build/_static/jquery.js
  function p (line 2) | function p(a,b){b=b||d;var c=b.createElement("script");c.text=a,b.head.a...
  function w (line 2) | function w(a){var b=!!a&&"length"in a&&a.length,c=r.type(a);return"funct...
  function ga (line 2) | function ga(a,b,d,e){var f,h,j,k,l,o,r,s=b&&b.ownerDocument,w=b?b.nodeTy...
  function ha (line 2) | function ha(){var a=[];function b(c,e){return a.push(c+" ")>d.cacheLengt...
  function ia (line 2) | function ia(a){return a[u]=!0,a}
  function ja (line 2) | function ja(a){var b=n.createElement("fieldset");try{return!!a(b)}catch(...
  function ka (line 2) | function ka(a,b){var c=a.split("|"),e=c.length;while(e--)d.attrHandle[c[...
  function la (line 2) | function la(a,b){var c=b&&a,d=c&&1===a.nodeType&&1===b.nodeType&&a.sourc...
  function ma (line 2) | function ma(a){return function(b){var c=b.nodeName.toLowerCase();return"...
  function na (line 2) | function na(a){return function(b){var c=b.nodeName.toLowerCase();return(...
  function oa (line 2) | function oa(a){return function(b){return"label"in b&&b.disabled===a||"fo...
  function pa (line 2) | function pa(a){return ia(function(b){return b=+b,ia(function(c,d){var e,...
  function qa (line 2) | function qa(a){return a&&"undefined"!=typeof a.getElementsByTagName&&a}
  function ra (line 2) | function ra(){}
  function sa (line 2) | function sa(a){for(var b=0,c=a.length,d="";b<c;b++)d+=a[b].value;return d}
  function ta (line 2) | function ta(a,b,c){var d=b.dir,e=b.next,f=e||d,g=c&&"parentNode"===f,h=x...
  function ua (line 2) | function ua(a){return a.length>1?function(b,c,d){var e=a.length;while(e-...
  function va (line 2) | function va(a,b,c){for(var d=0,e=b.length;d<e;d++)ga(a,b[d],c);return c}
  function wa (line 2) | function wa(a,b,c,d,e){for(var f,g=[],h=0,i=a.length,j=null!=b;h<i;h++)(...
  function xa (line 2) | function xa(a,b,c,d,e,f){return d&&!d[u]&&(d=xa(d)),e&&!e[u]&&(e=xa(e,f)...
  function ya (line 2) | function ya(a){for(var b,c,e,f=a.length,g=d.relative[a[0].type],h=g||d.r...
  function za (line 2) | function za(a,b){var c=b.length>0,e=a.length>0,f=function(f,g,h,i,k){var...
  function D (line 2) | function D(a,b,c){if(r.isFunction(b))return r.grep(a,function(a,d){retur...
  function J (line 2) | function J(a,b){while((a=a[b])&&1!==a.nodeType);return a}
  function L (line 2) | function L(a){var b={};return r.each(a.match(K)||[],function(a,c){b[c]=!...
  function M (line 2) | function M(a){return a}
  function N (line 2) | function N(a){throw a}
  function O (line 2) | function O(a,b,c){var d;try{a&&r.isFunction(d=a.promise)?d.call(a).done(...
  function g (line 2) | function g(b,c,d,e){return function(){var h=this,i=arguments,j=function(...
  function R (line 2) | function R(){d.removeEventListener("DOMContentLoaded",R),a.removeEventLi...
  function U (line 3) | function U(){this.expando=r.expando+U.uid++}
  function Z (line 3) | function Z(a,b,c){var d;if(void 0===c&&1===a.nodeType)if(d="data-"+b.rep...
  function da (line 3) | function da(a,b,c,d){var e,f=1,g=20,h=d?function(){return d.cur()}:funct...
  function fa (line 3) | function fa(a){var b,c=a.ownerDocument,d=a.nodeName,e=ea[d];return e?e:(...
  function ga (line 3) | function ga(a,b){for(var c,d,e=[],f=0,g=a.length;f<g;f++)d=a[f],d.style&...
  function la (line 3) | function la(a,b){var c="undefined"!=typeof a.getElementsByTagName?a.getE...
  function ma (line 3) | function ma(a,b){for(var c=0,d=a.length;c<d;c++)V.set(a[c],"globalEval",...
  function oa (line 3) | function oa(a,b,c,d,e){for(var f,g,h,i,j,k,l=b.createDocumentFragment(),...
  function ta (line 3) | function ta(){return!0}
  function ua (line 3) | function ua(){return!1}
  function va (line 3) | function va(){try{return d.activeElement}catch(a){}}
  function wa (line 3) | function wa(a,b,c,d,e,f){var g,h;if("object"==typeof b){"string"!=typeof...
  function Ca (line 3) | function Ca(a,b){return r.nodeName(a,"table")&&r.nodeName(11!==b.nodeTyp...
  function Da (line 3) | function Da(a){return a.type=(null!==a.getAttribute("type"))+"/"+a.type,a}
  function Ea (line 3) | function Ea(a){var b=Aa.exec(a.type);return b?a.type=b[1]:a.removeAttrib...
  function Fa (line 3) | function Fa(a,b){var c,d,e,f,g,h,i,j;if(1===b.nodeType){if(V.hasData(a)&...
  function Ga (line 3) | function Ga(a,b){var c=b.nodeName.toLowerCase();"input"===c&&ha.test(a.t...
  function Ha (line 3) | function Ha(a,b,c,d){b=g.apply([],b);var e,f,h,i,j,k,l=0,m=a.length,n=m-...
  function Ia (line 3) | function Ia(a,b,c){for(var d,e=b?r.filter(b,a):a,f=0;null!=(d=e[f]);f++)...
  function b (line 3) | function b(){if(i){i.style.cssText="box-sizing:border-box;position:relat...
  function Ma (line 3) | function Ma(a,b,c){var d,e,f,g,h=a.style;return c=c||La(a),c&&(g=c.getPr...
  function Na (line 3) | function Na(a,b){return{get:function(){return a()?void delete this.get:(...
  function Ta (line 3) | function Ta(a){if(a in Sa)return a;var b=a[0].toUpperCase()+a.slice(1),c...
  function Ua (line 3) | function Ua(a,b,c){var d=_.exec(b);return d?Math.max(0,d[2]-(c||0))+(d[3...
  function Va (line 3) | function Va(a,b,c,d,e){for(var f=c===(d?"border":"content")?4:"width"===...
  function Wa (line 3) | function Wa(a,b,c){var d,e=!0,f=La(a),g="border-box"===r.css(a,"boxSizin...
  function Xa (line 3) | function Xa(a,b,c,d,e){return new Xa.prototype.init(a,b,c,d,e)}
  function ab (line 3) | function ab(){Za&&(a.requestAnimationFrame(ab),r.fx.tick())}
  function bb (line 3) | function bb(){return a.setTimeout(function(){Ya=void 0}),Ya=r.now()}
  function cb (line 3) | function cb(a,b){var c,d=0,e={height:a};for(b=b?1:0;d<4;d+=2-b)c=aa[d],e...
  function db (line 3) | function db(a,b,c){for(var d,e=(gb.tweeners[b]||[]).concat(gb.tweeners["...
  function eb (line 3) | function eb(a,b,c){var d,e,f,g,h,i,j,k,l="width"in b||"height"in b,m=thi...
  function fb (line 3) | function fb(a,b){var c,d,e,f,g;for(c in a)if(d=r.camelCase(c),e=b[d],f=a...
  function gb (line 3) | function gb(a,b,c){var d,e,f=0,g=gb.prefilters.length,h=r.Deferred().alw...
  function mb (line 4) | function mb(a){return a.getAttribute&&a.getAttribute("class")||""}
  function xb (line 4) | function xb(a,b,c,d){var e;if(r.isArray(b))r.each(b,function(b,e){c||tb....
  function Jb (line 4) | function Jb(a){return function(b,c){"string"!=typeof b&&(c=b,b="*");var ...
  function Kb (line 4) | function Kb(a,b,c,d){var e={},f=a===Gb;function g(h){var i;return e[h]=!...
  function Lb (line 4) | function Lb(a,b){var c,d,e=r.ajaxSettings.flatOptions||{};for(c in b)voi...
  function Mb (line 4) | function Mb(a,b,c){var d,e,f,g,h=a.contents,i=a.dataTypes;while("*"===i[...
  function Nb (line 4) | function Nb(a,b,c,d){var e,f,g,h,i,j={},k=a.dataTypes.slice();if(k[1])fo...
  function A (line 4) | function A(b,c,d,h){var j,m,n,v,w,x=c;k||(k=!0,i&&a.clearTimeout(i),e=vo...
  function Sb (line 4) | function Sb(a){return r.isWindow(a)?a:9===a.nodeType&&a.defaultView}

FILE: docs/sphinx/_build/_static/js/theme.js
  function s (line 1) | function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&re...
  function ThemeNav (line 5) | function ThemeNav () {

FILE: docs/sphinx/_build/_static/searchtools.js
  function splitQuery (line 307) | function splitQuery(query) {
  function pulse (line 381) | function pulse() {
  function displayNextItem (line 513) | function displayNextItem() {

FILE: docs/sphinx/_build/_static/underscore-1.13.1.js
  function restArguments (line 64) | function restArguments(func, startIndex) {
  function isObject (line 88) | function isObject(obj) {
  function isNull (line 94) | function isNull(obj) {
  function isUndefined (line 99) | function isUndefined(obj) {
  function isBoolean (line 104) | function isBoolean(obj) {
  function isElement (line 109) | function isElement(obj) {
  function tagTester (line 114) | function tagTester(name) {
  function ie10IsDataView (line 162) | function ie10IsDataView(obj) {
  function has$1 (line 173) | function has$1(obj, key) {
  function isFinite$1 (line 192) | function isFinite$1(obj) {
  function isNaN$1 (line 197) | function isNaN$1(obj) {
  function constant (line 202) | function constant(value) {
  function createSizePropertyCheck (line 209) | function createSizePropertyCheck(getSizeProperty) {
  function shallowProperty (line 217) | function shallowProperty(key) {
  function isTypedArray (line 232) | function isTypedArray(obj) {
  function emulatedSet (line 248) | function emulatedSet(keys) {
  function collectNonEnumProps (line 263) | function collectNonEnumProps(obj, keys) {
  function keys (line 283) | function keys(obj) {
  function isEmpty (line 295) | function isEmpty(obj) {
  function isMatch (line 307) | function isMatch(object, attrs) {
  function _$1 (line 321) | function _$1(obj) {
  function toBufferView (line 344) | function toBufferView(bufferSource) {
  function eq (line 356) | function eq(a, b, aStack, bStack) {
  function deepEq (line 371) | function deepEq(a, b, aStack, bStack) {
  function isEqual (line 476) | function isEqual(a, b) {
  function allKeys (line 481) | function allKeys(obj) {
  function ie11fingerprint (line 494) | function ie11fingerprint(methods) {
  function values (line 533) | function values(obj) {
  function pairs (line 545) | function pairs(obj) {
  function invert (line 556) | function invert(obj) {
  function functions (line 566) | function functions(obj) {
  function createAssigner (line 575) | function createAssigner(keysFunc, defaults) {
  function ctor (line 605) | function ctor() {
  function baseCreate (line 610) | function baseCreate(prototype) {
  function create (line 623) | function create(prototype, props) {
  function clone (line 630) | function clone(obj) {
  function tap (line 638) | function tap(obj, interceptor) {
  function toPath$1 (line 645) | function toPath$1(path) {
  function toPath (line 652) | function toPath(path) {
  function deepGet (line 657) | function deepGet(obj, path) {
  function get (line 670) | function get(object, path, defaultValue) {
  function has (line 678) | function has(obj, path) {
  function identity (line 690) | function identity(value) {
  function matcher (line 696) | function matcher(attrs) {
  function property (line 705) | function property(path) {
  function optimizeCb (line 715) | function optimizeCb(func, context, argCount) {
  function baseIteratee (line 737) | function baseIteratee(value, context, argCount) {
  function iteratee (line 747) | function iteratee(value, context) {
  function cb (line 754) | function cb(value, context, argCount) {
  function mapObject (line 761) | function mapObject(obj, iteratee, context) {
  function noop (line 774) | function noop(){}
  function propertyOf (line 777) | function propertyOf(obj) {
  function times (line 785) | function times(n, iteratee, context) {
  function random (line 793) | function random(min, max) {
  function createEscaper (line 808) | function createEscaper(map) {
  function escapeChar (line 867) | function escapeChar(match) {
  function template (line 882) | function template(text, settings, oldSettings) {
  function result (line 950) | function result(obj, path, fallback) {
  function uniqueId (line 970) | function uniqueId(prefix) {
  function chain (line 976) | function chain(obj) {
  function executeBound (line 985) | function executeBound(sourceFunc, boundFunc, context, callingContext, ar...
  function flatten$1 (line 1030) | function flatten$1(input, depth, strict, output) {
  function memoize (line 1071) | function memoize(func, hasher) {
  function throttle (line 1099) | function throttle(func, wait, options) {
  function debounce (line 1144) | function debounce(func, wait, immediate) {
  function wrap (line 1181) | function wrap(func, wrapper) {
  function negate (line 1186) | function negate(predicate) {
  function compose (line 1194) | function compose() {
  function after (line 1206) | function after(times, func) {
  function before (line 1216) | function before(times, func) {
  function findKey (line 1232) | function findKey(obj, predicate, context) {
  function createPredicateIndexFinder (line 1242) | function createPredicateIndexFinder(dir) {
  function sortedIndex (line 1262) | function sortedIndex(array, obj, iteratee, context) {
  function createIndexFinder (line 1274) | function createIndexFinder(dir, predicateFind, sortedIndex) {
  function find (line 1309) | function find(obj, predicate, context) {
  function findWhere (line 1317) | function findWhere(obj, attrs) {
  function each (line 1325) | function each(obj, iteratee, context) {
  function map (line 1342) | function map(obj, iteratee, context) {
  function createReduce (line 1355) | function createReduce(dir) {
  function filter (line 1387) | function filter(obj, predicate, context) {
  function reject (line 1397) | function reject(obj, predicate, context) {
  function every (line 1402) | function every(obj, predicate, context) {
  function some (line 1414) | function some(obj, predicate, context) {
  function contains (line 1426) | function contains(obj, item, fromIndex, guard) {
  function pluck (line 1456) | function pluck(obj, key) {
  function where (line 1462) | function where(obj, attrs) {
  function max (line 1467) | function max(obj, iteratee, context) {
  function min (line 1492) | function min(obj, iteratee, context) {
  function sample (line 1520) | function sample(obj, n, guard) {
  function shuffle (line 1539) | function shuffle(obj) {
  function sortBy (line 1544) | function sortBy(obj, iteratee, context) {
  function group (line 1565) | function group(behavior, partition) {
  function toArray (line 1604) | function toArray(obj) {
  function size (line 1616) | function size(obj) {
  function keyInObj (line 1623) | function keyInObj(value, key, obj) {
  function initial (line 1665) | function initial(array, n, guard) {
  function first (line 1671) | function first(array, n, guard) {
  function rest (line 1680) | function rest(array, n, guard) {
  function last (line 1686) | function last(array, n, guard) {
  function compact (line 1693) | function compact(array) {
  function flatten (line 1699) | function flatten(array, depth) {
  function uniq (line 1722) | function uniq(array, isSorted, iteratee, context) {
  function intersection (line 1757) | function intersection(array) {
  function unzip (line 1774) | function unzip(array) {
  function object (line 1791) | function object(list, values) {
  function range (line 1806) | function range(start, stop, step) {
  function chunk (line 1827) | function chunk(array, count) {
  function chainResult (line 1838) | function chainResult(instance, obj) {
  function mixin (line 1843) | function mixin(obj) {

FILE: docs/sphinx/_build/_static/underscore-1.3.1.js
  function eq (line 669) | function eq(a, b, stack) {

FILE: docs/sphinx/_build/_static/underscore.js
  function q (line 8) | function q(a,c,d){if(a===c)return a!==0||1/a==1/c;if(a==null||c==null)re...

FILE: docs/sphinx/_build/_static/websupport.js
  function init (line 47) | function init() {
  function initEvents (line 52) | function initEvents() {
  function setComparator (line 107) | function setComparator() {
  function initComparator (line 127) | function initComparator() {
  function show (line 147) | function show(id) {
  function hide (line 168) | function hide(id) {
  function getComments (line 181) | function getComments(id) {
  function addComment (line 216) | function addComment(form) {
  function appendComments (line 275) | function appendComments(comments, ul) {
  function insertComment (line 290) | function insertComment(comment) {
  function acceptComment (line 320) | function acceptComment(id) {
  function deleteComment (line 335) | function deleteComment(id) {
  function showProposal (line 369) | function showProposal(id) {
  function hideProposal (line 375) | function hideProposal(id) {
  function showProposeChange (line 381) | function showProposeChange(id) {
  function hideProposeChange (line 390) | function hideProposeChange(id) {
  function toggleCommentMarkupBox (line 398) | function toggleCommentMarkupBox(id) {
  function handleReSort (line 403) | function handleReSort(link) {
  function handleVote (line 426) | function handleVote(link) {
  function openReply (line 488) | function openReply(id) {
  function closeReply (line 516) | function closeReply(id) {
  function sortComments (line 530) | function sortComments(comments) {
  function getChildren (line 542) | function getChildren(ul, recursive) {
  function createCommentDiv (line 555) | function createCommentDiv(comment) {
  function renderTemplate (line 593) | function renderTemplate(template, context) {
  function showError (line 610) | function showError(message) {
Condensed preview — 518 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (7,609K chars).
[
  {
    "path": ".gitignore",
    "chars": 284,
    "preview": "*$py.class\n*,cover\n*.egg\n*.egg-info/\n*.log\n*.manifest\n*.mo\n*.pot\n*.py[co]\n*.py[cod]\n*.so\n*.spec\n*/.DS_Store\n*~\n.DS_Store"
  },
  {
    "path": "Experiments/AGCRN/AGCRN.py",
    "chars": 6981,
    "preview": "import os\nimport GPUtil\nimport torch\nimport argparse\nimport configparser\n\nfrom datetime import datetime\nfrom UCTB.model."
  },
  {
    "path": "Experiments/AGCRN/Runner.py",
    "chars": 4132,
    "preview": "import os\n\n#############################################\n# BenchMark Bike\n#############################################\n"
  },
  {
    "path": "Experiments/AGCRN/params.conf",
    "chars": 564,
    "preview": "[data]\nnum_nodes =717 \nlag = 12\nhorizon = 1\nval_ratio = 0.2\ntest_ratio = 0.2\ntod = False\nnormalizer = std\ncolumn_wise = "
  },
  {
    "path": "Experiments/ARIMA/ARIMA.py",
    "chars": 3522,
    "preview": "import numpy as np\nimport argparse\nfrom tqdm import tqdm\nfrom UCTB.model import ARIMA\nfrom UCTB.dataset import NodeTraff"
  },
  {
    "path": "Experiments/ARIMA/ARIMA_Parallel.py",
    "chars": 3226,
    "preview": "import os\nimport numpy as np\nimport argparse\n\nfrom UCTB.model import ARIMA\nfrom UCTB.dataset import NodeTrafficLoader\nfr"
  },
  {
    "path": "Experiments/ARIMA/RunnerARIMA.py",
    "chars": 1700,
    "preview": "import os\nfrom tqdm import tqdm\n# dataset = [['Bike','NYC','all','365','sum','0.1'],['DiDi','Xian','all','all','sum','0."
  },
  {
    "path": "Experiments/ARIMA/trials.py",
    "chars": 1126,
    "preview": "import os\n\nfrom UCTB.utils import multiple_process\n\n\ndef task_func(share_queue, locker, data, parameters):\n\n    print('C"
  },
  {
    "path": "Experiments/ASTGCN/ASTGCN.py",
    "chars": 4939,
    "preview": "import torch\nimport os\nimport GPUtil\nimport argparse\nimport configparser\n\nfrom UCTB.model.ASTGCN import make_model\nfrom "
  },
  {
    "path": "Experiments/ASTGCN/Runner.py",
    "chars": 4146,
    "preview": "import os\n\n# #############################################\n# # BenchMark Bike\n# ########################################"
  },
  {
    "path": "Experiments/ASTGCN/configurations/METR_LA_astgcn.conf",
    "chars": 534,
    "preview": "[Data]\nadj_filename = ./data/METR_LA/distance_LA.csv\ngraph_signal_matrix_filename = ./data/METR_LA/METR_LA.npz\nnum_of_ve"
  },
  {
    "path": "Experiments/ASTGCN/configurations/PEMS04_astgcn.conf",
    "chars": 523,
    "preview": "[Data]\nadj_filename = ./data/PEMS04/distance.csv\ngraph_signal_matrix_filename = ./data/PEMS04/PEMS04.npz\nnum_of_vertices"
  },
  {
    "path": "Experiments/ASTGCN/configurations/PEMS08_astgcn.conf",
    "chars": 521,
    "preview": "[Data]\nadj_filename = ../data/PEMS08/PEMS08.csv\ngraph_signal_matrix_filename = ../data/PEMS08/PEMS08.npz\nnum_of_vertices"
  },
  {
    "path": "Experiments/CustomizedDemo/Runner_topk.py",
    "chars": 480,
    "preview": "import os\nimport numpy as np\nimport heapq\n\n#############################################\n# BenchMark Metro Shangahi (top"
  },
  {
    "path": "Experiments/CustomizedDemo/STMeta_Obj_topk.py",
    "chars": 8738,
    "preview": "import os\nimport nni\nimport yaml\nimport argparse\nimport GPUtil\nimport numpy as np\nfrom UCTB.dataset import DataSet\n\nfrom"
  },
  {
    "path": "Experiments/CustomizedDemo/STMeta_v0.model.yml",
    "chars": 402,
    "preview": "# network structure parameters\nst_method: 'LSTM'\ntemporal_merge: 'gal'\ngraph_merge: 'gal'\n\n# gcn parameters\ngcn_k: 0\ngcn"
  },
  {
    "path": "Experiments/CustomizedDemo/STMeta_v1.model.yml",
    "chars": 401,
    "preview": "# network structure parameters\nst_method: 'GCLSTM'\ntemporal_merge: 'gal'\ngraph_merge: 'gal'\n\n# gcn parameters\ngcn_k: 1\ng"
  },
  {
    "path": "Experiments/CustomizedDemo/STMeta_v2.model.yml",
    "chars": 404,
    "preview": "# network structure parameters\nst_method: 'GCLSTM'\ntemporal_merge: 'concat'\ngraph_merge: 'gal'\n\n# gcn parameters\ngcn_k: "
  },
  {
    "path": "Experiments/CustomizedDemo/STMeta_v3.model.yml",
    "chars": 499,
    "preview": "# network structure parameters\nst_method: 'DCRNN'\ntemporal_merge: 'gal'\ngraph_merge: 'gal'\n\n# gcn parameters\ngcn_k: 1\ngc"
  },
  {
    "path": "Experiments/CustomizedDemo/metro_shanghai.data.yml",
    "chars": 415,
    "preview": "# dataset and city\ndataset: Metro\ncity: Shanghai\n\ncloseness_len: 6\nperiod_len: 7\ntrend_len: 4\n\ngraph: Distance-Correlati"
  },
  {
    "path": "Experiments/CustomizedDemo/topKGraph.py",
    "chars": 1949,
    "preview": "import heapq\nimport numpy as np\nfrom UCTB.preprocess.GraphGenerator import GraphGenerator\n\n\nclass topKGraph(GraphGenerat"
  },
  {
    "path": "Experiments/DCRNN/DCRNN.py",
    "chars": 7206,
    "preview": "import os\nimport numpy as np\n\nfrom UCTB.dataset import NodeTrafficLoader\nfrom UCTB.model import DCRNN\nfrom UCTB.evaluati"
  },
  {
    "path": "Experiments/DCRNN/bike_trial.py",
    "chars": 2298,
    "preview": "import os\n\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\nshared_params_dcrnn = ('python DCRNN.py '\n                "
  },
  {
    "path": "Experiments/DCRNN/cs_trial.py",
    "chars": 1229,
    "preview": "import os\n\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\nshared_params_dcrnn = ('python DCRNN.py '\n                "
  },
  {
    "path": "Experiments/DCRNN/didi_trial.py",
    "chars": 1687,
    "preview": "import os\n\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\nshared_params_st_mgcn = ('python DCRNN.py '\n              "
  },
  {
    "path": "Experiments/DCRNN/metr_trial.py",
    "chars": 1370,
    "preview": "import os\n\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\nshared_params_st_mgcn = ('python DCRNN.py '\n              "
  },
  {
    "path": "Experiments/DCRNN/metro_trial.py",
    "chars": 1711,
    "preview": "import os\n\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\nshared_params_st_mgcn = ('python DCRNN.py '\n              "
  },
  {
    "path": "Experiments/DCRNN/pems_trial.py",
    "chars": 1374,
    "preview": "import os\n\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\nshared_params_st_mgcn = ('python DCRNN.py '\n              "
  },
  {
    "path": "Experiments/DCRNN/street_didi_trial.py",
    "chars": 1553,
    "preview": "import os\n\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\nshared_params_st_mgcn = ('python DCRNN.py '\n              "
  },
  {
    "path": "Experiments/DeepST/DeepST.py",
    "chars": 2871,
    "preview": "import nni\n\nfrom UCTB.dataset import GridTrafficLoader\nfrom UCTB.model import DeepST\nfrom UCTB.evaluation import metric\n"
  },
  {
    "path": "Experiments/DeepST/param_search.yml",
    "chars": 375,
    "preview": "authorName: DiChai\r\nexperimentName: search_space\r\ntrialConcurrency: 1\r\nmaxExecDuration: 24h\r\nmaxTrialNum: 50\r\ntrainingSe"
  },
  {
    "path": "Experiments/DeepST/search_space.json",
    "chars": 288,
    "preview": "{\r\n    \"num_conv_filters\": {\"_type\":\"choice\",\"_value\":[32, 64, 128]},\r\n\r\n    \"kernel_size\": {\"_type\":\"choice\",\"_value\":["
  },
  {
    "path": "Experiments/GBRT/GBRT.py",
    "chars": 3479,
    "preview": "import numpy as np\nimport argparse\nfrom sklearn.ensemble import GradientBoostingRegressor\nfrom UCTB.dataset import NodeT"
  },
  {
    "path": "Experiments/GBRT/gbrt_config.yml",
    "chars": 1410,
    "preview": "authorName: lychen\nexperimentName: gbrt_parameter_search\ntrialConcurrency: 1\nmaxExecDuration: 72h\nmaxTrialNum: 200\ntrain"
  },
  {
    "path": "Experiments/GBRT/gbrt_search_space.json",
    "chars": 271,
    "preview": "{\n\n    \"CT\": {\"_type\": \"randint\", \"_value\": [0,13]},\n    \"PT\": {\"_type\": \"randint\", \"_value\": [0,15]},\n    \"TT\": {\"_type"
  },
  {
    "path": "Experiments/GMAN/GMAN.py",
    "chars": 5809,
    "preview": "import time\nimport argparse\nimport os\n\nfrom UCTB.evaluation import metric\nfrom UCTB.model.GMAN import Graph\nfrom UCTB.da"
  },
  {
    "path": "Experiments/GMAN/Runner.py",
    "chars": 6817,
    "preview": "import os\n\nimport os\n\n#############################################\n# BenchMark Bike\n###################################"
  },
  {
    "path": "Experiments/GraphWaveNet/GraphWaveNet.py",
    "chars": 4889,
    "preview": "import torch\nimport argparse\nimport time\nimport os\n\nfrom UCTB.utils.utils_GraphWaveNet import *\nfrom UCTB.preprocess.Gra"
  },
  {
    "path": "Experiments/GraphWaveNet/Runner.py",
    "chars": 6085,
    "preview": "import os\n\n#############################################\n# BenchMark Bike\n#############################################\n"
  },
  {
    "path": "Experiments/HM/HM.py",
    "chars": 2463,
    "preview": "from UCTB.dataset import NodeTrafficLoader\nfrom UCTB.model import HM\nimport argparse\nfrom UCTB.evaluation import metric\n"
  },
  {
    "path": "Experiments/HM/hm_closeness_search_space.json",
    "chars": 201,
    "preview": "{\n    \"CT\": {\"_type\": \"choice\", \"_value\": [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24]},\n    \"PT\": {"
  },
  {
    "path": "Experiments/HM/hm_config.yml",
    "chars": 1566,
    "preview": "authorName: lychen\nexperimentName: hm_parameter_search\ntrialConcurrency: 2\nmaxExecDuration: 24h\nmaxTrialNum: 24\ntraining"
  },
  {
    "path": "Experiments/HM/hm_search_space.json",
    "chars": 174,
    "preview": "{\n    \"CT\": {\"_type\": \"choice\", \"_value\": [0,1,2,3,4,5,6]},\n    \"PT\": {\"_type\": \"choice\", \"_value\": [0,1,2,3,4,5,6,7]},\n"
  },
  {
    "path": "Experiments/HMM/HMM.py",
    "chars": 2086,
    "preview": "import nni\nimport argparse\n\nfrom UCTB.model import HMM\nfrom UCTB.dataset import NodeTrafficLoader\nfrom UCTB.evaluation i"
  },
  {
    "path": "Experiments/HMM/trials.py",
    "chars": 1125,
    "preview": "import os\n\nfrom UCTB.utils import multiple_process\n\n\ndef task_func(share_queue, locker, data, parameters):\n\n    print('C"
  },
  {
    "path": "Experiments/MTGNN/MTGNN.py",
    "chars": 11405,
    "preview": "import argparse\nfrom UCTB.dataset import NodeTrafficLoader\nimport os\nfrom UCTB.utils.utils_MTGNN import load_dataset\nfro"
  },
  {
    "path": "Experiments/MTGNN/Runner.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "Experiments/MultiStepPredict/Code/DirRec_ARIMA.py",
    "chars": 2270,
    "preview": "import time\nimport numpy as np\nimport pandas as pd\nfrom sklearn.metrics import mean_absolute_error as MAE\n\nfrom UCTB.mod"
  },
  {
    "path": "Experiments/MultiStepPredict/Code/DirRec_DCRNN.py",
    "chars": 6122,
    "preview": "import time\nimport numpy as np\nimport pandas as pd\nfrom sklearn.metrics import mean_absolute_error as MAE\n\nfrom UCTB.dat"
  },
  {
    "path": "Experiments/MultiStepPredict/Code/DirRec_STMeta.py",
    "chars": 5055,
    "preview": "import time\nimport numpy as np\nimport pandas as pd\nfrom sklearn.metrics import mean_absolute_error as MAE\n\nfrom UCTB.dat"
  },
  {
    "path": "Experiments/MultiStepPredict/Code/DirRec_XGBoost.py",
    "chars": 3528,
    "preview": "import time\nimport numpy as np\nimport pandas as pd\nfrom sklearn.metrics import mean_absolute_error as MAE\n\nfrom UCTB.dat"
  },
  {
    "path": "Experiments/MultiStepPredict/Code/viz.py",
    "chars": 2152,
    "preview": "import pandas as pd\nimport matplotlib.pyplot as plt\nimport numpy as np\n\ndef readMetric(metric_name, eva_dir, horizon_num"
  },
  {
    "path": "Experiments/MultiStepPredict/README.md",
    "chars": 5057,
    "preview": "## Method\n\nThe implementation of multi-step prediction refers to the following survey.\n\n> An N H, Anh D T. Comparison of"
  },
  {
    "path": "Experiments/ParameterSearch/ARIMA.py",
    "chars": 907,
    "preview": "import numpy as np\r\n\r\nfrom UCTB.model import ARIMA\r\nfrom UCTB.dataset import NodeTrafficLoader\r\nfrom UCTB.evaluation imp"
  },
  {
    "path": "Experiments/ParameterSearch/CPT_GBRT.py",
    "chars": 2079,
    "preview": "import numpy as np\nfrom UCTB.dataset import NodeTrafficLoader\nfrom sklearn.ensemble import GradientBoostingRegressor\nfro"
  },
  {
    "path": "Experiments/ParameterSearch/CPT_HM.py",
    "chars": 1138,
    "preview": "import nni\r\n\r\nfrom UCTB.dataset import NodeTrafficLoader\r\nfrom UCTB.model import HM\r\nfrom UCTB.evaluation import metric\r"
  },
  {
    "path": "Experiments/ParameterSearch/CPT_STMeta_Obj.py",
    "chars": 6366,
    "preview": "import os\r\nimport nni\r\nimport numpy as np\r\n\r\nfrom UCTB.dataset import NodeTrafficLoader\r\nfrom UCTB.model import STMeta_V"
  },
  {
    "path": "Experiments/ParameterSearch/CPT_XGBoost.py",
    "chars": 2331,
    "preview": "import nni\r\nimport numpy as np\r\n\r\nfrom UCTB.dataset import NodeTrafficLoader_CPT, NodeTrafficLoader\r\nfrom UCTB.model imp"
  },
  {
    "path": "Experiments/ParameterSearch/config.yml",
    "chars": 381,
    "preview": "authorName: DiChai\r\nexperimentName: parameter_search\r\ntrialConcurrency: 2\r\nmaxExecDuration: 24h\r\nmaxTrialNum: 50\r\ntraini"
  },
  {
    "path": "Experiments/ParameterSearch/hm_config.yml",
    "chars": 379,
    "preview": "authorName: DiChai\r\nexperimentName: hm_parameter_search\r\ntrialConcurrency: 8\r\nmaxExecDuration: 24h\r\nmaxTrialNum: 200\r\ntr"
  },
  {
    "path": "Experiments/ParameterSearch/hm_search_space.json",
    "chars": 260,
    "preview": "{\r\n\r\n    \"Dataset\": {\"_type\": \"choice\", \"_value\": [\"Bike\"]},\r\n    \"City\": {\"_type\": \"choice\", \"_value\": [\"DC\"]},\r\n\r\n    "
  },
  {
    "path": "Experiments/ParameterSearch/plot_paper.ipynb",
    "chars": 2237,
    "preview": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 13,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n"
  },
  {
    "path": "Experiments/ParameterSearch/results/HM_Bike_NYC.json",
    "chars": 287065,
    "preview": "{\r\n    \"experimentParameters\": {\r\n        \"id\": \"rU6J0i83\",\r\n        \"revision\": 287,\r\n        \"execDuration\": 854,\r\n   "
  },
  {
    "path": "Experiments/ParameterSearch/search_space.json",
    "chars": 906,
    "preview": "{\r\n\r\n    \"Dataset\": {\"_type\": \"choice\", \"_value\": [\"DiDi\"]},\r\n    \"City\": {\"_type\": \"choice\", \"_value\": [\"Xian\"]},\r\n\r\n  "
  },
  {
    "path": "Experiments/ParameterSearch/xgboost_config.yml",
    "chars": 394,
    "preview": "authorName: DiChai\r\nexperimentName: xgboost_parameter_search\r\ntrialConcurrency: 2\r\nmaxExecDuration: 24h\r\nmaxTrialNum: 20"
  },
  {
    "path": "Experiments/ParameterSearch/xgboost_search_space.json",
    "chars": 380,
    "preview": "{\r\n\r\n    \"Dataset\": {\"_type\": \"choice\", \"_value\": [\"Bike\"]},\r\n    \"City\": {\"_type\": \"choice\", \"_value\": [\"NYC\"]},\r\n\r\n   "
  },
  {
    "path": "Experiments/RegionGeneration/region_generation.py",
    "chars": 886,
    "preview": "import pandas as pd\nfrom UCTB.preprocess.RegionGenerator import RegionGenerator\nfrom UCTB.preprocess.dataset_helper impo"
  },
  {
    "path": "Experiments/STGCN/Runner.py",
    "chars": 6699,
    "preview": "import os\n\n# #############################################\n# # BenchMark Bike\n# ########################################"
  },
  {
    "path": "Experiments/STGCN/STGCN.py",
    "chars": 3848,
    "preview": "import os\nimport tensorflow as tf\nfrom UCTB.dataset import NodeTrafficLoader\nfrom UCTB.preprocess.GraphGenerator import "
  },
  {
    "path": "Experiments/STMeta/RunnerCPTtrial.py",
    "chars": 476,
    "preview": "import os\n###############################################\n# C P T trial\n###############################################\n"
  },
  {
    "path": "Experiments/STMeta/RunnerLSTM.py",
    "chars": 695,
    "preview": "import os\n\n############################################################################################################\n"
  },
  {
    "path": "Experiments/STMeta/RunnerStreetDiDi.py",
    "chars": 4728,
    "preview": "import os\n\n###############################################\n# BenchMark DiDi\n############################################"
  },
  {
    "path": "Experiments/STMeta/RunnerWWW.py",
    "chars": 9124,
    "preview": "import os\n\n#############################################\n# BenchMark Bike\n#############################################\n"
  },
  {
    "path": "Experiments/STMeta/Runner_GRU.py",
    "chars": 1508,
    "preview": "import os\n\n#############################################\n# BenchMark Bike\n#############################################\n"
  },
  {
    "path": "Experiments/STMeta/Runner_M1_0.py",
    "chars": 717,
    "preview": "import os\n\n############################################################################################################\n"
  },
  {
    "path": "Experiments/STMeta/Runner_M1_1.py",
    "chars": 883,
    "preview": "import os\n\n############################################################################################################\n"
  },
  {
    "path": "Experiments/STMeta/Runner_M2_0.py",
    "chars": 570,
    "preview": "import os\n\n############################################################################################################\n"
  },
  {
    "path": "Experiments/STMeta/Runner_M2_1.py",
    "chars": 413,
    "preview": "import os\n\n############################################################################################################\n"
  },
  {
    "path": "Experiments/STMeta/Runner_Main.py",
    "chars": 29647,
    "preview": "import os\n\n#############################################\n# BenchMark Bike\n#############################################\n"
  },
  {
    "path": "Experiments/STMeta/Runner_PS_Chicago.py",
    "chars": 3312,
    "preview": "import os\n\n############################################################################################################\n"
  },
  {
    "path": "Experiments/STMeta/Runner_PS_NYC.py",
    "chars": 3240,
    "preview": "import os\n\n############################################################################################################\n"
  },
  {
    "path": "Experiments/STMeta/Runner_PS_Shanghai.py",
    "chars": 1692,
    "preview": "import os\n\n############################################################################################################\n"
  },
  {
    "path": "Experiments/STMeta/Runner_singleGraph.py",
    "chars": 4514,
    "preview": "import os\n\n#############################################\n# BenchMark Bike\n#############################################\n"
  },
  {
    "path": "Experiments/STMeta/Runner_temporalAblation.py",
    "chars": 5739,
    "preview": "import os\n\n#############################################\n# BenchMark Bike\n#############################################\n"
  },
  {
    "path": "Experiments/STMeta/Runner_v3.py",
    "chars": 2129,
    "preview": "import os\n\n#############################################\n# BenchMark Bike\n#############################################\n"
  },
  {
    "path": "Experiments/STMeta/STMeta_Obj.py",
    "chars": 9743,
    "preview": "import os\nimport nni\nimport yaml\nimport argparse\nimport GPUtil\n\nfrom UCTB.dataset import NodeTrafficLoader\nfrom UCTB.mod"
  },
  {
    "path": "Experiments/STMeta/STMeta_Obj_time.py",
    "chars": 9591,
    "preview": "import os\nimport yaml\nimport argparse\nimport GPUtil\nimport numpy as np\nfrom time import time\n\nfrom UCTB.dataset import N"
  },
  {
    "path": "Experiments/STMeta/STMeta_v0.model.yml",
    "chars": 402,
    "preview": "# network structure parameters\nst_method: 'LSTM'\ntemporal_merge: 'gal'\ngraph_merge: 'gal'\n\n# gcn parameters\ngcn_k: 0\ngcn"
  },
  {
    "path": "Experiments/STMeta/STMeta_v1.model.yml",
    "chars": 424,
    "preview": "# network structure parameters\r\nst_method: 'GCLSTM'\r\ntemporal_merge: 'gal'\r\ngraph_merge: 'gal'\r\n\r\n# gcn parameters\r\ngcn_"
  },
  {
    "path": "Experiments/STMeta/STMeta_v2.model.yml",
    "chars": 427,
    "preview": "# network structure parameters\r\nst_method: 'GCLSTM'\r\ntemporal_merge: 'concat'\r\ngraph_merge: 'gal'\r\n\r\n# gcn parameters\r\ng"
  },
  {
    "path": "Experiments/STMeta/STMeta_v3.model.yml",
    "chars": 527,
    "preview": "# network structure parameters\r\nst_method: 'DCRNN'\r\ntemporal_merge: 'gal'\r\ngraph_merge: 'gal'\r\n\r\n# gcn parameters\r\ngcn_k"
  },
  {
    "path": "Experiments/STMeta/bike_chicago.data.yml",
    "chars": 475,
    "preview": "# dataset and city\r\ndataset: Bike\r\ncity: Chicago\r\n\r\ncloseness_len: 6\r\nperiod_len: 7\r\ntrend_len: 4\r\nwith_tpe: False\r\n\r\ngr"
  },
  {
    "path": "Experiments/STMeta/bike_dc.data.yml",
    "chars": 435,
    "preview": "# dataset and city\ndataset: Bike\ncity: DC\n\ncloseness_len: 6\nperiod_len: 7\ntrend_len: 4\nwith_tpe: False\n\ngraph: Distance-"
  },
  {
    "path": "Experiments/STMeta/bike_nyc.data.yml",
    "chars": 467,
    "preview": "# dataset and city\r\ndataset: Bike\r\ncity: NYC\r\n\r\ncloseness_len: 6\r\nperiod_len: 7\r\ntrend_len: 4\r\nwith_tpe: False\r\n\r\ngraph:"
  },
  {
    "path": "Experiments/STMeta/chargestation_beijing.data.yml",
    "chars": 469,
    "preview": "# dataset and city\r\ndataset: ChargeStation\r\ncity: Beijing\r\n\r\ncloseness_len: 6\r\nperiod_len: 7\r\ntrend_len: 4\r\nwith_tpe: Fa"
  },
  {
    "path": "Experiments/STMeta/didi_chengdu.data.yml",
    "chars": 442,
    "preview": "# dataset and city\ndataset: DiDi\ncity: Chengdu\n\ncloseness_len: 6\nperiod_len: 7\ntrend_len: 4\nwith_tpe: False\n\ngraph: Dist"
  },
  {
    "path": "Experiments/STMeta/didi_chengdu_street.data.yml",
    "chars": 456,
    "preview": "# dataset and city\ndataset: DiDi\ncity: Chengdu_Street\n\ncloseness_len: 6\nperiod_len: 7\ntrend_len: 4\nwith_tpe: False\n\ngrap"
  },
  {
    "path": "Experiments/STMeta/didi_xian.data.yml",
    "chars": 466,
    "preview": "# dataset and city\r\ndataset: DiDi\r\ncity: Xian\r\n\r\ncloseness_len: 6\r\nperiod_len: 7\r\ntrend_len: 4\r\nwith_tpe: False\r\n\r\ngraph"
  },
  {
    "path": "Experiments/STMeta/didi_xian_street.data.yml",
    "chars": 450,
    "preview": "# dataset and city\ndataset: DiDi\ncity: Xian_Street\n\ncloseness_len: 6\nperiod_len: 7\ntrend_len: 4\nwith_tpe: False\n\ngraph: "
  },
  {
    "path": "Experiments/STMeta/gc_search.json",
    "chars": 536,
    "preview": "{\n    \"threshold_correlation\": {\"_type\":\"choice\",\"_value\":[0.65]},\n\n    \"gcn_k\": {\"_type\":\"choice\",\"_value\":[1, 2, 3]},\n"
  },
  {
    "path": "Experiments/STMeta/lstm_search.json",
    "chars": 611,
    "preview": "{\r\n    \"threshold_correlation\": {\"_type\":\"choice\",\"_value\":[0.5, 0.55, 0.6, 0.65, 0.7]},\r\n\r\n    \"batch_size\": {\"_type\":\""
  },
  {
    "path": "Experiments/STMeta/metr_la.data.yml",
    "chars": 424,
    "preview": "# dataset and city\ndataset: METR\ncity: LA\n\ncloseness_len: 6\nperiod_len: 7\ntrend_len: 4\nwith_tpe: False\n\ngraph: Distance-"
  },
  {
    "path": "Experiments/STMeta/metro_chongqing.data.yml",
    "chars": 439,
    "preview": "# dataset and city\ndataset: Metro\ncity: Chongqing\n\ncloseness_len: 6\nperiod_len: 7\ntrend_len: 4\nwith_tpe: False\n\ngraph: D"
  },
  {
    "path": "Experiments/STMeta/metro_shanghai.data.yml",
    "chars": 437,
    "preview": "# dataset and city\ndataset: Metro\ncity: Shanghai\n\ncloseness_len: 6\nperiod_len: 7\ntrend_len: 4\nwith_tpe: False\n\ngraph: Di"
  },
  {
    "path": "Experiments/STMeta/param_search.yml",
    "chars": 456,
    "preview": "authorName: DiChai\r\nexperimentName: network_search\r\ntrialConcurrency: 1\r\nmaxExecDuration: 24h\r\nmaxTrialNum: 50\r\ntraining"
  },
  {
    "path": "Experiments/STMeta/pems_bay.data.yml",
    "chars": 426,
    "preview": "# dataset and city\ndataset: PEMS\ncity: BAY\n\ncloseness_len: 6\nperiod_len: 7\ntrend_len: 4\nwith_tpe: False\n\ngraph: Distance"
  },
  {
    "path": "Experiments/STMeta_Transfer/Runner.py",
    "chars": 5316,
    "preview": "import os\n\n# os.system('python STMeta_Transfer_Test.py --source_data bike_nyc.data.yml --target_data bike_chicago.data.y"
  },
  {
    "path": "Experiments/STMeta_Transfer/STMeta_Pretrain.py",
    "chars": 9271,
    "preview": "import os\nimport yaml\nimport argparse\nimport GPUtil\nimport numpy as np\n\nfrom UCTB.dataset import TransferDataLoader\nfrom"
  },
  {
    "path": "Experiments/STMeta_Transfer/STMeta_Transfer.py",
    "chars": 17476,
    "preview": "import os\nimport yaml\nimport argparse\nimport GPUtil\nimport numpy as np\n\nfrom UCTB.dataset import TransferDataLoader\nfrom"
  },
  {
    "path": "Experiments/STMeta_Transfer/STMeta_Transfer_Dynamic.py",
    "chars": 18419,
    "preview": "import os\nimport yaml\nimport argparse\nimport GPUtil\nimport numpy as np\n\nfrom UCTB.dataset import TransferDataLoader\nfrom"
  },
  {
    "path": "Experiments/STMeta_Transfer/STMeta_Transfer_Test.py",
    "chars": 24030,
    "preview": "import os\nimport yaml\nimport argparse\nimport GPUtil\nimport numpy as np\n\nfrom UCTB.dataset import TransferDataLoader\nfrom"
  },
  {
    "path": "Experiments/STMeta_Transfer/STMeta_v1.model.yml",
    "chars": 399,
    "preview": "# network structure parameters\r\nst_method: 'gclstm'\r\ntemporal_merge: 'gal'\r\ngraph_merge: 'gal'\r\n\r\n# gcn parameters\r\ngcn_"
  },
  {
    "path": "Experiments/STMeta_Transfer/STMeta_v2.model.yml",
    "chars": 402,
    "preview": "# network structure parameters\r\nst_method: 'gclstm'\r\ntemporal_merge: 'concat'\r\ngraph_merge: 'gal'\r\n\r\n# gcn parameters\r\ng"
  },
  {
    "path": "Experiments/STMeta_Transfer/STMeta_v3.model.yml",
    "chars": 504,
    "preview": "# network structure parameters\r\nst_method: 'gal_gcn'\r\ntemporal_merge: 'gal'\r\ngraph_merge: 'gal'\r\n\r\n# gcn parameters\r\ngcn"
  },
  {
    "path": "Experiments/STMeta_Transfer/STMeta_v4.model.yml",
    "chars": 423,
    "preview": "# network structure parameters\r\nst_method: 'gclstm'\r\ntemporal_merge: 'gal'\r\ngraph_merge: 'gal'\r\n\r\n# gcn parameters\r\ngcn_"
  },
  {
    "path": "Experiments/STMeta_Transfer/bike_chicago.data.yml",
    "chars": 425,
    "preview": "# dataset and city\r\ndataset: Bike\r\ncity: Chicago\r\n\r\ncloseness_len: 6\r\nperiod_len: 0\r\ntrend_len: 0\r\n\r\ngraph: Correlation\r"
  },
  {
    "path": "Experiments/STMeta_Transfer/bike_dc.data.yml",
    "chars": 415,
    "preview": "# dataset and city\r\ndataset: Bike\r\ncity: DC\r\n\r\ncloseness_len: 6\r\nperiod_len: 0\r\ntrend_len: 0\r\n\r\ngraph: Correlation\r\n\r\nda"
  },
  {
    "path": "Experiments/STMeta_Transfer/bike_nyc.data.yml",
    "chars": 417,
    "preview": "# dataset and city\r\ndataset: Bike\r\ncity: NYC\r\n\r\ncloseness_len: 6\r\nperiod_len: 0\r\ntrend_len: 0\r\n\r\ngraph: Correlation\r\n\r\nd"
  },
  {
    "path": "Experiments/STMeta_Transfer/chargestation_beijing.data.yml",
    "chars": 447,
    "preview": "# dataset and city\r\ndataset: ChargeStation\r\ncity: Beijing\r\n\r\ncloseness_len: 6\r\nperiod_len: 7\r\ntrend_len: 4\r\n\r\ngraph: Cor"
  },
  {
    "path": "Experiments/STMeta_Transfer/didi_chengdu.data.yml",
    "chars": 366,
    "preview": "# dataset and city\ndataset: DiDi\ncity: Chengdu\n\ncloseness_len: 6\nperiod_len: 0\ntrend_len: 0\n\ngraph: Distance\n\ndata_range"
  },
  {
    "path": "Experiments/STMeta_Transfer/didi_xian.data.yml",
    "chars": 360,
    "preview": "# dataset and city\ndataset: DiDi\ncity: Xian\n\ncloseness_len: 6\nperiod_len: 0\ntrend_len: 0\n\ngraph: Distance\n\ndata_range: a"
  },
  {
    "path": "Experiments/STMeta_Transfer/metro_chongqing.data.yml",
    "chars": 400,
    "preview": "# dataset and city\r\ndataset: Metro\r\ncity: Chongqing\r\n\r\ncloseness_len: 6\r\nperiod_len: 0\r\ntrend_len: 0\r\n\r\ngraph: Correlati"
  },
  {
    "path": "Experiments/STMeta_Transfer/metro_shanghai.data.yml",
    "chars": 400,
    "preview": "# dataset and city\r\ndataset: Metro\r\ncity: ShanghaiV1\r\n\r\ncloseness_len: 6\r\nperiod_len: 0\r\ntrend_len: 0\r\n\r\ngraph: Correlat"
  },
  {
    "path": "Experiments/STMeta_Transfer/network_search.json",
    "chars": 213,
    "preview": "{\r\n    \"st_method\": {\"_type\":\"choice\",\"_value\":[\"gclstm\", \"gal_gcn\"]},\r\n\r\n    \"temporal_merge\": {\"_type\":\"choice\",\"_valu"
  },
  {
    "path": "Experiments/STMeta_Transfer/param_search.yml",
    "chars": 406,
    "preview": "authorName: DiChai\r\nexperimentName: network_search\r\ntrialConcurrency: 1\r\nmaxExecDuration: 24h\r\nmaxTrialNum: 50\r\ntraining"
  },
  {
    "path": "Experiments/STMeta_Transfer/transfer_record.md",
    "chars": 9727,
    "preview": "\n|    Graph    | Match        |   SD    |   TD    | transfer-ratio | TD-训练样本数量 | TD-Direct |  TD-FT  | TD-Transfer |\n| :"
  },
  {
    "path": "Experiments/STMeta_Transfer/transfer_record_bk.md",
    "chars": 25110,
    "preview": "#### Correlation graph transfer result, match using traffic flow (30 days)\n\n|   SD    |   TD    | transfer-ratio | TD-训练"
  },
  {
    "path": "Experiments/STMeta_Transfer/transfer_result_overall.md",
    "chars": 13706,
    "preview": "## Base experiment result\n\n#### Pre-train result\n\n|    Graph    |  City   | Result  |\n| :---------: | :-----: | :-----: "
  },
  {
    "path": "Experiments/STMeta_Transfer/使用流量匹配的结果.md",
    "chars": 3490,
    "preview": "|   SD    |   TD    | transfer-ratio | TD-训练样本数量 | TD-Direct |    TD-FT    | TD-Transfer |\n| :-----: | :-----: | :-----:"
  },
  {
    "path": "Experiments/STSGCN/Runner.py",
    "chars": 6942,
    "preview": "import os\n# #############################################\n# # BenchMark Bike\n# #########################################"
  },
  {
    "path": "Experiments/STSGCN/STSGCN.py",
    "chars": 2857,
    "preview": "import json\nimport argparse\nimport mxnet as mx\nfrom UCTB.dataset import NodeTrafficLoader\nfrom UCTB.model.STSGCN import "
  },
  {
    "path": "Experiments/STSGCN/config/PEMS03/STMeta_emb.json",
    "chars": 676,
    "preview": "{\n    \"module_type\": \"individual\",\n    \"act_type\": \"GLU\",\n    \"temporal_emb\": true,\n    \"spatial_emb\": true,\n    \"use_ma"
  },
  {
    "path": "Experiments/STSGCN/config/PEMS03/STMeta_emb_1.json",
    "chars": 676,
    "preview": "{\n    \"module_type\": \"individual\",\n    \"act_type\": \"GLU\",\n    \"temporal_emb\": true,\n    \"spatial_emb\": true,\n    \"use_ma"
  },
  {
    "path": "Experiments/STSGCN/config/PEMS03/individual_GLU_mask_emb.json",
    "chars": 676,
    "preview": "{\n    \"module_type\": \"individual\",\n    \"act_type\": \"GLU\",\n    \"temporal_emb\": true,\n    \"spatial_emb\": true,\n    \"use_ma"
  },
  {
    "path": "Experiments/STSGCN/config/PEMS03/individual_GLU_nomask_emb.json",
    "chars": 677,
    "preview": "{\n    \"module_type\": \"individual\",\n    \"act_type\": \"GLU\",\n    \"temporal_emb\": true,\n    \"spatial_emb\": true,\n    \"use_ma"
  },
  {
    "path": "Experiments/STSGCN/config/PEMS03/individual_GLU_nomask_noemb.json",
    "chars": 679,
    "preview": "{\n    \"module_type\": \"individual\",\n    \"act_type\": \"GLU\",\n    \"temporal_emb\": false,\n    \"spatial_emb\": false,\n    \"use_"
  },
  {
    "path": "Experiments/STSGCN/config/PEMS03/individual_relu_nomask_noemb.json",
    "chars": 680,
    "preview": "{\n    \"module_type\": \"individual\",\n    \"act_type\": \"relu\",\n    \"temporal_emb\": false,\n    \"spatial_emb\": false,\n    \"use"
  },
  {
    "path": "Experiments/STSGCN/config/PEMS03/sharing_relu_nomask_noemb.json",
    "chars": 677,
    "preview": "{\n    \"module_type\": \"sharing\",\n    \"act_type\": \"relu\",\n    \"temporal_emb\": false,\n    \"spatial_emb\": false,\n    \"use_ma"
  },
  {
    "path": "Experiments/STSGCN/config/PEMS04/individual_GLU.json",
    "chars": 532,
    "preview": "{\n    \"module_type\": \"individual\",\n    \"act_type\": \"GLU\",\n    \"first_layer_embedding_size\": 64,\n    \"filters\": [\n       "
  },
  {
    "path": "Experiments/STSGCN/config/PEMS04/individual_GLU_mask_emb.json",
    "chars": 656,
    "preview": "{\n    \"module_type\": \"individual\",\n    \"act_type\": \"GLU\",\n    \"temporal_emb\": true,\n    \"spatial_emb\": true,\n    \"use_ma"
  },
  {
    "path": "Experiments/STSGCN/config/PEMS04/individual_relu.json",
    "chars": 533,
    "preview": "{\n    \"module_type\": \"individual\",\n    \"act_type\": \"relu\",\n    \"first_layer_embedding_size\": 64,\n    \"filters\": [\n      "
  },
  {
    "path": "Experiments/STSGCN/config/PEMS04/sharing_GLU.json",
    "chars": 529,
    "preview": "{\n    \"module_type\": \"sharing\",\n    \"act_type\": \"GLU\",\n    \"first_layer_embedding_size\": 64,\n    \"filters\": [\n        [6"
  },
  {
    "path": "Experiments/STSGCN/config/PEMS04/sharing_relu.json",
    "chars": 530,
    "preview": "{\n    \"module_type\": \"sharing\",\n    \"act_type\": \"relu\",\n    \"first_layer_embedding_size\": 64,\n    \"filters\": [\n        ["
  },
  {
    "path": "Experiments/STSGCN/config/PEMS07/individual_GLU_mask_emb.json",
    "chars": 661,
    "preview": "{\n    \"module_type\": \"individual\",\n    \"act_type\": \"GLU\",\n    \"temporal_emb\": true,\n    \"spatial_emb\": true,\n    \"use_ma"
  },
  {
    "path": "Experiments/STSGCN/config/PEMS08/individual_GLU_mask_emb.json",
    "chars": 656,
    "preview": "{\n    \"module_type\": \"individual\",\n    \"act_type\": \"GLU\",\n    \"temporal_emb\": true,\n    \"spatial_emb\": true,\n    \"use_ma"
  },
  {
    "path": "Experiments/ST_MGCN/ST_MGCN_Obj.py",
    "chars": 7139,
    "preview": "import os\r\nimport nni\r\nimport GPUtil\r\nimport numpy as np\r\n\r\nfrom UCTB.dataset import NodeTrafficLoader\r\nfrom UCTB.model "
  },
  {
    "path": "Experiments/ST_MGCN/bike_trial.py",
    "chars": 3309,
    "preview": "import os\n\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\nshared_params_st_mgcn = ('python ST_MGCN_Obj.py '\n        "
  },
  {
    "path": "Experiments/ST_MGCN/cs_trial.py",
    "chars": 1233,
    "preview": "import os\n\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\nshared_params_st_mgcn = ('python ST_MGCN_Obj.py '\n        "
  },
  {
    "path": "Experiments/ST_MGCN/didi_trial.py",
    "chars": 2296,
    "preview": "import os\n\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\nshared_params_st_mgcn = ('python ST_MGCN_Obj.py '\n        "
  },
  {
    "path": "Experiments/ST_MGCN/metr_trial.py",
    "chars": 1601,
    "preview": "import os\n\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\nshared_params_st_mgcn = ('python ST_MGCN_Obj.py '\n        "
  },
  {
    "path": "Experiments/ST_MGCN/metro_trial.py",
    "chars": 2326,
    "preview": "import os\n\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\nshared_params_st_mgcn = ('python ST_MGCN_Obj.py '\n        "
  },
  {
    "path": "Experiments/ST_MGCN/param_search.yml",
    "chars": 396,
    "preview": "authorName: DiChai\nexperimentName: network_search\ntrialConcurrency: 2\nmaxExecDuration: 24h\nmaxTrialNum: 50\ntrainingServi"
  },
  {
    "path": "Experiments/ST_MGCN/params_search.json",
    "chars": 249,
    "preview": "{\n    \"K\": {\"_type\":\"choice\",\"_value\":[3]},\n\n    \"L\": {\"_type\":\"choice\",\"_value\":[1]},\n\n    \"lr\": {\"_type\":\"choice\",\"_va"
  },
  {
    "path": "Experiments/ST_MGCN/pems_trial.py",
    "chars": 1605,
    "preview": "import os\n\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\nshared_params_st_mgcn = ('python ST_MGCN_Obj.py '\n        "
  },
  {
    "path": "Experiments/ST_MGCN/street_didi_trial.py",
    "chars": 2007,
    "preview": "import os\n\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\nshared_params_st_mgcn = ('python ST_MGCN_Obj.py '\n        "
  },
  {
    "path": "Experiments/ST_ResNet/ST_ResNet.py",
    "chars": 3513,
    "preview": "import nni\nimport GPUtil\n\nfrom UCTB.dataset import GridTrafficLoader\nfrom UCTB.model import ST_ResNet\nfrom UCTB.evaluati"
  },
  {
    "path": "Experiments/ST_ResNet/param_search.yml",
    "chars": 379,
    "preview": "authorName: DiChai\r\nexperimentName: search_space\r\ntrialConcurrency: 1\r\nmaxExecDuration: 24h\r\nmaxTrialNum: 200\r\ntrainingS"
  },
  {
    "path": "Experiments/ST_ResNet/search_space.json",
    "chars": 357,
    "preview": "{\r\n    \"num_residual_unit\": {\"_type\":\"choice\",\"_value\":[2, 3, 4, 5, 6]},\r\n\r\n    \"conv_filters\": {\"_type\":\"choice\",\"_valu"
  },
  {
    "path": "Experiments/StabilityTest/CPT_AMulti_GCLSTM_Obj.py",
    "chars": 8389,
    "preview": "import os\r\nimport numpy as np\r\n\r\nfrom UCTB.dataset import NodeTrafficLoader_CPT\r\nfrom UCTB.model import STMeta_V1\r\nfrom "
  },
  {
    "path": "Experiments/StabilityTest/CPT_AMulti_GCLSTM_Simplify_Obj.py",
    "chars": 8388,
    "preview": "import os\r\nimport numpy as np\r\n\r\nfrom UCTB.dataset import NodeTrafficLoader_CPT\r\nfrom UCTB.model import STMeta_V2\r\nfrom "
  },
  {
    "path": "Experiments/StabilityTest/Master_CS_0.py",
    "chars": 1271,
    "preview": "import os\r\n\r\nimport warnings\r\nwarnings.filterwarnings(\"ignore\")\r\n\r\nshared_params = ('python CPT_STMeta_Simplify_Obj.py '"
  },
  {
    "path": "Experiments/StabilityTest/Master_DiDi_0.py",
    "chars": 1192,
    "preview": "# _*_ coding:utf-8 _*_\r\nimport os\r\n\r\nimport warnings\r\nwarnings.filterwarnings(\"ignore\")\r\n\r\nshared_params = ('python STMe"
  },
  {
    "path": "Experiments/StabilityTest/Master_Metro_0.py",
    "chars": 1212,
    "preview": "# _*_ coding:utf-8 _*_\r\nimport os\r\n\r\nimport warnings\r\nwarnings.filterwarnings(\"ignore\")\r\n\r\nshared_params = ('python CPT_"
  },
  {
    "path": "Experiments/V3_GACN/GACN_Master.py",
    "chars": 1127,
    "preview": "import os\r\n\r\nimport warnings\r\nwarnings.filterwarnings(\"ignore\")\r\n\r\nshared_params_gacn = ('python V3_GACN.py '\r\n         "
  },
  {
    "path": "Experiments/V3_GACN/GACN_Obj.py",
    "chars": 4486,
    "preview": "import os\r\n\r\nfrom UCTB.dataset import NodeTrafficLoader\r\nfrom UCTB.model import GACN\r\nfrom UCTB.evaluation import metric"
  },
  {
    "path": "Experiments/XGBoost/XGBoost.py",
    "chars": 3380,
    "preview": "import numpy as np\nimport argparse\nfrom UCTB.dataset import NodeTrafficLoader\nfrom UCTB.model import XGBoost\nfrom UCTB.e"
  },
  {
    "path": "Experiments/XGBoost/xgboost_config.yml",
    "chars": 1407,
    "preview": "authorName: lychen\nexperimentName: xgboost_parameter_search\ntrialConcurrency: 2\nmaxExecDuration: 24h\nmaxTrialNum: 200\ntr"
  },
  {
    "path": "Experiments/XGBoost/xgboost_search_space.json",
    "chars": 271,
    "preview": "{\n\n    \"CT\": {\"_type\": \"randint\", \"_value\": [0,13]},\n    \"PT\": {\"_type\": \"randint\", \"_value\": [0,15]},\n    \"TT\": {\"_type"
  },
  {
    "path": "LICENSE",
    "chars": 1068,
    "preview": "MIT License\n\nCopyright (c) [2019] [fullname]\n\nPermission is hereby granted, free of charge, to any person obtaining a co"
  },
  {
    "path": "QuickStarts/ARIMA.py",
    "chars": 1097,
    "preview": "import numpy as np\n\nfrom UCTB.model import ARIMA\nfrom UCTB.dataset import NodeTrafficLoader\nfrom UCTB.evaluation import "
  },
  {
    "path": "QuickStarts/DCRNN.py",
    "chars": 3869,
    "preview": "import numpy as np\n\nfrom UCTB.dataset import NodeTrafficLoader\nfrom UCTB.model import DCRNN\nfrom UCTB.evaluation import "
  },
  {
    "path": "QuickStarts/DeepST.py",
    "chars": 1711,
    "preview": "from UCTB.dataset import GridTrafficLoader\nfrom UCTB.model import DeepST\nfrom UCTB.evaluation import metric\n\n# Config da"
  },
  {
    "path": "QuickStarts/GBRT.py",
    "chars": 1466,
    "preview": "import numpy as np\n\nfrom sklearn.ensemble import GradientBoostingRegressor\n\nfrom UCTB.dataset import NodeTrafficLoader\nf"
  },
  {
    "path": "QuickStarts/GeoMAN.py",
    "chars": 6949,
    "preview": "from UCTB.dataset import NodeTrafficLoader\nfrom UCTB.preprocess import MoveSample\nfrom UCTB.model import GeoMAN\nfrom UCT"
  },
  {
    "path": "QuickStarts/GraphWaveNet.py",
    "chars": 2008,
    "preview": "from UCTB.dataset import NodeTrafficLoader\nfrom UCTB.model.GraphWaveNet import gwnet\nfrom UCTB.evaluation import metric\n"
  },
  {
    "path": "QuickStarts/HM.py",
    "chars": 769,
    "preview": "from UCTB.dataset import NodeTrafficLoader\r\nfrom UCTB.model import HM\r\nfrom UCTB.evaluation import metric\r\nfrom UCTB.uti"
  },
  {
    "path": "QuickStarts/HMM.py",
    "chars": 1215,
    "preview": "import numpy as np\r\n\r\nfrom UCTB.dataset import NodeTrafficLoader\r\nfrom UCTB.model import HMM\r\nfrom UCTB.evaluation impor"
  },
  {
    "path": "QuickStarts/STMeta.py",
    "chars": 2039,
    "preview": "from UCTB.dataset import NodeTrafficLoader\r\nfrom UCTB.model import STMeta\r\nfrom UCTB.evaluation import metric\r\nfrom UCTB"
  },
  {
    "path": "QuickStarts/ST_ResNet.py",
    "chars": 1719,
    "preview": "from UCTB.dataset import GridTrafficLoader\nfrom UCTB.model import ST_ResNet\nfrom UCTB.evaluation import metric\n\n# Config"
  },
  {
    "path": "QuickStarts/Visualization.py",
    "chars": 235,
    "preview": "from UCTB.dataset import NodeTrafficLoader\r\n#from UCTB.utils import st_map\r\n\r\nfrom dateutil.parser import parse\r\n\r\n# Con"
  },
  {
    "path": "QuickStarts/XGBoost.py",
    "chars": 1280,
    "preview": "import numpy as np\r\n\r\nfrom UCTB.dataset import NodeTrafficLoader\r\nfrom UCTB.model import XGBoost\r\nfrom UCTB.evaluation i"
  },
  {
    "path": "QuickStarts/XGBoost_Validate.py",
    "chars": 2288,
    "preview": "import numpy as np\r\n\r\nfrom UCTB.dataset import NodeTrafficLoader\r\nfrom UCTB.model import XGBoost\r\nfrom UCTB.evaluation i"
  },
  {
    "path": "README.md",
    "chars": 10126,
    "preview": "# UCTB (Urban Computing Tool Box)\n\n [![Python](https://img.shields.io/badge/python-3.6%7C3.7-blue)]() [![PyPI](https://i"
  },
  {
    "path": "UCTB/__init__.py",
    "chars": 195,
    "preview": "\r\nfrom . import dataset\r\n\r\nfrom . import evaluation\r\nfrom . import model\r\nfrom . import model_unit\r\n\r\nfrom . import trai"
  },
  {
    "path": "UCTB/dataset/__init__.py",
    "chars": 111,
    "preview": "from .data_loader import NodeTrafficLoader, TransferDataLoader, GridTrafficLoader\r\nfrom .dataset import DataSet"
  },
  {
    "path": "UCTB/dataset/context_loader.py",
    "chars": 3527,
    "preview": "from abc import ABC, abstractmethod\n\nclass TemporalContextLoader(ABC):\n\n    def __init__(self, traffic_dataloader):\n\n   "
  },
  {
    "path": "UCTB/dataset/data_loader.py",
    "chars": 28481,
    "preview": "import os\nimport copy\nimport datetime\nimport numpy as np\nfrom dateutil.parser import parse\nfrom sklearn.metrics.pairwise"
  },
  {
    "path": "UCTB/dataset/dataset.py",
    "chars": 6510,
    "preview": "import os\r\nimport wget\r\nimport pickle\r\nimport tarfile\r\nimport numpy as np\r\n\r\n\r\nclass DataSet(object):\r\n    \"\"\"An object "
  },
  {
    "path": "UCTB/evaluation/__init__.py",
    "chars": 20,
    "preview": "from . import metric"
  },
  {
    "path": "UCTB/evaluation/metric.py",
    "chars": 4642,
    "preview": "\r\nimport numpy as np\r\n\r\ndef rmse(prediction, target, threshold=None):\r\n    \"\"\"\r\n    Root Mean Square Error (RMSE)\r\n    \r"
  },
  {
    "path": "UCTB/model/AGCRN.py",
    "chars": 6756,
    "preview": "import torch\nimport torch.nn.functional as F\nimport torch.nn as nn\nimport torch\n\nclass AVWGCN(nn.Module):\n    def __init"
  },
  {
    "path": "UCTB/model/ARIMA.py",
    "chars": 5866,
    "preview": "import numpy as np\r\nimport pandas as pd\r\nimport statsmodels.api as sm\r\n\r\nimport warnings\r\nwarnings.filterwarnings(\"ignor"
  },
  {
    "path": "UCTB/model/ASTGCN.py",
    "chars": 11737,
    "preview": "# -*- coding:utf-8 -*-\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport numpy as np\nimport torc"
  },
  {
    "path": "UCTB/model/DCRNN.py",
    "chars": 8675,
    "preview": "import tensorflow as tf\n\nfrom ..model_unit import BaseModel\nfrom ..model_unit import DCGRUCell\n\nfrom tensorflow.contrib "
  },
  {
    "path": "UCTB/model/DeepST.py",
    "chars": 8533,
    "preview": "import os\r\nimport tensorflow as tf\r\n\r\nfrom ..model_unit import BaseModel\r\n\r\n\r\nclass DeepST(BaseModel):\r\n    \"\"\"Deep lear"
  },
  {
    "path": "UCTB/model/GMAN.py",
    "chars": 15087,
    "preview": "\nimport tensorflow as tf\nimport numpy as np\nimport random\n\nclass Graph():\n\tdef __init__(self, nx_G, is_directed, p, q):\n"
  },
  {
    "path": "UCTB/model/GeoMAN.py",
    "chars": 23966,
    "preview": "import tensorflow as tf\nfrom tensorflow.contrib.framework import nest\nfrom ..model_unit import BaseModel\n\n\nclass GeoMAN("
  },
  {
    "path": "UCTB/model/GraphWaveNet.py",
    "chars": 8852,
    "preview": "import torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nclass nconv(nn.Module):\n    def __init__(self):\n    "
  },
  {
    "path": "UCTB/model/HM.py",
    "chars": 1402,
    "preview": "import numpy as np\n\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\n\nclass HM(object):\n    '''\n    Historical Mean. A"
  },
  {
    "path": "UCTB/model/HMM.py",
    "chars": 1105,
    "preview": "import numpy as np\r\nfrom hmmlearn import hmm\r\n\r\n\r\nclass HMM(object):\r\n    def __init__(self, num_components, n_iter, hmm"
  }
]

// ... and 318 more files (download for full content)

About this extraction

This page contains the full source code of the uctb/UCTB GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 518 files (6.7 MB, approximately 1.8M tokens) and a symbol index of 1,453 extracted functions, classes, methods, constants, and types. You can use this output with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input, and you can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — a free GitHub-repository-to-text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!