# Available datasets: "cora", "citeseer", "pubmed"
# Enable attention by appending --use_attention=True to any command below; see the example after the commands.
TL_BACKEND="paddle" python hcha_trainer.py --dataset cora --lr 0.01 --l2_coef 0.01 --drop_rate 0.9
TL_BACKEND="paddle" python hcha_trainer.py --dataset citeseer --lr 0.01 --l2_coef 0.01 --drop_rate 0.7
TL_BACKEND="paddle" python hcha_trainer.py --dataset pubmed --lr 0.01 --l2_coef 0.005 --drop_rate 0.6
TL_BACKEND="torch" python hcha_trainer.py --dataset cora --lr 0.005 --l2_coef 0.01 --drop_rate 0.8
TL_BACKEND="torch" python hcha_trainer.py --dataset citeseer --lr 0.01 --l2_coef 0.01 --drop_rate 0.7
TL_BACKEND="torch" python hcha_trainer.py --dataset pubmed --lr 0.01 --l2_coef 0.002 --drop_rate 0.5
TL_BACKEND="mindspore" python hcha_trainer.py --dataset cora --lr 0.01 --l2_coef 0.01 --drop_rate 0.6
TL_BACKEND="mindspore" python hcha_trainer.py --dataset citeseer --lr 0.01 --l2_coef 0.05 --drop_rate 0.7
TL_BACKEND="mindspore" python hcha_trainer.py --dataset pubmed --lr 0.01 --l2_coef 0.01 --drop_rate 0.6