baichuan2_base_inference.sh
#!/bin/bash
# Run the Baichuan2 base models on each task/language pair; results and logs
# are written next to the inputs with the bf16_add_simple_example suffix.
models=('baichuan2_13b_base' 'baichuan2_7b_base')

# x_csqa: inputs are the chatgpt_res.jsonl files
langs=('PL_trans' 'RU_trans' 'ZH_trans' 'ar_trans' 'he_trans' 'en' 'DE_trans' 'JA_trans' 'FR_trans' 'IT_trans')
task="x_csqa"
for model in "${models[@]}"
do
    for lang in "${langs[@]}"
    do
        echo "$task"
        echo "$lang"
        echo "$model"
        CUDA_VISIBLE_DEVICES=3 python inference.py \
            --input_path "../${task}/${lang}/chatgpt_res.jsonl" \
            --output_path "../${task}/${lang}/${model}_res_bf16_add_simple_example.jsonl" \
            --model_name "${model}" \
            --add_simple_example \
            --task "${task}" \
            >> "../${task}/${lang}/${model}_inference_bf16_add_simple_example.log"
    done
done
langs=('it' 'pl' 'ru' 'en' 'de' 'ja' 'fr' 'zh' 'ar' 'he')
task="x_geo"
for model in "${models[@]}"
do
    for lang in "${langs[@]}"
    do
        echo "$task"
        echo "$lang"
        echo "$model"
        CUDA_VISIBLE_DEVICES=3 python inference.py \
            --input_path "../${task}/${lang}/generate_input.jsonl" \
            --output_path "../${task}/${lang}/${model}_res_bf16_add_simple_example.jsonl" \
            --model_name "${model}" \
            --add_simple_example \
            --task "${task}" \
            >> "../${task}/${lang}/${model}_inference_bf16_add_simple_example.log"
    done
done
langs=('it' 'pl' 'ru' 'en' 'de' 'ja' 'fr' 'zh' 'ar' 'he')
task="x_name"
for model in "${models[@]}"
do
    for lang in "${langs[@]}"
    do
        echo "$task"
        echo "$lang"
        echo "$model"
        CUDA_VISIBLE_DEVICES=3 python inference.py \
            --input_path "../${task}/${lang}/generate_input.jsonl" \
            --output_path "../${task}/${lang}/${model}_res_bf16_add_simple_example.jsonl" \
            --model_name "${model}" \
            --add_simple_example \
            --task "${task}" \
            >> "../${task}/${lang}/${model}_inference_bf16_add_simple_example.log"
    done
done
langs=('DE' 'JA' 'FR' 'PL' 'RU' 'ar' 'he' 'it' 'en' 'zh')
task="x_copa"
for model in "${models[@]}"
do
    for lang in "${langs[@]}"
    do
        echo "$task"
        echo "$lang"
        echo "$model"
        CUDA_VISIBLE_DEVICES=3 python inference.py \
            --input_path "../${task}/${lang}/generate_input.jsonl" \
            --output_path "../${task}/${lang}/${model}_res_bf16_add_simple_example.jsonl" \
            --model_name "${model}" \
            --add_simple_example \
            --task "${task}" \
            >> "../${task}/${lang}/${model}_inference_bf16_add_simple_example.log"
    done
done
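
# --- Optional consolidated form (sketch, disabled by default) ---
# The four loops above differ only in the task name, the language list, and the
# input filename. The block below is a hypothetical, equivalent reformulation
# driven by a task->languages map (assumes bash 4+ for associative arrays); the
# flag names and paths are copied from the calls above. Set RUN_CONSOLIDATED=1
# to use it instead of re-running the per-task blocks.
if [ "${RUN_CONSOLIDATED:-0}" = "1" ]; then
    declare -A task_langs=(
        [x_csqa]="PL_trans RU_trans ZH_trans ar_trans he_trans en DE_trans JA_trans FR_trans IT_trans"
        [x_geo]="it pl ru en de ja fr zh ar he"
        [x_name]="it pl ru en de ja fr zh ar he"
        [x_copa]="DE JA FR PL RU ar he it en zh"
    )
    # x_csqa reads chatgpt_res.jsonl; the other tasks read generate_input.jsonl.
    declare -A task_input=(
        [x_csqa]="chatgpt_res.jsonl"
        [x_geo]="generate_input.jsonl"
        [x_name]="generate_input.jsonl"
        [x_copa]="generate_input.jsonl"
    )
    for model in "${models[@]}"
    do
        for task in "${!task_langs[@]}"
        do
            for lang in ${task_langs[$task]}
            do
                CUDA_VISIBLE_DEVICES=3 python inference.py \
                    --input_path "../${task}/${lang}/${task_input[$task]}" \
                    --output_path "../${task}/${lang}/${model}_res_bf16_add_simple_example.jsonl" \
                    --model_name "${model}" \
                    --add_simple_example \
                    --task "${task}" \
                    >> "../${task}/${lang}/${model}_inference_bf16_add_simple_example.log"
            done
        done
    done
fi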