-
Notifications
You must be signed in to change notification settings - Fork 5
Expand file tree
/
Copy path gpt-3.5-turbo.sh
More file actions
73 lines (66 loc) · 1.75 KB
/
gpt-3.5-turbo.sh
File metadata and controls
73 lines (66 loc) · 1.75 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
#!/usr/bin/env bash
# Run code_score.py evaluations with gpt-3.5-turbo-1106 against the
# codellama--CodeLlama-13b-Instruct-hf test cases, sweeping over the
# step / compare_prompt / return_type configurations.
#
# All runs use temperature 0 and a single sample. The original script
# repeated the full command eight times; the sweep is now driven through
# one helper so a flag change applies everywhere, and `set -euo pipefail`
# stops the sweep at the first failing run instead of silently continuing.
set -euo pipefail

readonly SCORE_SCRIPT=./bigcodebench/code_score.py
readonly TEST_CASE=codellama--CodeLlama-13b-Instruct-hf
readonly MODEL=gpt-3.5-turbo-1106

#######################################
# Run one code_score.py evaluation.
# Arguments:
#   $1 - step (1 or 2)
#   $2 - compare_prompt id
#   $3 - return_type
#   $@ - any extra flags (e.g. --analyze_prompt 0), inserted in the same
#        position the original commands used (after --compare_prompt).
# Returns: exit status of the python invocation.
#######################################
run_score() {
  local step=$1 compare_prompt=$2 return_type=$3
  shift 3
  python "$SCORE_SCRIPT" \
    --test_case "$TEST_CASE" \
    --model "$MODEL" \
    --step "$step" \
    --compare_prompt "$compare_prompt" \
    "$@" \
    --temperature 0 \
    --return_type "$return_type" \
    --num_samples 1
}

# Step 2: boolean verdicts, with analyze_prompt disabled.
run_score 2 1 bool --analyze_prompt 0
run_score 2 0 bool --analyze_prompt 0

# Step 1: boolean verdicts.
run_score 1 0 bool
run_score 1 1 bool

# Step 1: 0-4 functional-correctness scoring.
run_score 1 2 "0_to_4_score_functional_correctness"
run_score 1 3 "0_to_4_score_functional_correctness"

# Step 1: inconsistency-level judgments.
run_score 1 4 "inconsistency_level"
run_score 1 5 "inconsistency_level"