File tree Expand file tree Collapse file tree 2 files changed +44
-25
lines changed
Expand file tree Collapse file tree 2 files changed +44
-25
lines changed Original file line number Diff line number Diff line change @@ -4,7 +4,9 @@ const dialog = ref(false)
44const currentModel = useCurrentModel ()
// Model identifiers the user may pick from in the settings dialog.
// Keep in sync with the keys of MODELS in the constants module.
const availableModels = [
  'gpt-3.5-turbo',
  'gpt-4',
  'gpt-3.5-turbo-16k',
  'gpt-4-32k',
]
911const currentModelDefault = ref (MODELS [currentModel .value .name ])
1012
@@ -198,4 +200,4 @@ onNuxtReady(() => {
198200
199201<style scoped>
200202
201- </style >
203+ </style >
Original file line number Diff line number Diff line change 1-
/**
 * Keys under which the app persists its state in client storage.
 * Values are the literal storage-key strings; do not change them or
 * previously saved data becomes unreachable.
 */
export const STORAGE_KEY = {
  MODELS: 'models',
  CURRENT_MODEL: 'current_model',
  OPENAI_API_KEY: 'openai_api_key',
}
76
/**
 * Per-model OpenAI chat-completion defaults, keyed by model identifier.
 *
 * Invariant: each entry's `name` equals its key. The UI stores the picked
 * `name` and later resolves settings via MODELS[currentModel.value.name],
 * so a mismatch makes that lookup return undefined.
 *
 * total_tokens — the model's context-window size (tokens).
 * max_tokens   — default cap on tokens generated per completion.
 */
export const MODELS = {
  'gpt-3.5-turbo': {
    name: 'gpt-3.5-turbo',
    frequency_penalty: 0.0,
    presence_penalty: 0.0,
    total_tokens: 4096,
    max_tokens: 1000,
    temperature: 0.7,
    top_p: 1.0
  },
  'gpt-3.5-turbo-16k': {
    // FIX: was 'gpt-3.5-16k', which mismatched the key (and the entry in
    // availableModels), breaking MODELS[name] lookups for this model.
    name: 'gpt-3.5-turbo-16k',
    frequency_penalty: 0.0,
    presence_penalty: 0.0,
    total_tokens: 16384,
    max_tokens: 4000,
    temperature: 0.7,
    top_p: 1.0
  },
  'gpt-4-32k': {
    name: 'gpt-4-32k',
    frequency_penalty: 0.0,
    presence_penalty: 0.0,
    total_tokens: 32768,
    max_tokens: 8000,
    temperature: 0.7,
    top_p: 1.0
  },
  'gpt-4': {
    name: 'gpt-4',
    frequency_penalty: 0.0,
    presence_penalty: 0.0,
    total_tokens: 8192,
    max_tokens: 2000,
    temperature: 0.7,
    top_p: 1.0
  }
}
2845
// Model selected when the user has not chosen one yet; must be a key of MODELS.
export const DEFAULT_MODEL_NAME = 'gpt-3.5-turbo'
You can’t perform that action at this time.
0 commit comments