Skip to content
Toggle navigation
Toggle navigation
This project
Loading...
Sign in
Administrator
/
chatgpt.ai-pro.org
Go to a project
Toggle navigation
Toggle navigation pinning
Projects
Groups
Snippets
Help
Project
Activity
Repository
Pipelines
Graphs
Issues
0
Merge Requests
3
Wiki
Network
Create a new issue
Builds
Commits
Issue Boards
Files
Commits
Network
Compare
Branches
Tags
535ca42c
authored
2024-01-09 13:16:56 +0800
by
Jeff
Browse Files
Options
Browse Files
Tag
Download
Email Patches
Plain Diff
29049_apikey
1 parent
4ab6686e
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
33 additions
and
5 deletions
.env-template
index.js
.env-template
0 → 100644
View file @
535ca42
# Examples values are set to DEV
# NOTE: never commit real credentials to a template file — use placeholders.
# The key previously on this line is exposed in git history and must be rotated.
OPENAI_API_ORG="org-XXXXXXXXXXXXXXXXXXXXXXXX"
OPENAI_API_KEY="sk-XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
SERVER_URL="https://dev.chatgpt.ai-pro.org/"
REACT_APP_SERVER_URL="https://dev.chatgpt.ai-pro.org"
REACT_APP_BTUTIL_ASSET_URL=https://dev.api.ai-pro.org/ext-app/js/btutil-all-v1.min.js?ver=
API_URL='https://dev.api.ai-pro.org'
OPENSOURCE_MODELS="openchat_3.5,zephyr-7B-beta"
OPENSOURCE_ENDPOINTS={"openchat_3.5": "https://openchat.llm.ai-pro.org/v1", "zephyr-7B-beta": "https://zephyr.llm.ai-pro.org/v1"}
REACT_APP_START_SERVER_URL=https://dev.start.ai-pro.org
\ No newline at end of file
index.js
View file @
535ca42
...
...
@@ -227,6 +227,12 @@ const get_endpoint_api_url = (currentModel) => {
const
endpoint_api_url
=
endpoints
?.[
currentModel
];
return
endpoint_api_url
}
/**
 * Look up the API key for an open-source model.
 *
 * Reads the OPENSOURCE_API_KEY env var, which is expected to hold a JSON
 * map of model name -> key (mirroring how OPENSOURCE_ENDPOINTS is a JSON
 * map of model name -> URL in .env-template).
 *
 * @param {string} currentModel - model identifier, e.g. "openchat_3.5"
 * @returns {string|undefined} the key for the model, or undefined when the
 *   env var is unset, is not valid JSON, or has no entry for the model.
 *
 * Fix: the original called JSON.parse(process.env.OPENSOURCE_API_KEY)
 * unconditionally; with the env var unset that throws
 * `SyntaxError: "undefined" is not valid JSON` and crashes the request
 * handler. Guard the read and the parse so a missing/malformed config
 * degrades to "no key" instead.
 */
const get_endpoint_api_key = (currentModel) => {
  const raw = process.env.OPENSOURCE_API_KEY;
  if (!raw) {
    return undefined;
  }
  let api_keys;
  try {
    api_keys = JSON.parse(raw);
  } catch (err) {
    // Malformed config is an ops problem, not a per-request crash.
    console.error('OPENSOURCE_API_KEY is not valid JSON:', err.message);
    return undefined;
  }
  const key = api_keys?.[currentModel];
  return key;
};
async
function
runOpensource
(
req
,
res
)
{
const
{
message
,
currentModel
,
temperature
}
=
req
.
body
;
var
input
=
''
;
...
...
@@ -234,19 +240,27 @@ async function runOpensource(req, res) {
const
query_prompt
=
message_history
.
length
?
message_history
[
message_history
.
length
-
1
].
content
:
""
;
try
{
let
error_msg
=
""
;
const
endpoint_api_url
=
get_endpoint_api_url
(
currentModel
);
cons
ole
.
log
(
'endpoint_api_url'
,
endpoint_api_ur
l
);
cons
t
api_key
=
get_endpoint_api_key
(
currentMode
l
);
const
response
=
await
axios
.
post
(
endpoint_api_url
+
'/chat/completions'
,
{
messages
:
JSON
.
parse
(
message
),
temperature
},
{
headers
:
{
'Content-Type'
:
'application/json'
,
// 'Authorization': `Bearer ${process.env.OPENSOURCE_API_KEY}`
},
'Authorization'
:
'Bearer '
+
api_key
},
})
.
catch
(
error
=>
{
error_msg
=
error
.
response
.
statusText
?
error
.
response
.
statusText
:
''
;
});
console
.
log
(
" zephyr response"
,
response
.
data
.
choices
[
0
])
input
=
response
.
data
.
choices
[
0
].
message
.
content
if
(
error_msg
!==
''
){
input
=
"Error: "
+
error_msg
;
}
else
{
input
=
response
.
data
.
choices
[
0
].
message
.
content
}
}
catch
(
e
)
{
let
error_msg
=
e
.
response
.
data
.
error
.
message
?
e
.
response
.
data
.
error
.
message
:
''
;
if
(
error_msg
.
indexOf
(
'maximum context length'
)
>=
0
)
{
...
...
Write
Preview
Styling with
Markdown
is supported
Attach a file
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to post a comment