Skip to content
Toggle navigation
Toggle navigation
This project
Loading...
Sign in
Administrator
/
chatgpt.ai-pro.org
Go to a project
Toggle navigation
Toggle navigation pinning
Projects
Groups
Snippets
Help
Project
Activity
Repository
Pipelines
Graphs
Issues
0
Merge Requests
3
Wiki
Network
Create a new issue
Builds
Commits
Issue Boards
Files
Commits
Network
Compare
Branches
Tags
43da8db2
authored
2024-10-01 15:05:14 +0800
by
Bryan Batac
Browse Files
Options
Browse Files
Tag
Download
Email Patches
Plain Diff
- fix the issue of the model not responding when the temperature is changed
- added error log
1 parent
84486d88
Show whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
12 additions
and
12 deletions
index.js
index.js
View file @
43da8db
...
...
@@ -89,11 +89,11 @@ const user_secret_id = process.env.USER_SECRET_ID || "aiwp_logged_in";
const
aiwp_app_id
=
"chatbot+"
;
// Primary Open AI Route
app
.
post
(
'/api'
,
async
(
req
,
res
)
=>
{
//
if(!req.get('origin') || (!req.get('origin').includes(req.get('host')))) {
//
res.status(401);
//
res.send('Method Not Allowed');
//
return;
//
}
if
(
!
req
.
get
(
'origin'
)
||
(
!
req
.
get
(
'origin'
).
includes
(
req
.
get
(
'host'
))))
{
res
.
status
(
401
);
res
.
send
(
'Method Not Allowed'
);
return
;
}
const
{
message
,
currentModel
,
temperature
}
=
req
.
body
;
const
getEndpoint
=
(
modelName
)
=>
{
const
model
=
Object
.
values
(
filteredModels
).
find
(
m
=>
m
.
model
===
modelName
);
...
...
@@ -151,7 +151,7 @@ app.post('/api', async (req, res) => {
model
:
`
${
currentModel
}
`
,
// "text-davinci-003",
prompt
:
query_prompt
,
max_tokens
:
max_tokens
,
temperature
,
temperature
:
parseFloat
(
temperature
)
,
});
let
input
=
response
.
data
.
choices
[
0
].
text
;
let
usage
=
{};
...
...
@@ -191,7 +191,7 @@ app.post('/api', async (req, res) => {
message
:
"The output for your prompt is too long for us to process. Please reduce your prompt and try again."
,
})
}
else
{
//
console.log(e.response);
console
.
log
(
e
.
response
);
}
}
finally
{
// console.log('We do cleanup here');
...
...
@@ -225,7 +225,7 @@ async function runGPTTurbo(req, res) {
model
:
`
${
currentModel
}
`
,
messages
:
JSON
.
parse
(
message
),
max_tokens
:
max_tokens
,
temperature
temperature
:
parseFloat
(
temperature
),
});
input
=
response
.
data
.
choices
[
0
].
message
.
content
}
catch
(
e
)
{
...
...
@@ -233,7 +233,7 @@ async function runGPTTurbo(req, res) {
if
(
error_msg
.
indexOf
(
'maximum context length'
)
>=
0
)
{
input
=
"The output for your prompt is too long for us to process. Please reduce your prompt and try again."
;
}
else
{
//
console.log(e.response);
console
.
log
(
e
.
response
);
}
}
finally
{
...
...
@@ -300,7 +300,7 @@ async function runOpensource(req, res) {
model
:
currentModel
,
messages
:
JSON
.
parse
(
message
),
max_tokens
:
2048
,
temperature
,
temperature
:
parseFloat
(
temperature
)
,
top_p
:
0.7
,
top_k
:
50
,
repetition_penalty
:
1
...
...
@@ -334,7 +334,7 @@ async function runOpensource(req, res) {
if
(
error_msg
.
indexOf
(
'maximum context length'
)
>=
0
)
{
input
=
"The output for your prompt is too long for us to process. Please reduce your prompt and try again."
;
}
else
{
//
console.log(e.response);
console
.
log
(
e
.
response
);
}
}
finally
{
let
usage_params
=
{
...
...
@@ -440,7 +440,7 @@ async function setChatUsage(params) {
async
function
validation
(
aiwp_app_id
,
req
,
res
)
{
const
aiwp_logged_in
=
req
.
cookies
[
user_secret_id
]
?
decodeURIComponent
(
req
.
cookies
[
user_secret_id
])
:
""
;
const
limit
=
req
.
cookies
[
"WcvYPABR"
]
?
parseInt
(
req
.
cookies
[
"WcvYPABR"
].
replace
(
/
\D
/g
,
''
))
:
9999999999999999999
;
const
limit
=
req
.
cookies
[
"WcvYPABR"
]
?
parseInt
(
req
.
cookies
[
"WcvYPABR"
].
replace
(
/
\D
/g
,
''
))
:
3
;
// change this to 99999 if working on dev
let
IS_FREE_USER
=
false
;
let
TRIED_USAGE
=
0
;
const
ip_address
=
getClientIP
(
req
);
...
...
Write
Preview
Styling with
Markdown
is supported
Attach a file
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to post a comment