29049_apikey
Showing
2 changed files
with
31 additions
and
3 deletions
.env-template
0 → 100644
| 1 | # Examples values are set to DEV | ||
| 2 | |||
| 3 | OPENAI_API_ORG="org-XXXXXXXXXXXXXXXXXXXXXXXX" | ||
| 4 | OPENAI_API_KEY="sk-XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" | ||
| 5 | SERVER_URL="https://dev.chatgpt.ai-pro.org/" | ||
| 6 | REACT_APP_SERVER_URL="https://dev.chatgpt.ai-pro.org" | ||
| 7 | |||
| 8 | REACT_APP_BTUTIL_ASSET_URL=https://dev.api.ai-pro.org/ext-app/js/btutil-all-v1.min.js?ver= | ||
| 9 | |||
| 10 | API_URL='https://dev.api.ai-pro.org' | ||
| 11 | |||
| 12 | OPENSOURCE_MODELS="openchat_3.5,zephyr-7B-beta" | ||
| 13 | OPENSOURCE_ENDPOINTS={"openchat_3.5": "https://openchat.llm.ai-pro.org/v1", "zephyr-7B-beta": "https://zephyr.llm.ai-pro.org/v1"} | ||
| 14 | REACT_APP_START_SERVER_URL=https://dev.start.ai-pro.org | ||
| ... | \ No newline at end of file | ... | \ No newline at end of file |
| ... | @@ -227,6 +227,12 @@ const get_endpoint_api_url = (currentModel) => { | ... | @@ -227,6 +227,12 @@ const get_endpoint_api_url = (currentModel) => { |
| 227 | const endpoint_api_url = endpoints?.[currentModel]; | 227 | const endpoint_api_url = endpoints?.[currentModel]; |
| 228 | return endpoint_api_url | 228 | return endpoint_api_url |
| 229 | } | 229 | } |
/**
 * Look up the API key for an open-source model endpoint.
 *
 * Reads the OPENSOURCE_API_KEY environment variable, expected to hold a JSON
 * object mapping model names to API keys, e.g.
 *   {"openchat_3.5": "key-a", "zephyr-7B-beta": "key-b"}
 * (mirrors the OPENSOURCE_ENDPOINTS shape used by get_endpoint_api_url).
 *
 * @param {string} currentModel - model identifier to look up
 * @returns {string|undefined} the API key for the model, or undefined when
 *   the env var is unset, is not valid JSON, or has no entry for the model
 */
const get_endpoint_api_key = (currentModel) => {
  const raw = process.env.OPENSOURCE_API_KEY;
  // Guard: the original JSON.parse(undefined) throws a SyntaxError and
  // crashes the request handler whenever the env var is missing.
  if (!raw) return undefined;
  try {
    const api_keys = JSON.parse(raw);
    return api_keys?.[currentModel];
  } catch {
    // Malformed JSON in the env var — treat as "no key configured"
    // rather than taking down the whole request.
    return undefined;
  }
};
| 230 | async function runOpensource(req, res) { | 236 | async function runOpensource(req, res) { |
| 231 | const { message, currentModel, temperature } = req.body; | 237 | const { message, currentModel, temperature } = req.body; |
| 232 | var input = ''; | 238 | var input = ''; |
| ... | @@ -234,19 +240,27 @@ async function runOpensource(req, res) { | ... | @@ -234,19 +240,27 @@ async function runOpensource(req, res) { |
| 234 | const query_prompt = message_history.length ? message_history[message_history.length - 1].content : ""; | 240 | const query_prompt = message_history.length ? message_history[message_history.length - 1].content : ""; |
| 235 | 241 | ||
| 236 | try { | 242 | try { |
| 243 | let error_msg = ""; | ||
| 237 | const endpoint_api_url = get_endpoint_api_url(currentModel); | 244 | const endpoint_api_url = get_endpoint_api_url(currentModel); |
| 238 | console.log('endpoint_api_url', endpoint_api_url); | 245 | const api_key = get_endpoint_api_key(currentModel); |
| 239 | const response = await axios.post(endpoint_api_url + '/chat/completions', { | 246 | const response = await axios.post(endpoint_api_url + '/chat/completions', { |
| 240 | messages: JSON.parse(message), | 247 | messages: JSON.parse(message), |
| 241 | temperature | 248 | temperature |
| 242 | }, { | 249 | }, { |
| 243 | headers: { | 250 | headers: { |
| 244 | 'Content-Type': 'application/json', | 251 | 'Content-Type': 'application/json', |
| 245 | // 'Authorization': `Bearer ${process.env.OPENSOURCE_API_KEY}` | 252 | 'Authorization': 'Bearer '+api_key |
| 246 | }, | 253 | }, |
| 254 | }) .catch(error => { | ||
| 255 | error_msg = error.response.statusText ? error.response.statusText : ''; | ||
| 247 | }); | 256 | }); |
| 248 | console.log(" zephyr response", response.data.choices[0]) | 257 | |
| 258 | if (error_msg!==''){ | ||
| 259 | input = "Error: "+error_msg; | ||
| 260 | }else{ | ||
| 249 | input = response.data.choices[0].message.content | 261 | input = response.data.choices[0].message.content |
| 262 | } | ||
| 263 | |||
| 250 | } catch (e) { | 264 | } catch (e) { |
| 251 | let error_msg = e.response.data.error.message ? e.response.data.error.message : ''; | 265 | let error_msg = e.response.data.error.message ? e.response.data.error.message : ''; |
| 252 | if (error_msg.indexOf('maximum context length') >= 0) { | 266 | if (error_msg.indexOf('maximum context length') >= 0) { | ... | ... |
-
Please register or sign in to post a comment