Merge branch '31531_apply_tokenization_on_other_llms' into 'master'
#31531 - Apply tokenization logic on other LLMs. See merge request !112
Showing 3 changed files with 45 additions and 16 deletions
| ... | @@ -6,6 +6,7 @@ | ... | @@ -6,6 +6,7 @@ |
| 6 | "@testing-library/jest-dom": "^5.16.5", | 6 | "@testing-library/jest-dom": "^5.16.5", |
| 7 | "@testing-library/react": "^13.4.0", | 7 | "@testing-library/react": "^13.4.0", |
| 8 | "@testing-library/user-event": "^13.5.0", | 8 | "@testing-library/user-event": "^13.5.0", |
| 9 | "js-cookie": "^3.0.5", | ||
| 9 | "react": "^18.2.0", | 10 | "react": "^18.2.0", |
| 10 | "react-dom": "^18.2.0", | 11 | "react-dom": "^18.2.0", |
| 11 | "react-scripts": "5.0.1", | 12 | "react-scripts": "5.0.1", | ... | ... |
| ... | @@ -4,11 +4,13 @@ import './color_theme_1.css'; | ... | @@ -4,11 +4,13 @@ import './color_theme_1.css'; |
| 4 | import { useState, useEffect } from 'react'; | 4 | import { useState, useEffect } from 'react'; |
| 5 | import SideMenu from './SideMenu' | 5 | import SideMenu from './SideMenu' |
| 6 | import ChatBox from './ChatBox' | 6 | import ChatBox from './ChatBox' |
| 7 | import Cookies from 'js-cookie' | ||
| 7 | 8 | ||
| 8 | function App() { | 9 | function App() { |
| 9 | 10 | ||
| 10 | useEffect(() => { | 11 | useEffect(() => { |
| 11 | getEngines(); | 12 | getEngines(); |
| 13 | displayTogetherAiResponse() | ||
| 12 | }, []) | 14 | }, []) |
| 13 | 15 | ||
| 14 | const [chatInput, setChatInput] = useState(""); | 16 | const [chatInput, setChatInput] = useState(""); |
| ... | @@ -30,6 +32,19 @@ function App() { | ... | @@ -30,6 +32,19 @@ function App() { |
| 30 | 32 | ||
| 31 | const [chatLogOpenSource, setChatLogOpenSource] = useState([]); | 33 | const [chatLogOpenSource, setChatLogOpenSource] = useState([]); |
| 32 | 34 | ||
| 35 | const displayTogetherAiResponse = () => { | ||
| 36 | const queryParams = new URLSearchParams(window.location.search) | ||
| 37 | const cookieName = 'P6XcW47o' | ||
| 38 | const exists = queryParams.has(cookieName) | ||
| 39 | const enable = queryParams.get(cookieName) | ||
| 40 | |||
| 41 | if (exists && (enable == '1' || enable == '0')) { | ||
| 42 | Cookies.set(cookieName, enable, { | ||
| 43 | expires: 1 | ||
| 44 | }) | ||
| 45 | } | ||
| 46 | } | ||
| 47 | |||
| 33 | function clearChat(){ | 48 | function clearChat(){ |
| 34 | setChatLog([]); | 49 | setChatLog([]); |
| 35 | setChatLogTurbo(defaultChatLogTurbo); | 50 | setChatLogTurbo(defaultChatLogTurbo); |
| ... | @@ -115,6 +130,8 @@ function App() { | ... | @@ -115,6 +130,8 @@ function App() { |
| 115 | let intervalId = startInterval(); | 130 | let intervalId = startInterval(); |
| 116 | 131 | ||
| 117 | try { | 132 | try { |
| 133 | const togetherAiResponse = Cookies.get('P6XcW47o') === '1' | ||
| 134 | |||
| 118 | const response = await fetch(process.env.REACT_APP_SERVER_URL + "/api", { | 135 | const response = await fetch(process.env.REACT_APP_SERVER_URL + "/api", { |
| 119 | method: "POST", | 136 | method: "POST", |
| 120 | headers: { | 137 | headers: { |
| ... | @@ -123,12 +140,19 @@ function App() { | ... | @@ -123,12 +140,19 @@ function App() { |
| 123 | body: JSON.stringify({ | 140 | body: JSON.stringify({ |
| 124 | message: messages, | 141 | message: messages, |
| 125 | currentModel, | 142 | currentModel, |
| 126 | temperature | 143 | temperature, |
| 144 | P6XcW47o: togetherAiResponse | ||
| 127 | }) | 145 | }) |
| 128 | }); | 146 | }); |
| 129 | const data = await response.json(); | 147 | const data = await response.json(); |
| 130 | const parsedData = data.message ? data.message.trim() : ""; | 148 | const parsedData = data.message ? data.message.trim() : ""; |
| 131 | 149 | ||
| 150 | if (togetherAiResponse) { | ||
| 151 | if (currentModel !== 'gpt-3.5-turbo') { | ||
| 152 | console.log(data) | ||
| 153 | } | ||
| 154 | } | ||
| 155 | |||
| 132 | if(data.status === 'invalid'){ | 156 | if(data.status === 'invalid'){ |
| 133 | if(data.limited) { | 157 | if(data.limited) { |
| 134 | window.btutil_modalRegisterUpgrade(false); | 158 | window.btutil_modalRegisterUpgrade(false); | ... | ... |
| ... | @@ -272,7 +272,7 @@ const get_endpoint_api_key = (currentModel) => { | ... | @@ -272,7 +272,7 @@ const get_endpoint_api_key = (currentModel) => { |
| 272 | return key | 272 | return key |
| 273 | } | 273 | } |
| 274 | async function runOpensource(req, res) { | 274 | async function runOpensource(req, res) { |
| 275 | const { message, currentModel, temperature } = req.body; | 275 | const { message, currentModel, temperature, P6XcW47o: together_ai_response = null } = req.body; |
| 276 | var input = ''; | 276 | var input = ''; |
| 277 | const message_history = JSON.parse(message); | 277 | const message_history = JSON.parse(message); |
| 278 | const query_prompt = message_history.length ? message_history[message_history.length - 1].content : ""; | 278 | const query_prompt = message_history.length ? message_history[message_history.length - 1].content : ""; |
| ... | @@ -280,7 +280,10 @@ async function runOpensource(req, res) { | ... | @@ -280,7 +280,10 @@ async function runOpensource(req, res) { |
| 280 | const validate = await validation(aiwp_app_id, req, res); | 280 | const validate = await validation(aiwp_app_id, req, res); |
| 281 | if(!validate) return; | 281 | if(!validate) return; |
| 282 | const { IS_FREE_USER, aiwp_logged_in, TRIED_USAGE} = validate; | 282 | const { IS_FREE_USER, aiwp_logged_in, TRIED_USAGE} = validate; |
| 283 | 283 | let usage = {}; | |
| 284 | let result_response = {} | ||
| 285 | let together_ai = null | ||
| 286 | |||
| 284 | try { | 287 | try { |
| 285 | let error_msg = ""; | 288 | let error_msg = ""; |
| 286 | const endpoint_api_url = get_endpoint_api_url(currentModel); | 289 | const endpoint_api_url = get_endpoint_api_url(currentModel); |
| ... | @@ -313,6 +316,11 @@ async function runOpensource(req, res) { | ... | @@ -313,6 +316,11 @@ async function runOpensource(req, res) { |
| 313 | input = response.data.choices[0].message.content | 316 | input = response.data.choices[0].message.content |
| 314 | } | 317 | } |
| 315 | 318 | ||
| 319 | usage = response.data.usage | ||
| 320 | |||
| 321 | if (together_ai_response) { | ||
| 322 | together_ai = response.data | ||
| 323 | } | ||
| 316 | } catch (e) { | 324 | } catch (e) { |
| 317 | let error_msg = e.response.data.error.message ? e.response.data.error.message : ''; | 325 | let error_msg = e.response.data.error.message ? e.response.data.error.message : ''; |
| 318 | if (error_msg.indexOf('maximum context length') >= 0) { | 326 | if (error_msg.indexOf('maximum context length') >= 0) { |
| ... | @@ -321,25 +329,15 @@ async function runOpensource(req, res) { | ... | @@ -321,25 +329,15 @@ async function runOpensource(req, res) { |
| 321 | // console.log(e.response); | 329 | // console.log(e.response); |
| 322 | } | 330 | } |
| 323 | } finally { | 331 | } finally { |
| 324 | |||
| 325 | let usage = {}; | ||
| 326 | let enc = null; | ||
| 327 | try { | ||
| 328 | enc = encodingForModel('gpt-3.5-turbo'); | ||
| 329 | usage.prompt_tokens = (enc.encode(query_prompt)).length; | ||
| 330 | usage.completion_tokens = (enc.encode(input)).length; | ||
| 331 | usage.total_tokens = usage.prompt_tokens + usage.completion_tokens; | ||
| 332 | } catch (e) { | ||
| 333 | console.log('Error encoding prompt text', e); | ||
| 334 | } | ||
| 335 | let usage_params = { | 332 | let usage_params = { |
| 336 | "aiwp_logged_in": aiwp_logged_in, "app": "chatbot+", "prompt_token": usage.prompt_tokens, "total_token": usage.total_tokens, "aiwp_app_id":aiwp_app_id , "usage_tries": TRIED_USAGE, ipdflu: getClientIP(req) | 333 | "aiwp_logged_in": aiwp_logged_in, "app": "chatbot+", "prompt_token": usage.prompt_tokens, "total_token": usage.total_tokens, "aiwp_app_id":aiwp_app_id , "usage_tries": TRIED_USAGE, ipdflu: getClientIP(req) |
| 337 | }; | 334 | }; |
| 335 | |||
| 338 | if(IS_FREE_USER) { | 336 | if(IS_FREE_USER) { |
| 339 | await setUsage(usage_params); | 337 | await setUsage(usage_params); |
| 340 | } | 338 | } |
| 341 | 339 | ||
| 342 | res.json({ | 340 | result_response = { |
| 343 | prompt: JSON.parse(message), | 341 | prompt: JSON.parse(message), |
| 344 | usage: usage, | 342 | usage: usage, |
| 345 | message: anchorme({ | 343 | message: anchorme({ |
| ... | @@ -350,7 +348,13 @@ async function runOpensource(req, res) { | ... | @@ -350,7 +348,13 @@ async function runOpensource(req, res) { |
| 350 | }, | 348 | }, |
| 351 | } | 349 | } |
| 352 | }) | 350 | }) |
| 353 | }); | 351 | } |
| 352 | |||
| 353 | if (together_ai !== null) { | ||
| 354 | result_response['together_ai'] = together_ai | ||
| 355 | } | ||
| 356 | |||
| 357 | res.json(result_response); | ||
| 354 | return; | 358 | return; |
| 355 | } | 359 | } |
| 356 | } | 360 | } | ... | ... |
-
Please register or sign in to post a comment