25cf9790 by Administrator

Merge branch '31531_apply_tokenization_on_other_llms' into 'master'

#31531 - Apply tokenization logic on other LLMs

See merge request !112
2 parents 2ed7286d 4bcf56e7
Pipeline #29840 for 25cf9790 passed in 18 seconds
......@@ -6,6 +6,7 @@
"@testing-library/jest-dom": "^5.16.5",
"@testing-library/react": "^13.4.0",
"@testing-library/user-event": "^13.5.0",
"js-cookie": "^3.0.5",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-scripts": "5.0.1",
......
......@@ -4,11 +4,13 @@ import './color_theme_1.css';
import { useState, useEffect } from 'react';
import SideMenu from './SideMenu'
import ChatBox from './ChatBox'
import Cookies from 'js-cookie'
function App() {
useEffect(() => {
getEngines();
displayTogetherAiResponse()
}, [])
const [chatInput, setChatInput] = useState("");
......@@ -30,6 +32,19 @@ function App() {
const [chatLogOpenSource, setChatLogOpenSource] = useState([]);
const displayTogetherAiResponse = () => {
  // Reads the Together-AI opt-in flag from the page's query string and,
  // when explicitly set to '1' (enable) or '0' (disable), persists it as a
  // 1-day cookie so later API calls can forward the preference.
  const queryParams = new URLSearchParams(window.location.search);
  const cookieName = 'P6XcW47o';
  // URLSearchParams.get() returns null when the param is absent, so a
  // separate has() check is redundant; only the two explicit values pass.
  const enable = queryParams.get(cookieName);
  if (enable === '1' || enable === '0') {
    Cookies.set(cookieName, enable, {
      expires: 1, // days (js-cookie convention)
    });
  }
};
function clearChat(){
setChatLog([]);
setChatLogTurbo(defaultChatLogTurbo);
......@@ -115,6 +130,8 @@ function App() {
let intervalId = startInterval();
try {
const togetherAiResponse = Cookies.get('P6XcW47o') === '1'
const response = await fetch(process.env.REACT_APP_SERVER_URL + "/api", {
method: "POST",
headers: {
......@@ -123,12 +140,19 @@ function App() {
body: JSON.stringify({
message: messages,
currentModel,
temperature
temperature,
P6XcW47o: togetherAiResponse
})
});
const data = await response.json();
const parsedData = data.message ? data.message.trim() : "";
if (togetherAiResponse) {
if (currentModel !== 'gpt-3.5-turbo') {
console.log(data)
}
}
if(data.status === 'invalid'){
if(data.limited) {
window.btutil_modalRegisterUpgrade(false);
......
......@@ -272,7 +272,7 @@ const get_endpoint_api_key = (currentModel) => {
return key
}
async function runOpensource(req, res) {
const { message, currentModel, temperature } = req.body;
const { message, currentModel, temperature, P6XcW47o: together_ai_response = null } = req.body;
var input = '';
const message_history = JSON.parse(message);
const query_prompt = message_history.length ? message_history[message_history.length - 1].content : "";
......@@ -280,7 +280,10 @@ async function runOpensource(req, res) {
const validate = await validation(aiwp_app_id, req, res);
if(!validate) return;
const { IS_FREE_USER, aiwp_logged_in, TRIED_USAGE} = validate;
let usage = {};
let result_response = {}
let together_ai = null
try {
let error_msg = "";
const endpoint_api_url = get_endpoint_api_url(currentModel);
......@@ -313,6 +316,11 @@ async function runOpensource(req, res) {
input = response.data.choices[0].message.content
}
usage = response.data.usage
if (together_ai_response) {
together_ai = response.data
}
} catch (e) {
let error_msg = e.response.data.error.message ? e.response.data.error.message : '';
if (error_msg.indexOf('maximum context length') >= 0) {
......@@ -321,25 +329,15 @@ async function runOpensource(req, res) {
// console.log(e.response);
}
} finally {
let usage = {};
let enc = null;
try {
enc = encodingForModel('gpt-3.5-turbo');
usage.prompt_tokens = (enc.encode(query_prompt)).length;
usage.completion_tokens = (enc.encode(input)).length;
usage.total_tokens = usage.prompt_tokens + usage.completion_tokens;
} catch (e) {
console.log('Error encoding prompt text', e);
}
let usage_params = {
"aiwp_logged_in": aiwp_logged_in, "app": "chatbot+", "prompt_token": usage.prompt_tokens, "total_token": usage.total_tokens, "aiwp_app_id":aiwp_app_id , "usage_tries": TRIED_USAGE, ipdflu: getClientIP(req)
};
if(IS_FREE_USER) {
await setUsage(usage_params);
}
res.json({
result_response = {
prompt: JSON.parse(message),
usage: usage,
message: anchorme({
......@@ -350,7 +348,13 @@ async function runOpensource(req, res) {
},
}
})
});
}
if (together_ai !== null) {
result_response['together_ai'] = together_ai
}
res.json(result_response);
return;
}
}
......
Styling with Markdown is supported
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!