71af6bfe by Jonille Arreglo

Merge branch '28594_usage_tracking' into 'master'

28594_usage_tracking

See merge request !72
2 parents 1b4c3239 87e01071
......@@ -6,6 +6,47 @@ require('dotenv').config()
const rateLimit = require('express-rate-limit')
const anchorme = require("anchorme").default;
const axios = require('axios');
const tiktoken = require('@dqbd/tiktoken');
// Model names that @dqbd/tiktoken's encoding_for_model() recognizes.
// Used to decide whether a model-specific encoder exists before falling
// back to the gpt-3.5-turbo encoder. Order and contents are load-bearing
// only for membership checks (`.includes`), but are kept stable anyway.
const tiktokenModels = [
  // GPT-3 text-completion models
  'text-davinci-003',
  'text-davinci-002',
  'text-davinci-001',
  'text-curie-001',
  'text-babbage-001',
  'text-ada-001',
  // Base GPT-3 models
  'davinci',
  'curie',
  'babbage',
  'ada',
  // Codex code models
  'code-davinci-002',
  'code-davinci-001',
  'code-cushman-002',
  'code-cushman-001',
  'davinci-codex',
  'cushman-codex',
  // Edit models
  'text-davinci-edit-001',
  'code-davinci-edit-001',
  // Embedding / similarity / search models
  'text-embedding-ada-002',
  'text-similarity-davinci-001',
  'text-similarity-curie-001',
  'text-similarity-babbage-001',
  'text-similarity-ada-001',
  'text-search-davinci-doc-001',
  'text-search-curie-doc-001',
  'text-search-babbage-doc-001',
  'text-search-ada-doc-001',
  'code-search-babbage-code-001',
  'code-search-ada-code-001',
  // GPT-2 and chat models
  'gpt2',
  'gpt-4',
  'gpt-4-0314',
  'gpt-4-32k',
  'gpt-4-32k-0314',
  'gpt-3.5-turbo',
  'gpt-3.5-turbo-0301'
];
// Grab the encoder factory from the tiktoken module once at load time.
const { encoding_for_model } = tiktoken;
// Name of the cookie carrying the logged-in user's secret; the env var
// USER_SECRET_ID overrides the default when set to a non-empty value.
const user_secret_id = process.env.USER_SECRET_ID || "aiwp_logged_in";
// Open AI Configuration
// console.log(process.env.OPENAI_API_ORG)
......@@ -79,6 +120,20 @@ app.post('/api', async (req, res) => {
temperature,
});
let input = response.data.choices[0].text;
let usage = {};
let enc = null;
try {
  // Tokenize locally to measure usage. Fall back to the gpt-3.5-turbo
  // encoder when tiktoken has no encoder for the current model.
  enc = encoding_for_model(tiktokenModels.includes(currentModel) ? currentModel : 'gpt-3.5-turbo');
  usage.prompt_tokens = (enc.encode(query_prompt)).length;
  usage.completion_tokens = (enc.encode(input)).length;
  usage.total_tokens = usage.prompt_tokens + usage.completion_tokens;
  // TOKEN USAGE
  btutil_setChatUsage('chatbot+', usage.prompt_tokens, usage.total_tokens);
} catch (e) {
  console.log('Error encoding prompt text', e);
} finally {
  // @dqbd/tiktoken encoders are WASM-backed and must be released
  // explicitly; without free() every request leaks native memory.
  if (enc) enc.free();
}
res.json({
message: anchorme({
input,
......@@ -137,6 +192,24 @@ async function runGPTTurbo(req, res) {
console.log(e.response);
}
} finally {
let usage = {};
let enc = null;
try {
  // Tokenize locally to measure usage; fall back to the gpt-3.5-turbo
  // encoder when tiktoken has no encoder for the current model.
  enc = encoding_for_model(tiktokenModels.includes(currentModel) ? currentModel : 'gpt-3.5-turbo');
  usage.prompt_tokens = (enc.encode(query_prompt)).length;
  usage.completion_tokens = (enc.encode(input)).length;
  usage.total_tokens = usage.prompt_tokens + usage.completion_tokens;
  // TOKEN USAGE — fire-and-forget report to the usage-tracking endpoint.
  // The surrounding synchronous try/catch cannot catch a rejected promise,
  // so a .catch() is attached to avoid an unhandled promise rejection on
  // network failure. NOTE(review): app is 'chatbot' here but 'chatbot+' in
  // the /api handler — confirm the mismatch is intentional.
  axios.post(`${process.env.API_URL}/e/set-chat-usage`,
    { aiwp_logged_in: req.cookies[user_secret_id], app: 'chatbot', prompt_token: usage.prompt_tokens, total_token: usage.total_tokens },
    { headers: { 'content-type': 'application/x-www-form-urlencoded' }
  }).catch((err) => console.log('Error reporting chat usage', err));
} catch (e) {
  console.log('Error encoding prompt text', e);
} finally {
  // Free the WASM-backed encoder to avoid leaking native memory per call.
  if (enc) enc.free();
}
res.json({
prompt: JSON.parse(message),
message: anchorme({
......
......@@ -9,9 +9,12 @@
"author": "",
"license": "ISC",
"dependencies": {
"@dqbd/tiktoken": "^1.0.7",
"anchorme": "^2.1.2",
"axios": "^1.5.1",
"body-parser": "^1.20.1",
"cookie": "0.5.0",
"cookie-parser": "1.4.6",
"cors": "^2.8.5",
"dotenv": "^16.0.3",
"express": "^4.18.2",
......
Styling with Markdown is supported
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!