17eb87aa by Jonille Arreglo

28594_usage_tracking

1 parent c954a68a
......@@ -5,6 +5,7 @@ const cors = require('cors')
require('dotenv').config()
const rateLimit = require('express-rate-limit')
const anchorme = require("anchorme").default;
const axios = require('axios');
const tiktoken = require('@dqbd/tiktoken');
const tiktokenModels = [
'text-davinci-003',
......@@ -105,6 +106,23 @@ app.post('/api', async (req, res) => {
temperature,
});
let input = response.data.choices[0].text;
// TOKEN USAGE: estimate prompt/completion token counts locally with tiktoken
// and report them (fire-and-forget) to the usage-tracking service.
let usage = { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 };
let enc = null;
try {
    // Fall back to a known encoding when the requested model is not supported.
    // Qualified via the `tiktoken` namespace required above so the name is
    // guaranteed to resolve (the bare `encoding_for_model` was undefined here).
    enc = tiktoken.encoding_for_model(tiktokenModels.includes(currentModel) ? currentModel : 'gpt-3.5-turbo');
    // NOTE(review): assumes `query_prompt` is in scope in this handler — confirm against the full file.
    usage.prompt_tokens = (enc.encode(query_prompt)).length;
    usage.completion_tokens = (enc.encode(input)).length;
    usage.total_tokens = usage.prompt_tokens + usage.completion_tokens;
} catch (e) {
    console.log('Error encoding prompt text', e);
} finally {
    // @dqbd/tiktoken encoders are WASM-backed and must be freed explicitly,
    // otherwise every request leaks native memory.
    if (enc) enc.free();
}
// TOKEN USAGE report. The .catch() ensures a tracking outage can never
// surface as an unhandled promise rejection in the request handler.
axios.post(`${process.env.API_URL}e/set-chat-usage`,
    { app: 'chatbot', prompt_token: usage.prompt_tokens, total_token: usage.total_tokens },
    { headers: { 'content-type': 'application/x-www-form-urlencoded' }
}).catch((e) => console.log('Error posting chat usage', e));
res.json({
message: anchorme({
input,
......@@ -133,6 +151,8 @@ async function runGPTTurbo(req, res) {
// "gpt-3.5-turbo"
// Chat-completion request parameters sent by the client.
const { message, currentModel, temperature } = req.body;
// Accumulates the assistant reply text (used later for token accounting).
var input = '';
// `message` arrives as a JSON-encoded array of chat turns; the prompt that is
// metered for usage is the content of the most recent turn ("" for empty history).
// NOTE(review): JSON.parse throws on malformed input before the try below — confirm upstream validation.
const message_history = JSON.parse(message);
const query_prompt = message_history.length ? message_history[message_history.length - 1].content : "";
try {
const response = await openai.createChatCompletion({
model: `${currentModel}`,
......@@ -149,6 +169,24 @@ async function runGPTTurbo(req, res) {
console.log(e.response);
}
} finally {
// TOKEN USAGE: estimate prompt/completion token counts locally with tiktoken
// and report them (fire-and-forget) to the usage-tracking service.
let usage = { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 };
let enc = null;
try {
    // Fall back to a known encoding when the requested model is not supported.
    // Qualified via the `tiktoken` namespace required at the top of the file so
    // the name is guaranteed to resolve (the bare `encoding_for_model` was undefined).
    enc = tiktoken.encoding_for_model(tiktokenModels.includes(currentModel) ? currentModel : 'gpt-3.5-turbo');
    usage.prompt_tokens = (enc.encode(query_prompt)).length;
    usage.completion_tokens = (enc.encode(input)).length;
    usage.total_tokens = usage.prompt_tokens + usage.completion_tokens;
} catch (e) {
    console.log('Error encoding prompt text', e);
} finally {
    // @dqbd/tiktoken encoders are WASM-backed and must be freed explicitly,
    // otherwise every request leaks native memory.
    if (enc) enc.free();
}
// TOKEN USAGE report. The .catch() ensures a tracking outage can never
// surface as an unhandled promise rejection in the request handler.
axios.post(`${process.env.API_URL}e/set-chat-usage`,
    { app: 'chatbot', prompt_token: usage.prompt_tokens, total_token: usage.total_tokens },
    { headers: { 'content-type': 'application/x-www-form-urlencoded' }
}).catch((e) => console.log('Error posting chat usage', e));
res.json({
prompt: JSON.parse(message),
message: anchorme({
......
......@@ -11,6 +11,7 @@
"dependencies": {
"@dqbd/tiktoken": "^1.0.7",
"anchorme": "^2.1.2",
"axios": "^1.5.1",
"body-parser": "^1.20.1",
"cors": "^2.8.5",
"dotenv": "^16.0.3",
......
Styling with Markdown is supported
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!