6a728a14 by Leff Tubat

set default model to gpt-4o-mini

1 parent 4b88ee48
......@@ -13,10 +13,12 @@ function App() {
displayTogetherAiResponse()
}, [])
const DEFAULT_MODEL = process.env.REACT_APP_DEFAULT_MODEL || 'gpt-4o-mini';
const [chatInput, setChatInput] = useState("");
const [models, setModels] = useState([]);
const [temperature, setTemperature] = useState(0.7);
const GPTTurbo = "gpt-3.5-turbo";
const GPTTurbo = DEFAULT_MODEL;
const [currentModel, setCurrentModel] = useState(GPTTurbo);
const [chatLog, setChatLog] = useState([{
user: "gpt",
......@@ -149,7 +151,7 @@ function App() {
const parsedData = data.message ? data.message.trim() : "";
if (togetherAiResponse) {
if (currentModel !== 'gpt-3.5-turbo') {
if (currentModel !== 'gpt-4o-mini') {
console.log(data)
}
}
......
import ExportButton from "./ExportButton";
const DEFAULT_MODEL = process.env.REACT_APP_DEFAULT_MODEL || 'gpt-4o-mini';
const SideMenu = ({
clearChat,
currentModel,
......@@ -33,8 +34,8 @@ const SideMenu = ({
</option>
))
) : (
<option key={"gpt-3.5-turbo"} value={"gpt-3.5-turbo"}>
{"gpt-3.5-turbo"}
<option key={DEFAULT_MODEL} value={DEFAULT_MODEL}>
{DEFAULT_MODEL}
</option>
)}
</select>
......
......@@ -10,6 +10,7 @@ const fetch = require('node-fetch');
const anchorme = require("anchorme").default;
const axios = require('axios');
const { encodingForModel } = require('js-tiktoken');
const DEFAULT_MODEL = process.env.REACT_APP_DEFAULT_MODEL || 'gpt-4o-mini';
const tiktokenModels = [
'text-davinci-003',
'text-davinci-002',
......@@ -45,7 +46,7 @@ const tiktokenModels = [
'gpt-4-0314',
'gpt-4-32k',
'gpt-4-32k-0314',
'gpt-3.5-turbo',
'gpt-4o-mini',
'gpt-3.5-turbo-0301'
];
......@@ -53,7 +54,7 @@ let client;
let filteredModels = {};
const allowedEndpoints = ["openAI", "Opensource", "Llama"];
const allowedModels = [
"gpt-3.5-turbo",
DEFAULT_MODEL,
"google/gemma-2-9b-it",
"meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo"
];
......@@ -157,7 +158,7 @@ app.post('/api', async (req, res) => {
let usage = {};
let enc = null;
try {
enc = encodingForModel(tiktokenModels.includes(currentModel) ? currentModel : 'gpt-3.5-turbo');
enc = encodingForModel(tiktokenModels.includes(currentModel) ? currentModel : DEFAULT_MODEL);
usage.prompt_tokens = (enc.encode(query_prompt)).length;
usage.completion_tokens = (enc.encode(input)).length;
usage.total_tokens = usage.prompt_tokens + usage.completion_tokens;
......@@ -240,7 +241,7 @@ async function runGPTTurbo(req, res) {
let usage = {};
let enc = null;
try {
enc = encodingForModel(tiktokenModels.includes(currentModel) ? currentModel : 'gpt-3.5-turbo');
enc = encodingForModel(tiktokenModels.includes(currentModel) ? currentModel : DEFAULT_MODEL);
usage.prompt_tokens = (enc.encode(query_prompt)).length;
usage.completion_tokens = (enc.encode(input)).length;
usage.total_tokens = usage.prompt_tokens + usage.completion_tokens;
......
Styling with Markdown is supported
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!