0c874f9b by Jonille Arreglo

Merge branch 'master' of https://gitlab.baytech.ph/baytech/chatgpt.ai-pro.org into 28594_usage_tracking

# Conflicts:
#	index.js
2 parents 17eb87aa 1b4c3239
This diff could not be displayed because it is too large.
......@@ -10,6 +10,7 @@
width:260px;
padding:10px;
background-color:#202123;
overflow: auto;
}
/* under 640px do this */
@media (max-width: 640px) {
......@@ -129,7 +130,7 @@
/* Chat bubble container. Wrapping was deliberately disabled so oversized
   tokens are clipped by overflow:hidden rather than wrapped. */
.message {
width: 100%;
overflow: hidden;
/* word-wrap: break-word; */
padding-left: 40px;
padding-right: 40px;
}
......
......@@ -47,10 +47,10 @@ function App() {
let model_list = [];
for( var i = 1; i < data.models.data.length; i++ ) {
let model = data.models.data[i];
if( !(model.id == "whisper-1"
|| model.id == "gpt-4"
|| model.id == "gpt-4-0314"
|| model.id == "gpt-4-0613") ) model_list.push(model);
if( !(model.id === "whisper-1"
|| model.id === "gpt-4"
|| model.id === "gpt-4-0314"
|| model.id === "gpt-4-0613") ) model_list.push(model);
}
setModels(model_list)
})
......@@ -86,14 +86,14 @@ function App() {
}
}
let chatLogNew = [...chatLog, { user: "me", message: `${userModifiedInput}`} ]
let chatLogNew = [...chatLog, { user: "user", message: `${userModifiedInput}`} ]
setChatInput("");
setChatLog(chatLogNew)
const userMessage = { user: "gpt", message: "..." };
setChatLog(prevChatLog => [...prevChatLog, userMessage]);
var messages = chatLogNew.map((message) => message.message).join("\n")
var messages = chatLogNew.map((message) => { if(message.user !== 'me') return message.message }).join("\n")
if(currentModel == GPTTurbo || currentModel == GPTTurbo0301) {
// "gpt-3.5-turbo"
let chatLogTurboNew = [...chatLogTurbo, { role: "user", content: chatInput }];
......@@ -116,7 +116,16 @@ function App() {
const data = await response.json();
const parsedData = data.message.trim();
// "gpt-3.5-turbo"
let chatLogTurboNew = [...chatLogTurbo, { role: "assistant", content: parsedData }];
let chatLogTurboNew = chatLogTurbo;
if(data.success === false) {
setChatLog(prevChatLog => {
const lastMsg = prevChatLog[prevChatLog.length - 2];
return [...prevChatLog.slice(0, prevChatLog.length - 2), { user: "me", message: lastMsg.message }];
});
userModifiedInput = "";
}
chatLogTurboNew.push({ role: "user", content: userModifiedInput });
chatLogTurboNew.push({ role: "assistant", content: parsedData });
setChatLogTurbo(chatLogTurboNew);
//
clearInterval(intervalId);
......
import React, { useState } from "react";
import SuggestedOptions from './suggestedOptions'
import ExportButton from "./ExportButton";
const ChatBox = ({chatLog, setChatInput, handleSubmit, chatInput, startedInteraction, setStartedInteraction}) => {
return (
......@@ -9,10 +9,11 @@ const ChatBox = ({chatLog, setChatInput, handleSubmit, chatInput, startedInterac
<SuggestedOptions setChatInput={setChatInput}/>
) : (
<>
<div className="chat-log">
<div className="chat-log response-to-export">
{chatLog.map((message, index) => (
<ChatMessage key={index} message={message} />
))}
<ExportButton className="export-button-mobile" label="Export Conversation" filename="Chat-Bot-Plus" />
</div>
</>
......@@ -39,9 +40,9 @@ const ChatBox = ({chatLog, setChatInput, handleSubmit, chatInput, startedInterac
const ChatMessage = ({ message }) => {
return (
<div className={`chat-message ${message.user === "gpt" && "chatgpt"}`}>
<div className="chat-message-center">
<div className="chat-message-center" style={ message.user === "gpt" ? { background: "#ddf1f9"} : {}}>
<div className={`avatar ${message.user === "gpt" && "chatgpt"}`}>
{message.user === "gpt" ? <img className="ai-logo" alt="Ai-pro bot" src="../assets/images/bot.png" width="30px"/> : <img className="ai-logo" alt="Ai-pro user" src="../assets/images/user.svg" />}
{message.user === "gpt" ? <img className="ai-logo" alt="Ai-pro bot" src="../assets/images/bot.png" width="30px"/> : <img className="ai-logo" alt="Ai-pro user" src="../assets/images/user.png" />}
</div>
{/* <div className="message">
{message.message}
......
export default function ExportButton({
label = "Export",
filename = "export",
className = "",
id = "",
}) {
const responseToExport = () => {
const response_to_export = document.querySelector(".response-to-export");
if (!response_to_export) return;
return response_to_export?.innerHTML ?? "";
};
const generatePDF = window.generatePDF;
const onClickExportToPDF = () => {
const response = responseToExport();
generatePDF(response, filename);
};
const onClickExportButton = () => {
let modal = document.querySelector(".export-modal-container");
const response = responseToExport();
if (!response) return;
if (!modal) {
const btutil_buildExportModal = window.btutil_buildExportModal;
modal = btutil_buildExportModal(onClickExportToPDF);
document.body.appendChild(modal);
}
modal.classList.add("active");
};
return (
<>
<div
className={`export-button ${className}`}
id={id}
onClick={onClickExportButton}
>
<svg
fill="#ffffff"
xmlns="http://www.w3.org/2000/svg"
height="1em"
viewBox="0 0 512 512"
>
<path d="M216 0h80c13.3 0 24 10.7 24 24v168h87.7c17.8 0 26.7 21.5 14.1 34.1L269.7 378.3c-7.5 7.5-19.8 7.5-27.3 0L90.1 226.1c-12.6-12.6-3.7-34.1 14.1-34.1H192V24c0-13.3 10.7-24 24-24zm296 376v112c0 13.3-10.7 24-24 24H24c-13.3 0-24-10.7-24-24V376c0-13.3 10.7-24 24-24h146.7l49 49c20.1 20.1 52.5 20.1 72.6 0l49-49H488c13.3 0 24 10.7 24 24zm-124 88c0-11-9-20-20-20s-20 9-20 20 9 20 20 20 20-9 20-20zm64 0c0-11-9-20-20-20s-20 9-20 20 9 20 20 20 20-9 20-20z" />
</svg>
<span>{label}</span>
</div>
</>
);
}
const SideMenu = ({ clearChat, currentModel, setCurrentModel, models, setTemperature, temperature }) =>
<aside className="sidemenu">
<div className="ai-logo-container">
<img className="ai-logo" alt="Ai-pro logo" src="../assets/images/chatgpt-aipro.png" height="50px"/>
</div>
<div className="side-menu-button" onClick={clearChat}>
<span>+</span>
New Chat
</div>
<div className="models">
import ExportButton from "./ExportButton";
const SideMenu = ({
clearChat,
currentModel,
setCurrentModel,
models,
setTemperature,
temperature,
}) => (
<aside className="sidemenu">
<div className="ai-logo-container">
<img
className="ai-logo"
alt="Ai-pro logo"
src="../assets/images/chatgpt-aipro.png"
height="50px"
/>
</div>
<div className="side-menu-button" onClick={clearChat}>
<span>+</span>
New Chat
</div>
<div className="models">
<label className="side-label">Model</label>
<select
// active if model is select is currentModel
value={currentModel}
className="select-models"
onChange={(e)=>{
setCurrentModel(e.target.value)
}}>
{models && models.length ? models.map((model, index) => (
<option
key={model.id}
value={model.id}>{model.id}</option>
)) : <option
key={"gpt-3.5-turbo"}
value={"gpt-3.5-turbo"}>{"gpt-3.5-turbo"}</option>}
</select>
<select
// active if model is select is currentModel
value={currentModel}
className="select-models"
onChange={(e) => {
setCurrentModel(e.target.value);
}}
>
{models && models.length ? (
models.map((model, index) => (
<option key={model.id} value={model.id}>
{model.id}
</option>
))
) : (
<option key={"gpt-3.5-turbo"} value={"gpt-3.5-turbo"}>
{"gpt-3.5-turbo"}
</option>
)}
</select>
<Button
text="Smart - Davinci"
onClick={()=>setCurrentModel("text-davinci-003")} />
<Button
text="Code - Crushman"
onClick={()=>setCurrentModel("code-cushman-001")} />
<span className="info">
The model parameter controls the engine used to generate the response. Davinci produces best results.
</span>
<label className="side-label" >Temperature</label>
<input
className="select-models"
type="number"
onChange={(e)=> setTemperature(e.target.value)}
min="0"
max="1"
step="0.1"
value={temperature}
/>
<Button
text="0 - Logical"
onClick={()=>setTemperature(0)} />
<Button
text="0.5 - Balanced"
onClick={()=>setTemperature(0.5)} />
<Button
text="1 - Creative"
onClick={()=>setTemperature(1)} />
<span className="info">
The temperature parameter controls the randomness of the model. 0 is the most logical, 1 is the most creative.
</span>
</div>
</aside>
<Button
text="Smart - Davinci"
onClick={() => setCurrentModel("text-davinci-003")}
/>
<Button
text="Code - Crushman"
onClick={() => setCurrentModel("code-cushman-001")}
/>
<span className="info">
The model parameter controls the engine used to generate the response.
Davinci produces best results.
</span>
<label className="side-label">Temperature</label>
<input
className="select-models"
type="number"
onChange={(e) => setTemperature(e.target.value)}
min="0"
max="1"
step="0.1"
value={temperature}
/>
<Button text="0 - Logical" onClick={() => setTemperature(0)} />
<Button text="0.5 - Balanced" onClick={() => setTemperature(0.5)} />
<Button text="1 - Creative" onClick={() => setTemperature(1)} />
<span className="info">
The temperature parameter controls the randomness of the model. 0 is the
most logical, 1 is the most creative.
</span>
<ExportButton label="Export Conversation" filename="Chat-Bot-Plus" />
</div>
</aside>
);
const Button = ({ onClick, text }) =>
<div
className="button-picker"
onClick={onClick}>
const Button = ({ onClick, text }) => (
<div className="button-picker" onClick={onClick}>
{text}
</div>
);
// Single default export (the diff dump contained a duplicate
// `export default SideMenu` plus a stranded "no newline" marker, which
// would be a SyntaxError).
export default SideMenu;
......
@import url('https://fonts.googleapis.com/css2?family=Alegreya+Sans:ital,wght@0,400;0,500;1,400&display=swap');
/* @import url('https://fonts.googleapis.com/css2?family=Alegreya+Sans:ital,wght@0,400;0,500;1,400&display=swap'); */
body {
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
font-family: "Alegreya Sans", sans-serif;
/* font-family: "Alegreya Sans", sans-serif; */
}
.sidemenu {
background-color: #101827 !important;
......
......@@ -10,4 +10,23 @@ body {
/* Monospace stack for inline/block code snippets. */
code {
font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New',
monospace;
}
/* Mobile-only export shortcut, pinned to the lower-right edge above the
   input bar. Hidden by default; the media query below re-enables it on
   small screens. `!important` is needed to override the shared
   `.export-button` display rule. */
.export-button-mobile {
display: none !important;
position: fixed;
bottom: 85px;
right: 0;
min-width: 50px;
box-shadow: 0px 4px 4px 0px #00000040;
}
/* Icon-only on mobile: suppress the text label. */
.export-button-mobile span {
display: none;
}
/* Show the floating button only at phone widths (matches the 640px
   breakpoint used elsewhere in the app's stylesheets). */
@media screen and (max-width: 640px) {
.export-button-mobile {
display: flex !important;
}
}
\ No newline at end of file
......
......@@ -98,6 +98,19 @@ app.post('/api', async (req, res) => {
query_prompt = arr_body.join("\n")
}
}
const moderation = await axios.post("https://api.openai.com/v1/moderations", {
input: query_prompt
}, { headers: { 'content-type': 'application/json', 'Authorization': `Bearer ${process.env.OPENAI_API_KEY}` } });
if(moderation.data.results[0].flagged) {
res.json({
success: false,
message: "I'm sorry, but I can't assist with that. We want everyone to use our tool safely and responsibly.\nIf you have any other questions or need advice on a different topic, feel free to ask."
});
res.end();
return;
}
try {
const response = await openai.createCompletion({
model: `${currentModel}`,// "text-davinci-003",
......@@ -153,6 +166,18 @@ async function runGPTTurbo(req, res) {
var input = '';
const message_history = JSON.parse(message);
const query_prompt = message_history.length ? message_history[message_history.length - 1].content : "";
const moderation = await axios.post("https://api.openai.com/v1/moderations", {
input: query_prompt
}, { headers: { 'content-type': 'application/json', 'Authorization': `Bearer ${process.env.OPENAI_API_KEY}` } });
if(moderation.data.results[0].flagged) {
res.json({
success: false,
message: "I'm sorry, but I can't assist with that. We want everyone to use our tool safely and responsibly.\nIf you have any other questions or need advice on a different topic, feel free to ask."
});
res.end();
return;
}
try {
const response = await openai.createChatCompletion({
model: `${currentModel}`,
......@@ -177,16 +202,16 @@ async function runGPTTurbo(req, res) {
usage.prompt_tokens = (enc.encode(query_prompt)).length;
usage.completion_tokens = (enc.encode(input)).length;
usage.total_tokens = usage.prompt_tokens + usage.completion_tokens;
// TOKEN USAGE
axios.post(`${process.env.API_URL}e/set-chat-usage`,
{ app: 'chatbot', prompt_token: usage.prompt_tokens, total_token: usage.total_tokens },
{ headers: { 'content-type': 'application/x-www-form-urlencoded' }
});
} catch (e) {
console.log('Error encoding prompt text', e);
}
// TOKEN USAGE
axios.post(`${process.env.API_URL}e/set-chat-usage`,
{ app: 'chatbot', prompt_token: usage.prompt_tokens, total_token: usage.total_tokens },
{ headers: { 'content-type': 'application/x-www-form-urlencoded' }
});
res.json({
prompt: JSON.parse(message),
message: anchorme({
......
Styling with Markdown is supported
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!