3f4efe8d by Janis

Merge branch '26797_formatting' into 'master'

26797_formatting

See merge request !2
2 parents c7cb2442 f00232ef
node_modules
\ No newline at end of file
......@@ -2,6 +2,7 @@
# dependencies
/node_modules
node_modules
/.pnp
.pnp.js
......
This diff could not be displayed because it is too large.
......@@ -8,6 +8,7 @@
"@testing-library/user-event": "^13.5.0",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-openai-api": "^1.0.2",
"react-scripts": "5.0.1",
"web-vitals": "^2.1.4"
},
......
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="#fff" class="bi bi-person" viewBox="0 0 16 16">
<path d="M8 8a3 3 0 1 0 0-6 3 3 0 0 0 0 6Zm2-3a2 2 0 1 1-4 0 2 2 0 0 1 4 0Zm4 8c0 1-1 1-1 1H3s-1 0-1-1 1-4 6-4 6 3 6 4Zm-1-.004c-.001-.246-.154-.986-.832-1.664C11.516 10.68 10.289 10 8 10c-2.29 0-3.516.68-4.168 1.332-.678.678-.83 1.418-.832 1.664h10Z"/>
</svg>
\ No newline at end of file
......@@ -9,7 +9,7 @@
.sidemenu {
width:260px;
padding:10px;
background-color:#202123;
background-color: #202123;
}
/* under 640px do this */
@media (max-width: 640px) {
......@@ -34,6 +34,7 @@
padding-right:12px;
}
.chatbox {
flex:1;
background-color:#343541;
......@@ -58,16 +59,17 @@
position:absolute;
bottom:0;
left:0;right:0;
background: rgb(57 57 57);
}
.chat-input-textarea {
background-color:#40414f;
width:90%;
padding:12px;
border-radius:5px;
color:white;
font-size:1.25em;
border:none;
outline:none;
background-color: #40414f;
width: 90%;
padding: 12px;
border-radius: 5px;
color: #ffffff;
font-size: 1.25em;
border: none;
outline: none;
box-shadow: 0 0 8px 0 rgba(0,0,0,0.25);
}
......@@ -94,20 +96,21 @@
}
.chat-message.chatgpt {
background-color:#444654;
background-color:#ffffff;
}
.chat-message-center {
max-width:640px;
margin-left:auto;
margin-right:auto;
display:flex;
padding:12px;
padding-left: 24px;
padding-right: 24px;
padding: 12px 24px;
}
section::-webkit-scrollbar {
display: none;
}
.avatar {
background:white;
background:#ffffff;
border-radius:50%;
width: 40px;
height: 40px;
......@@ -118,6 +121,8 @@
font-size:14px;
color:#444654;
}
.avatar.chatgpt {
background:#0da37f;
border-radius:50%;
......@@ -127,8 +132,11 @@
color:white;
}
.message {
padding-left: 40px;
padding-right: 40px;
padding: 7px 20px;
}
.chat-message.false {
background: #eef2ff;
}
@keyframes App-logo-spin {
......@@ -142,18 +150,18 @@
.select-models {
border: 1px solid white;
padding:12px;
padding: 12px;
border-radius:5px;
color:white;
background:transparent;
color: white;
background: transparent;
outline:none;
cursor:pointer;
max-width:100%;
min-width:100%;
}
.select-models option {
background:black;
color:white;
background: black;
color: #ffffff;
}
.button-picker {
......@@ -208,4 +216,4 @@
}
.submit:hover {
background:#066d55;
}
\ No newline at end of file
}
......
import './normal.css';
import './App.css';
import './color_theme_1.css';
import { useState, useEffect } from 'react';
import SideMenu from './SideMenu'
import ChatBox from './ChatBox'
import OpenAIAPI from "react-openai-api";
function App() {
......@@ -38,7 +40,7 @@ function App() {
setModels(data.models.data)
})
}
async function handleSubmit(e){
e.preventDefault();
let chatLogNew = [...chatLog, { user: "me", message: `${chatInput}`} ]
......@@ -46,30 +48,89 @@ function App() {
setChatLog(chatLogNew)
// fetch response to the api combining the chat log array of messages and seinding it as a message to localhost:3000 as a post
const messages = chatLogNew.map((message) => message.message).join("\n")
const response = await fetch("http://localhost:3080/", {
method: "POST",
headers: {
"Content-Type": "application/json"
},
body: JSON.stringify({
message: messages,
currentModel,
})
});
const data = await response.json();
setChatLog([...chatLogNew, { user: "gpt", message: `${data.message}`} ])
var scrollToTheBottomChatLog = document.getElementsByClassName("chat-log")[0];
scrollToTheBottomChatLog.scrollTop = scrollToTheBottomChatLog.scrollHeight;
// const response = await fetch("http://localhost:3080/", {
// method: "POST",
// headers: {
// "Content-Type": "application/json"
// },
// body: JSON.stringify({
// message: messages,
// currentModel,
// })
// });
// const data = await response.json();
// setChatLog([...chatLogNew, { user: "gpt", message: `${data.message}`} ])
// var scrollToTheBottomChatLog = document.getElementsByClassName("chat-log")[0];
// scrollToTheBottomChatLog.scrollTop = scrollToTheBottomChatLog.scrollHeight;
var oHttp = new XMLHttpRequest();
oHttp.open("POST", "https://api.openai.com/v1/completions");
oHttp.setRequestHeader("Accept", "application/json");
oHttp.setRequestHeader("Content-Type", "application/json");
oHttp.setRequestHeader("Authorization", "Bearer " + "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")
oHttp.onreadystatechange = function () {
if (oHttp.readyState === 4) {
var s = ''
var oJson = {}
if (s != "") s += "\n";
try {
oJson = JSON.parse(oHttp.responseText);
} catch (ex) {
s += "Error: " + ex.message
}
if (oJson.error && oJson.error.message) {
s += "Error: " + oJson.error.message;
} else if (oJson.choices && oJson.choices[0].text) {
s = oJson.choices[0].text;
var a = s.split("?\n");
if (a.length == 2) {
s = a[1];
}
// if (selLang.value != "en-US") {
// var a = s.split("?\n");
// if (a.length == 2) {
// s = a[1];
// }
// }
if (s == "") s = "No response";
console.log('ssssssssssssssssssssss',s);
var replaceBR= (s.replace(/(?:\r\n|\r|\n)/g, "<br>")).replace(/\r?\n|\r/, "");
setChatLog([...chatLogNew, { user: "gpt", message: `${replaceBR}`} ]);
}
}
};
var sModel = currentModel;// "text-davinci-003";
var iMaxTokens = 100;
var sUserId = "1";
var dTemperature =temperature;
var data = {
model: sModel,
prompt: messages,
max_tokens: iMaxTokens,
//user: sUserId,
temperature: dTemperature,
// frequency_penalty: 0.0, //Number between -2.0 and 2.0 Positive value decrease the model's likelihood to repeat the same line verbatim.
//presence_penalty: 0.0, //Number between -2.0 and 2.0. Positive values increase the model's likelihood to talk about new topics.
//stop: ["#", ";"] //Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.
}
oHttp.send(JSON.stringify(data));;
}
function handleTemp(temp) {
if(temp > 1){
setTemperature(1)
// setTemperature(1)
} else if (temp < 0){
setTemperature(0)
// setTemperature(0)
} else {
setTemperature(temp)
// setTemperature(temp)
}
}
......@@ -77,17 +138,17 @@ function App() {
return (
<div className="App">
<SideMenu
currentModel={currentModel}
setCurrentModel={setCurrentModel}
currentModel={currentModel}
setCurrentModel={setCurrentModel}
models={models}
setTemperature={handleTemp}
temperature={temperature}
clearChat={clearChat}
/>
<ChatBox
<ChatBox
chatInput={chatInput}
chatLog={chatLog}
setChatInput={setChatInput}
chatLog={chatLog}
setChatInput={setChatInput}
handleSubmit={handleSubmit} />
</div>
);
......
......@@ -10,7 +10,7 @@ const ChatBox = ({chatLog, setChatInput, handleSubmit, chatInput}) =>
</div>
<div className="chat-input-holder">
<form className="form" onSubmit={handleSubmit}>
<input
<input
rows="1"
value={chatInput}
onChange={(e)=> setChatInput(e.target.value)}
......@@ -26,11 +26,12 @@ const ChatMessage = ({ message }) => {
<div className={`chat-message ${message.user === "gpt" && "chatgpt"}`}>
<div className="chat-message-center">
<div className={`avatar ${message.user === "gpt" && "chatgpt"}`}>
{message.user === "gpt" ? <OpenAISVGLogo /> : <div>You</div>}
{message.user === "gpt" ? <img className="ai-logo" src="../assets/images/bot.png" width="30px"/> : <img className="ai-logo" src="../assets/images/user.svg" />}
</div>
<div className="message">
{/* <div className="message">
{message.message}
</div>
</div> */}
<div className="message" dangerouslySetInnerHTML={{ __html: message.message }} />
</div>
</div>
)
......
const SideMenu = ({ clearChat, currentModel, setCurrentModel, models, setTemperature, temperature }) =>
const SideMenu = ({ clearChat, currentModel, setCurrentModel, models, setTemperature, temperature }) =>
<aside className="sidemenu">
<div className="side-menu-button" onClick={clearChat}>
<span>+</span>
New Chat
</div>
<div className="models">
<label className="side-label">Model</label>
<select
// active if model is select is currentModel
value={currentModel}
className="select-models"
onChange={(e)=>{
setCurrentModel(e.target.value)
}}>
{models && models.length ? models.map((model, index) => (
<option
key={model.id}
value={model.id}>{model.id}</option>
)) : <option
key={"text-davinci-003"}
value={"text-davinci-003"}>{"text-davinci-003"}</option>}
</select>
<Button
text="Smart - Davinci"
onClick={()=>setCurrentModel("text-davinci-003")} />
<Button
text="Code - Crushman"
onClick={()=>setCurrentModel("code-cushman-001")} />
<span className="info">
The model parameter controls the engine used to generate the response. Davinci produces best results.
</span>
<label className="side-label" >Temperature</label>
<input
className="select-models"
type="number"
onChange={(e)=> setTemperature(e.target.value)}
min="0"
max="1"
step="0.1"
value={temperature}
/>
<Button
text="0 - Logical"
onClick={()=>setTemperature(0)} />
<Button
text="0.5 - Balanced"
onClick={()=>setTemperature(0.5)} />
<Button
text="1 - Creative"
onClick={()=>setTemperature(1)} />
<span className="info">
The temperature parameter controls the randomness of the model. 0 is the most logical, 1 is the most creative.
</span>
</div>
</aside>
<div className="ai-logo-container">
<img className="ai-logo" src="../assets/images/AIPRO-WHITE.png" height="50px"/>
</div>
<div className="side-menu-button" onClick={clearChat}>
<span>+</span>
New Chat
</div>
<div className="models">
<label className="side-label">Model</label>
<select
// active if model is select is currentModel
value={currentModel}
className="select-models"
onChange={(e)=>{
setCurrentModel(e.target.value)
}}>
{models && models.length ? models.map((model, index) => (
<option
key={model.id}
value={model.id}>{model.id}</option>
)) : <option
key={"text-davinci-003"}
value={"text-davinci-003"}>{"text-davinci-003"}</option>}
</select>
<Button
text="Smart - Davinci"
onClick={()=>setCurrentModel("text-davinci-003")} />
<Button
text="Code - Crushman"
onClick={()=>setCurrentModel("code-cushman-001")} />
<span className="info">
The model parameter controls the engine used to generate the response. Davinci produces best results.
</span>
<label className="side-label" >Temperature</label>
<input
className="select-models"
type="number"
onChange={(e)=> setTemperature(e.target.value)}
min="0"
max="1"
step="0.1"
value={temperature}
/>
<Button
text="0 - Logical"
onClick={()=>setTemperature(0)} />
<Button
text="0.5 - Balanced"
onClick={()=>setTemperature(0.5)} />
<Button
text="1 - Creative"
onClick={()=>setTemperature(1)} />
<span className="info">
The temperature parameter controls the randomness of the model. 0 is the most logical, 1 is the most creative.
</span>
</div>
</aside>
const Button = ({ onClick, text }) =>
<div
className="button-picker"
<div
className="button-picker"
onClick={onClick}>
{text}
</div>
......
/* color_theme_1.css — theme overrides layered on top of App.css.
   Relies on `!important` throughout to beat the base stylesheet's
   equal-specificity rules; keep that in mind before removing any. */

/* Side menu: dark navy panel background. */
.sidemenu {
background-color: #101827 !important;
}
/* "New Chat" button: glossy blue multi-stop gradient with legacy
   vendor-prefix fallbacks (ordered oldest → newest so the last
   supported declaration wins). */
.side-menu-button {
border:0 solid white;
/* Permalink - use to edit and share this gradient: https://colorzilla.com/gradient-editor/#cedbe9+0,aac5de+17,6199c7+50,3a84c3+51,419ad6+59,4bb8f0+71,3a8bc2+84,26558b+100;Blue+Gloss */
background: rgb(206,219,233); /* Old browsers */
background: -moz-linear-gradient(-45deg, rgba(206,219,233,1) 0%, rgba(170,197,222,1) 17%, rgba(97,153,199,1) 50%, rgba(58,132,195,1) 51%, rgba(65,154,214,1) 59%, rgba(75,184,240,1) 71%, rgba(58,139,194,1) 84%, rgba(38,85,139,1) 100%); /* FF3.6-15 */
background: -webkit-linear-gradient(-45deg, rgba(206,219,233,1) 0%,rgba(170,197,222,1) 17%,rgba(97,153,199,1) 50%,rgba(58,132,195,1) 51%,rgba(65,154,214,1) 59%,rgba(75,184,240,1) 71%,rgba(58,139,194,1) 84%,rgba(38,85,139,1) 100%); /* Chrome10-25,Safari5.1-6 */
background: linear-gradient(135deg, rgba(206,219,233,1) 0%,rgba(170,197,222,1) 17%,rgba(97,153,199,1) 50%,rgba(58,132,195,1) 51%,rgba(65,154,214,1) 59%,rgba(75,184,240,1) 71%,rgba(58,139,194,1) 84%,rgba(38,85,139,1) 100%); /* W3C, IE10+, FF16+, Chrome26+, Opera12+, Safari7+ */
filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#cedbe9', endColorstr='#26558b',GradientType=1 ); /* IE6-9 fallback on horizontal gradient */
text-shadow: 1px 1px 0px black;
font-weight: 500;
}
/* Helper text under the model/temperature pickers. */
span.info {
text-align: left;
width: 100% !important;
display: block;
padding: 10px;
line-height: 1.5;
}
/* Main chat area: white background instead of the base dark theme. */
.chatbox {
background-color:#ffffff !important;
}
/* Input bar pinned at the bottom of the chat area. */
.chat-input-holder {
padding:15px !important;
background: rgb(57 57 57) !important;
}
/* Message entry field: light grey with dark text (inverts base theme). */
.chat-input-textarea {
background-color: #dddddd !important;
color: #101827 !important;
font-size: 16px !important;
box-shadow: 0px 7px 6px -6px black !important;
}
/* Bot (GPT) message rows stay white so they blend with .chatbox. */
.chat-message.chatgpt {
background-color:#ffffff !important;
}
.chat-message-center {
padding: 20px 10px !important;
}
/* User avatar: green rounded square with drop shadow. */
.avatar {
background:#6BA447 !important;
border-radius:5px !important;
box-shadow: 0px 5px 5px -3px black !important;
}
/* Bot avatar: blue variant of the same treatment. */
.avatar.chatgpt {
background:#667DF1 !important;
border-radius:5px !important;
box-shadow: 0px 5px 5px -3px black !important;
}
/* Message body typography. */
.message {
padding: 7px 20px !important;
line-height: 25px;
font-size: 14px;
font-family: "Poppins", "Karla", sans-serif;
color: #353b4f;
-moz-osx-font-smoothing: grayscale;
-webkit-font-smoothing: antialiased !important;
-moz-font-smoothing: antialiased !important;
text-rendering: optimizelegibility !important;
}
/* Model/temperature picker controls.
   NOTE(review): `background` and `color` are each declared twice below;
   per the CSS cascade the later declaration wins, so #4d4f54 and the
   first `color` value are dead — confirm intent and delete the losers. */
.select-models {
padding:5px 10px !important;
background: #4d4f54 !important;
color: #5c6aa5 !important;
background: #ffffff !important;
border: 0 solid white;
}
.select-models option {
background: #2c374b !important;
color: #ffffff;
font-size: 12px;
}
/* Preset buttons under the pickers. */
.button-picker {
background:#5c6aa5 !important;
}
/* Submit button: glossy green multi-stop gradient with legacy fallbacks
   (same vendor-prefix ordering convention as .side-menu-button). */
.submit {
/* Permalink - use to edit and share this gradient: https://colorzilla.com/gradient-editor/#b4ddb4+0,83c783+17,52b152+33,008a00+67,005700+83,002400+100;Green+3D+%231 */
background: rgb(180,221,180); /* Old browsers */
background: -moz-linear-gradient(-45deg, rgba(180,221,180,1) 0%, rgba(131,199,131,1) 17%, rgba(82,177,82,1) 33%, rgba(0,138,0,1) 67%, rgba(0,87,0,1) 83%, rgba(0,36,0,1) 100%); /* FF3.6-15 */
background: -webkit-linear-gradient(-45deg, rgba(180,221,180,1) 0%,rgba(131,199,131,1) 17%,rgba(82,177,82,1) 33%,rgba(0,138,0,1) 67%,rgba(0,87,0,1) 83%,rgba(0,36,0,1) 100%); /* Chrome10-25,Safari5.1-6 */
background: linear-gradient(135deg, rgba(180,221,180,1) 0%,rgba(131,199,131,1) 17%,rgba(82,177,82,1) 33%,rgba(0,138,0,1) 67%,rgba(0,87,0,1) 83%,rgba(0,36,0,1) 100%); /* W3C, IE10+, FF16+, Chrome26+, Opera12+, Safari7+ */
filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#b4ddb4', endColorstr='#002400',GradientType=1 ); /* IE6-9 fallback on horizontal gradient */
box-shadow: 0 10px 12px -8px black;
}
/* Centers the product logo at the top of the side menu. */
.ai-logo-container {
text-align: center;
padding: 10px 0;
}
\ No newline at end of file
......@@ -5,8 +5,8 @@ const cors = require('cors')
// Open AI Configuration
const configuration = new Configuration({
organization: "org-organization",
apiKey: "sk-apiKey",
organization: "org-2OIAoj4fSwE4RCzgvglUM55T",
apiKey: "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",
});
const openai = new OpenAIApi(configuration);
......@@ -19,7 +19,7 @@ app.use(cors())
app.use(require('morgan')('dev'))
// Routing
// Routing
// Primary Open AI Route
app.post('/', async (req, res) => {
......@@ -27,7 +27,7 @@ app.post('/', async (req, res) => {
const response = await openai.createCompletion({
model: `${currentModel}`,// "text-davinci-003",
prompt: `${message}`,
max_tokens: 100,
max_tokens: 100,
temperature,
});
res.json({
......
Styling with Markdown is supported
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!