Merge branch 'fix_master_updated_ui' into '1DEVT'
fix_master_updated_ui See merge request !4
Showing 7 changed files with 42 additions and 112 deletions
This diff is collapsed.
Click to expand it.
| ... | @@ -8,7 +8,6 @@ | ... | @@ -8,7 +8,6 @@ |
| 8 | "@testing-library/user-event": "^13.5.0", | 8 | "@testing-library/user-event": "^13.5.0", |
| 9 | "react": "^18.2.0", | 9 | "react": "^18.2.0", |
| 10 | "react-dom": "^18.2.0", | 10 | "react-dom": "^18.2.0", |
| 11 | "react-openai-api": "^1.0.2", | ||
| 12 | "react-scripts": "5.0.1", | 11 | "react-scripts": "5.0.1", |
| 13 | "web-vitals": "^2.1.4" | 12 | "web-vitals": "^2.1.4" |
| 14 | }, | 13 | }, | ... | ... |
| ... | @@ -9,7 +9,7 @@ | ... | @@ -9,7 +9,7 @@ |
| 9 | .sidemenu { | 9 | .sidemenu { |
| 10 | width:260px; | 10 | width:260px; |
| 11 | padding:10px; | 11 | padding:10px; |
| 12 | background-color: #202123; | 12 | background-color:#202123; |
| 13 | } | 13 | } |
| 14 | /* under 640px do this */ | 14 | /* under 640px do this */ |
| 15 | @media (max-width: 640px) { | 15 | @media (max-width: 640px) { |
| ... | @@ -34,7 +34,6 @@ | ... | @@ -34,7 +34,6 @@ |
| 34 | padding-right:12px; | 34 | padding-right:12px; |
| 35 | } | 35 | } |
| 36 | 36 | ||
| 37 | |||
| 38 | .chatbox { | 37 | .chatbox { |
| 39 | flex:1; | 38 | flex:1; |
| 40 | background-color:#343541; | 39 | background-color:#343541; |
| ... | @@ -59,17 +58,16 @@ | ... | @@ -59,17 +58,16 @@ |
| 59 | position:absolute; | 58 | position:absolute; |
| 60 | bottom:0; | 59 | bottom:0; |
| 61 | left:0;right:0; | 60 | left:0;right:0; |
| 62 | background: rgb(57 57 57); | ||
| 63 | } | 61 | } |
| 64 | .chat-input-textarea { | 62 | .chat-input-textarea { |
| 65 | background-color: #40414f; | 63 | background-color:#40414f; |
| 66 | width: 90%; | 64 | width:90%; |
| 67 | padding: 12px; | 65 | padding:12px; |
| 68 | border-radius: 5px; | 66 | border-radius:5px; |
| 69 | color: #ffffff; | 67 | color:white; |
| 70 | font-size: 1.25em; | 68 | font-size:1.25em; |
| 71 | border: none; | 69 | border:none; |
| 72 | outline: none; | 70 | outline:none; |
| 73 | box-shadow: 0 0 8px 0 rgba(0,0,0,0.25); | 71 | box-shadow: 0 0 8px 0 rgba(0,0,0,0.25); |
| 74 | } | 72 | } |
| 75 | 73 | ||
| ... | @@ -96,21 +94,20 @@ | ... | @@ -96,21 +94,20 @@ |
| 96 | } | 94 | } |
| 97 | 95 | ||
| 98 | .chat-message.chatgpt { | 96 | .chat-message.chatgpt { |
| 99 | background-color:#ffffff; | 97 | background-color:#444654; |
| 100 | } | 98 | } |
| 101 | .chat-message-center { | 99 | .chat-message-center { |
| 102 | max-width:640px; | 100 | max-width:640px; |
| 103 | margin-left:auto; | 101 | margin-left:auto; |
| 104 | margin-right:auto; | 102 | margin-right:auto; |
| 105 | display:flex; | 103 | display:flex; |
| 106 | padding: 12px 24px; | 104 | padding:12px; |
| 107 | } | 105 | padding-left: 24px; |
| 106 | padding-right: 24px; | ||
| 108 | 107 | ||
| 109 | section::-webkit-scrollbar { | ||
| 110 | display: none; | ||
| 111 | } | 108 | } |
| 112 | .avatar { | 109 | .avatar { |
| 113 | background:#ffffff; | 110 | background:white; |
| 114 | border-radius:50%; | 111 | border-radius:50%; |
| 115 | width: 40px; | 112 | width: 40px; |
| 116 | height: 40px; | 113 | height: 40px; |
| ... | @@ -121,8 +118,6 @@ section::-webkit-scrollbar { | ... | @@ -121,8 +118,6 @@ section::-webkit-scrollbar { |
| 121 | font-size:14px; | 118 | font-size:14px; |
| 122 | color:#444654; | 119 | color:#444654; |
| 123 | } | 120 | } |
| 124 | |||
| 125 | |||
| 126 | .avatar.chatgpt { | 121 | .avatar.chatgpt { |
| 127 | background:#0da37f; | 122 | background:#0da37f; |
| 128 | border-radius:50%; | 123 | border-radius:50%; |
| ... | @@ -132,11 +127,8 @@ section::-webkit-scrollbar { | ... | @@ -132,11 +127,8 @@ section::-webkit-scrollbar { |
| 132 | color:white; | 127 | color:white; |
| 133 | } | 128 | } |
| 134 | .message { | 129 | .message { |
| 135 | padding: 7px 20px; | 130 | padding-left: 40px; |
| 136 | } | 131 | padding-right: 40px; |
| 137 | |||
| 138 | .chat-message.false { | ||
| 139 | background: #eef2ff; | ||
| 140 | } | 132 | } |
| 141 | 133 | ||
| 142 | @keyframes App-logo-spin { | 134 | @keyframes App-logo-spin { |
| ... | @@ -150,18 +142,18 @@ section::-webkit-scrollbar { | ... | @@ -150,18 +142,18 @@ section::-webkit-scrollbar { |
| 150 | 142 | ||
| 151 | .select-models { | 143 | .select-models { |
| 152 | border: 1px solid white; | 144 | border: 1px solid white; |
| 153 | padding: 12px; | 145 | padding:12px; |
| 154 | border-radius:5px; | 146 | border-radius:5px; |
| 155 | color: white; | 147 | color:white; |
| 156 | background: transparent; | 148 | background:transparent; |
| 157 | outline:none; | 149 | outline:none; |
| 158 | cursor:pointer; | 150 | cursor:pointer; |
| 159 | max-width:100%; | 151 | max-width:100%; |
| 160 | min-width:100%; | 152 | min-width:100%; |
| 161 | } | 153 | } |
| 162 | .select-models option { | 154 | .select-models option { |
| 163 | background: black; | 155 | background:black; |
| 164 | color: #ffffff; | 156 | color:white; |
| 165 | } | 157 | } |
| 166 | 158 | ||
| 167 | .button-picker { | 159 | .button-picker { | ... | ... |
| ... | @@ -4,7 +4,6 @@ import './color_theme_1.css'; | ... | @@ -4,7 +4,6 @@ import './color_theme_1.css'; |
| 4 | import { useState, useEffect } from 'react'; | 4 | import { useState, useEffect } from 'react'; |
| 5 | import SideMenu from './SideMenu' | 5 | import SideMenu from './SideMenu' |
| 6 | import ChatBox from './ChatBox' | 6 | import ChatBox from './ChatBox' |
| 7 | import OpenAIAPI from "react-openai-api"; | ||
| 8 | 7 | ||
| 9 | function App() { | 8 | function App() { |
| 10 | 9 | ||
| ... | @@ -14,7 +13,7 @@ function App() { | ... | @@ -14,7 +13,7 @@ function App() { |
| 14 | 13 | ||
| 15 | const [chatInput, setChatInput] = useState(""); | 14 | const [chatInput, setChatInput] = useState(""); |
| 16 | const [models, setModels] = useState([]); | 15 | const [models, setModels] = useState([]); |
| 17 | const [temperature, setTemperature] = useState(0.5); | 16 | const [temperature, setTemperature] = useState(0.7); |
| 18 | const [currentModel, setCurrentModel] = useState("text-davinci-003"); | 17 | const [currentModel, setCurrentModel] = useState("text-davinci-003"); |
| 19 | const [chatLog, setChatLog] = useState([{ | 18 | const [chatLog, setChatLog] = useState([{ |
| 20 | user: "gpt", | 19 | user: "gpt", |
| ... | @@ -49,88 +48,29 @@ function App() { | ... | @@ -49,88 +48,29 @@ function App() { |
| 49 | // fetch response from the api, combining the chat log array of messages and sending it as a POST to localhost:3080 | 48 | // fetch response from the api, combining the chat log array of messages and sending it as a POST to localhost:3080 |
| 50 | const messages = chatLogNew.map((message) => message.message).join("\n") | 49 | const messages = chatLogNew.map((message) => message.message).join("\n") |
| 51 | 50 | ||
| 52 | 51 | const response = await fetch("http://localhost:3080/", { | |
| 53 | // const response = await fetch("http://localhost:3080/", { | 52 | method: "POST", |
| 54 | // method: "POST", | 53 | headers: { |
| 55 | // headers: { | 54 | "Content-Type": "application/json" |
| 56 | // "Content-Type": "application/json" | 55 | }, |
| 57 | // }, | 56 | body: JSON.stringify({ |
| 58 | // body: JSON.stringify({ | 57 | message: messages, |
| 59 | // message: messages, | 58 | currentModel, |
| 60 | // currentModel, | 59 | }) |
| 61 | // }) | 60 | }); |
| 62 | // }); | 61 | const data = await response.json(); |
| 63 | // const data = await response.json(); | 62 | setChatLog([...chatLogNew, { user: "gpt", message: `${data.message}`} ]) |
| 64 | // setChatLog([...chatLogNew, { user: "gpt", message: `${data.message}`} ]) | 63 | var scrollToTheBottomChatLog = document.getElementsByClassName("chat-log")[0]; |
| 65 | // var scrollToTheBottomChatLog = document.getElementsByClassName("chat-log")[0]; | 64 | scrollToTheBottomChatLog.scrollTop = scrollToTheBottomChatLog.scrollHeight; |
| 66 | // scrollToTheBottomChatLog.scrollTop = scrollToTheBottomChatLog.scrollHeight; | ||
| 67 | |||
| 68 | var oHttp = new XMLHttpRequest(); | ||
| 69 | oHttp.open("POST", "https://api.openai.com/v1/completions"); | ||
| 70 | oHttp.setRequestHeader("Accept", "application/json"); | ||
| 71 | oHttp.setRequestHeader("Content-Type", "application/json"); | ||
| 72 | oHttp.setRequestHeader("Authorization", "Bearer " + "sk-[REDACTED — live OpenAI key was committed here; it is compromised and must be revoked]") | ||
| 73 | |||
| 74 | oHttp.onreadystatechange = function () { | ||
| 75 | if (oHttp.readyState === 4) { | ||
| 76 | var s = '' | ||
| 77 | var oJson = {} | ||
| 78 | if (s != "") s += "\n"; | ||
| 79 | try { | ||
| 80 | oJson = JSON.parse(oHttp.responseText); | ||
| 81 | } catch (ex) { | ||
| 82 | s += "Error: " + ex.message | ||
| 83 | } | ||
| 84 | if (oJson.error && oJson.error.message) { | ||
| 85 | s += "Error: " + oJson.error.message; | ||
| 86 | } else if (oJson.choices && oJson.choices[0].text) { | ||
| 87 | s = oJson.choices[0].text; | ||
| 88 | var a = s.split("?\n"); | ||
| 89 | if (a.length == 2) { | ||
| 90 | s = a[1]; | ||
| 91 | } | ||
| 92 | // if (selLang.value != "en-US") { | ||
| 93 | // var a = s.split("?\n"); | ||
| 94 | // if (a.length == 2) { | ||
| 95 | // s = a[1]; | ||
| 96 | // } | ||
| 97 | // } | ||
| 98 | if (s == "") s = "No response"; | ||
| 99 | console.log('ssssssssssssssssssssss',s); | ||
| 100 | var replaceBR= (s.replace(/(?:\r\n|\r|\n)/g, "<br>")).replace(/\r?\n|\r/, ""); | ||
| 101 | |||
| 102 | setChatLog([...chatLogNew, { user: "gpt", message: `${replaceBR}`} ]); | ||
| 103 | } | ||
| 104 | } | ||
| 105 | }; | ||
| 106 | |||
| 107 | var sModel = currentModel;// "text-davinci-003"; | ||
| 108 | var iMaxTokens = 100; | ||
| 109 | var sUserId = "1"; | ||
| 110 | var dTemperature =temperature; | ||
| 111 | |||
| 112 | var data = { | ||
| 113 | model: sModel, | ||
| 114 | prompt: messages, | ||
| 115 | max_tokens: iMaxTokens, | ||
| 116 | //user: sUserId, | ||
| 117 | temperature: dTemperature, | ||
| 118 | // frequency_penalty: 0.0, //Number between -2.0 and 2.0 Positive value decrease the model's likelihood to repeat the same line verbatim. | ||
| 119 | //presence_penalty: 0.0, //Number between -2.0 and 2.0. Positive values increase the model's likelihood to talk about new topics. | ||
| 120 | //stop: ["#", ";"] //Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence. | ||
| 121 | } | ||
| 122 | |||
| 123 | oHttp.send(JSON.stringify(data));; | ||
| 124 | |||
| 125 | } | 65 | } |
| 126 | 66 | ||
| 127 | function handleTemp(temp) { | 67 | function handleTemp(temp) { |
| 128 | if(temp > 1){ | 68 | if(temp > 1){ |
| 129 | // setTemperature(1) | 69 | setTemperature(1) |
| 130 | } else if (temp < 0){ | 70 | } else if (temp < 0){ |
| 131 | // setTemperature(0) | 71 | setTemperature(0) |
| 132 | } else { | 72 | } else { |
| 133 | // setTemperature(temp) | 73 | setTemperature(temp) |
| 134 | } | 74 | } |
| 135 | 75 | ||
| 136 | } | 76 | } | ... | ... |
| ... | @@ -57,7 +57,7 @@ const SideMenu = ({ clearChat, currentModel, setCurrentModel, models, setTempera | ... | @@ -57,7 +57,7 @@ const SideMenu = ({ clearChat, currentModel, setCurrentModel, models, setTempera |
| 57 | The temperature parameter controls the randomness of the model. 0 is the most logical, 1 is the most creative. | 57 | The temperature parameter controls the randomness of the model. 0 is the most logical, 1 is the most creative. |
| 58 | </span> | 58 | </span> |
| 59 | </div> | 59 | </div> |
| 60 | </aside> | 60 | </aside> |
| 61 | 61 | ||
| 62 | const Button = ({ onClick, text }) => | 62 | const Button = ({ onClick, text }) => |
| 63 | <div | 63 | <div | ... | ... |
| ... | @@ -5,8 +5,8 @@ const cors = require('cors') | ... | @@ -5,8 +5,8 @@ const cors = require('cors') |
| 5 | 5 | ||
| 6 | // Open AI Configuration | 6 | // Open AI Configuration |
| 7 | const configuration = new Configuration({ | 7 | const configuration = new Configuration({ |
| 8 | organization: "org-[REDACTED — real organization ID was committed here]", | 8 | organization: "org-organization", |
| 9 | apiKey: "sk-[REDACTED — live OpenAI key was committed here; it is compromised and must be revoked]", | 9 | apiKey: "sk-apiKey", |
| 10 | }); | 10 | }); |
| 11 | const openai = new OpenAIApi(configuration); | 11 | const openai = new OpenAIApi(configuration); |
| 12 | 12 | ... | ... |
-
Please register or sign in to post a comment