8533480c by RSA

WIP

1 parent 02548d9d
......@@ -29,7 +29,7 @@ function App() {
fetch(process.env.REACT_APP_SERVER_URL + "/models")
.then(res => res.json())
.then(data => {
// console.log(data.models.data)
console.log(data.models.data)
// sort models alphabetically by id
data.models.data.sort((a, b) => {
if(a.id < b.id) { return -1; }
......@@ -47,13 +47,12 @@ function App() {
setChatLog(chatLogNew)
// fetch a response from the API by combining the chat log array of messages and sending it to the server as a POST
const messages = chatLogNew.map((message) => message.message).join("\n")
console.log(chatLogNew[0])
const messageDiv = document.getElementsByClassName("message");
const response = await fetch(process.env.REACT_APP_SERVER_URL + "/api", {
method: "POST",
headers: {
"Content-Type": "application/json"
"Content-Type": "application/json",
"Accept":"application/json"
},
body: JSON.stringify({
message: messages,
......@@ -63,36 +62,72 @@ function App() {
const data = await response.json();
const parsedData = data.message.trim();
console.log("-----------")
console.log(data)
console.log("===========")
console.log(parsedData)
typeText(messageDiv,parsedData);
i = 0;
setChatLog([...chatLogNew, { user: "gpt", message: `${data.message}`} ])
var scrollToTheBottomChatLog = document.getElementsByClassName("chat-log")[0];
scrollToTheBottomChatLog.scrollTop = scrollToTheBottomChatLog.scrollHeight;
}
let i = 0;
const delay = 100;
const chat_container = document.getElementsByClassName("chat-log");
// Types `text` into `element` one character at a time, advancing the
// module-level counter `i` and waiting `delay` ms between characters.
// Callers reset `i = 0` before invoking (see the submit handler above).
//
// Fixes vs. original:
//  - `setTimeout(typeText(element, text), delay)` CALLED typeText
//    immediately and handed its `undefined` return to setTimeout, so the
//    whole text appeared in one synchronous burst instead of animating.
//    We now pass a closure so each character is scheduled `delay` ms apart.
//  - `chat_container` is an HTMLCollection (getElementsByClassName), so
//    reading `.clientHeight` / assigning `.scrollTop` on the collection
//    itself was a silent no-op; we index [0] to reach the actual element.
//  - Dropped the unused `index` and `interval` locals.
// NOTE(review): `element` is also an HTMLCollection at the call site
// (getElementsByClassName("message")) — `element.innerHTML` on a
// collection is a no-op; the caller should pass a single element. TODO confirm.
const typeText = (element, text) => {
  setTimeout(() => {
    if (i <= text.length) {
      element.innerHTML = text.substring(0, i);
      i++;
      typeText(element, text); // schedule the next character
    }
  }, delay);
  // Keep the chat pinned to the newest message while typing.
  const log = chat_container[0];
  if (log) {
    log.scrollTop = log.scrollHeight;
  }
}
// var oHttp = new XMLHttpRequest();
// oHttp.open("POST", "https://api.openai.com/v1/completions");
// oHttp.setRequestHeader("Accept", "application/json");
// oHttp.setRequestHeader("Content-Type", "application/json");
// oHttp.setRequestHeader("Authorization", "Bearer " + "sk-REDACTED") // SECURITY(review): a real API key was committed here — revoke it immediately and load keys from server-side env vars, never client code
// oHttp.onreadystatechange = function () {
// if (oHttp.readyState === 4) {
// var s = ''
// var oJson = {}
// if (s != "") s += "\n";
// try {
// oJson = JSON.parse(oHttp.responseText);
// } catch (ex) {
// s += "Error: " + ex.message
// }
// if (oJson.error && oJson.error.message) {
// s += "Error: " + oJson.error.message;
// } else if (oJson.choices && oJson.choices[0].text) {
// s = oJson.choices[0].text;
// var a = s.split("?\n");
// if (a.length == 2) {
// s = a[1];
// }
// // if (selLang.value != "en-US") {
// // var a = s.split("?\n");
// // if (a.length == 2) {
// // s = a[1];
// // }
// // }
// if (s == "") s = "No response";
// console.log('ssssssssssssssssssssss',s);
// var replaceBR= (s.replace(/(?:\r\n|\r|\n)/g, "<br>")).replace(/\r?\n|\r/, "");
// setChatLog([...chatLogNew, { user: "gpt", message: `${replaceBR}`} ]);
// }
// }
// };
// var sModel = currentModel;// "text-davinci-003";
// var iMaxTokens = 100;
// var sUserId = "1";
// var dTemperature =temperature;
// var data = {
// model: sModel,
// prompt: messages,
// max_tokens: iMaxTokens,
// //user: sUserId,
// temperature: dTemperature,
// // frequency_penalty: 0.0, //Number between -2.0 and 2.0 Positive value decrease the model's likelihood to repeat the same line verbatim.
// //presence_penalty: 0.0, //Number between -2.0 and 2.0. Positive values increase the model's likelihood to talk about new topics.
// //stop: ["#", ";"] //Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.
// }
var oHttp = new XMLHttpRequest();
oHttp.open("POST", "/your-endpoint", true);
oHttp.setRequestHeader("Content-Type", "application/json;charset=UTF-8");
oHttp.send(JSON.stringify(data));
}
function handleTemp(temp) {
if(temp > 1){
......
Styling with Markdown is supported
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!