// App.js
import './normal.css';
import './App.css';
import './color_theme_1.css';
import { useState, useEffect } from 'react';
import SideMenu from './SideMenu'
import ChatBox from './ChatBox'
import Cookies from 'js-cookie'

function App() {

  // On mount: load the selectable model list and persist the Together-AI
  // toggle from the URL. Both callees are declared later in this component;
  // the effect runs only after the whole body has executed, so that is safe.
  useEffect(() => {
    getEngines();
    displayTogetherAiResponse()
  }, [])
  
  // Fallback model id when REACT_APP_DEFAULT_MODEL is not configured.
  const DEFAULT_MODEL = process.env.REACT_APP_DEFAULT_MODEL || 'gpt-4o-mini';

  const [chatInput, setChatInput] = useState("");      // current prompt box text
  const [models, setModels] = useState([]);            // models fetched from /models
  const [temperature, setTemperature] = useState(0.7); // sampling temperature, clamped to [0, 1] by handleTemp
  const GPTTurbo = DEFAULT_MODEL;
  const [currentModel, setCurrentModel] = useState(GPTTurbo);
  // Rendered transcript: [{ user: "user" | "gpt" | "me", message: string }]
  const [chatLog, setChatLog] = useState([{
    user: "gpt",
    message: "Welcome to AI-PRO... How can I help you?"
  }]);
  // "gpt-3.5-turbo"
  // Role/content transcript sent to chat-completion ("openAI") backends.
  const defaultChatLogTurbo = [
    { role: "system", content: "You are a helpful assistant."}
  ];
  const [chatLogTurbo, setChatLogTurbo] = useState(defaultChatLogTurbo);
  //

  // Role/content transcript for the "Llama" / "Opensource" backends.
  const [chatLogOpenSource, setChatLogOpenSource] = useState([]);

  // Reads the "P6XcW47o" query parameter and, when it is explicitly "1" or
  // "0", persists it as a 1-day cookie. submitPrompt() later reads this
  // cookie to toggle the server-side Together-AI response path.
  const displayTogetherAiResponse = () => {
    const queryParams = new URLSearchParams(window.location.search);
    const cookieName = 'P6XcW47o';
    const exists = queryParams.has(cookieName);
    const enable = queryParams.get(cookieName);

    // Strict equality: only the exact strings '1' and '0' are accepted
    // (the previous loose `==` also matched via coercion).
    if (exists && (enable === '1' || enable === '0')) {
      Cookies.set(cookieName, enable, {
        expires: 1 // days
      });
    }
  };
  
  // Resets the conversation to its initial state across every backend
  // transcript and clears the pending input box.
  const clearChat = () => {
    setChatLog([]);
    setChatLogTurbo(defaultChatLogTurbo);
    setChatLogOpenSource([]);
    setChatInput("");
    setStartedInteraction(false);
  };

  // Loads the model list from the backend, hides models this UI does not
  // support, sorts the rest alphabetically by id, and stores them in state.
  function getEngines(){
    // Models the picker should never offer.
    const hiddenModels = new Set(["whisper-1", "gpt-4", "gpt-4-0314", "gpt-4-0613"]);

    fetch(process.env.REACT_APP_SERVER_URL + "/models")
      .then(res => res.json())
      .then(data => {
        const modelList = data.models.data
          .filter(model => !hiddenModels.has(model.id))
          .sort((a, b) => {
            if(a.id < b.id) { return -1; }
            if(a.id > b.id) { return 1; }
            return 0;
          });
        setModels(modelList);
      })
      .catch(err => {
        // Previously a network/JSON failure was an unhandled rejection;
        // log it and leave the current model list untouched.
        console.log(err);
      });
  }

  // Form submit handler: prevent the page reload, then forward the prompt.
  // Awaiting submitPrompt() avoids leaving a floating promise behind.
  async function handleSubmit(e){
    e.preventDefault();
    await submitPrompt();
  }

  // Looks up which backend endpoint serves the given model id.
  // Returns the first matching model's `endpoint` field, or null when the
  // id is not present in the fetched model list.
  const getEndpoint = (modelName) => {
    for (const candidate of models) {
      if (candidate.id === modelName) {
        return candidate.endpoint;
      }
    }
    return null;
  };

  // Sends the current chat input to the backend and streams the reply into
  // the chat log with a typewriter effect.
  //
  // Flow: normalize the user's trailing punctuation -> optimistically append
  // the user message plus a "..." thinking placeholder -> build the
  // model-specific payload -> POST to /api -> handle quota/upgrade
  // rejections -> render the formatted response character by character.
  async function submitPrompt() {

    // Optional third-party hook injected on window; run it if present.
    const TPLogicRun = window.TPLogicRun;
    if (typeof TPLogicRun === 'function') {
      TPLogicRun();
    }

    // Heuristic: when the input has no terminal punctuation, append "?" if it
    // contains an interrogative word, otherwise ".".
    const userInput = ['what', 'why', 'when', 'where' , 'which', 'did', 'do', 'how', 'can', 'are', 'who'];
    const userInputRegex = new RegExp(`\\b(${userInput.join('|')})\\b`, 'gi');
    const inputMatches = chatInput.match(userInputRegex);

    const userPunctuation = ['.', '?', '!', ':', ';', ','];
    const userPunctuationRegex = new RegExp(`[${userPunctuation.join('')}]$`);
    const punctuationMatches = chatInput.match(userPunctuationRegex);

    var userModifiedInput = chatInput

    if (!punctuationMatches) {
      if (!inputMatches) {
        userModifiedInput = chatInput + ".";
      } else {
        userModifiedInput = chatInput + "?";
      }
    }

    // Optimistically show the user's message and clear the input box.
    let chatLogNew = [...chatLog, { user: "user", message: `${userModifiedInput}`} ]
    setChatInput("");
    setChatLog(chatLogNew)

    // "Thinking" placeholder bubble; startInterval() below animates its dots.
    const userMessage = { user: "gpt", message: "..." };
    setChatLog(prevChatLog => [...prevChatLog, userMessage]);

    // Default payload: newline-joined transcript. NOTE(review): map() leaves
    // `undefined` holes for "me" entries (join renders them as empty lines) —
    // presumably a filter was intended; confirm.
    var messages = chatLogNew.map((message) => { if(message.user !== 'me') return message.message }).join("\n")
    let endpoint = getEndpoint(currentModel);
    // Chat-completion style backends get a JSON role/content transcript instead.
    if(endpoint === "openAI") {
      let chatLogTurboNew = [...chatLogTurbo, { role: "user", content: chatInput }];
      setChatLogTurbo(chatLogTurboNew);
      messages = JSON.stringify(chatLogTurboNew);
    }
    if(endpoint === "Llama" || endpoint === "Opensource") {
      let chatLogOpenSourceNew = [...chatLogOpenSource, { role: "user", content: chatInput }];
      setChatLogOpenSource(chatLogOpenSourceNew);
      messages = JSON.stringify(chatLogOpenSourceNew);
    }

    // Animate the "..." placeholder until the response arrives.
    let intervalId = startInterval();

    try {
      // Cookie written by displayTogetherAiResponse(); toggles the
      // server-side Together-AI path.
      const togetherAiResponse = Cookies.get('P6XcW47o') === '1'

      const response = await fetch(process.env.REACT_APP_SERVER_URL + "/api", {
        method: "POST",
        headers: {
          "Content-Type": "application/json"
        },
        body: JSON.stringify({
          message: messages,
          currentModel,
          temperature,
          P6XcW47o: togetherAiResponse
        })
      });
      const data = await response.json();
      const parsedData = data.message ? data.message.trim() : "";

      // Quota / membership rejections: show the matching upgrade modal and
      // bail out. NOTE(review): these early returns skip clearInterval, so
      // the dots keep animating — confirm whether that is intended.
      if(data.status === 'invalid'){
        if(data.limited) {
          window.btutil_modalRegisterUpgrade(false);
          return;
        }
        if(data.data && data.data.status === 'max-tokens') {
          if(data.data.ent_member === 'yes') {
            window.btutil_modalMaxTokenUpgradeEntMembers();
            return;
          }
          window.btutil_modalMaxTokenUpgrade();
          return;
        }
        window.btutil_modalRegisterUpgrade(false);
        return;
      }
      // "gpt-3.5-turbo"
      // NOTE(review): these alias (not copy) the state arrays, so the push()
      // calls below mutate React state in place; the subsequent setters
      // re-store the same references. Works today but is fragile — confirm.
      let chatLogTurboNew = chatLogTurbo;
      let chatLogOpenSourceNew = chatLogOpenSource;
      if(data.success === false) {
        // On failure, drop the placeholder and re-tag the user's last message
        // as "me" so it can be retried.
        setChatLog(prevChatLog => {
          const lastMsg = prevChatLog[prevChatLog.length - 2];
          return [...prevChatLog.slice(0, prevChatLog.length - 2), { user: "me", message: lastMsg.message }];
        });
        userModifiedInput = "";
      }

      chatLogTurboNew.push({ role: "user", content: userModifiedInput });
      chatLogTurboNew.push({ role: "assistant", content: parsedData });

      chatLogOpenSourceNew.push({ role: "user", content: userModifiedInput });
      chatLogOpenSourceNew.push({ role: "assistant", content: parsedData });
      setChatLogTurbo(chatLogTurboNew);
      setChatLogOpenSource(chatLogOpenSourceNew);
      //
      // Stop the "thinking" dots animation before rendering the answer.
      clearInterval(intervalId);
      // Responses mentioning programming terms get code-oriented escaping;
      // everything else gets plain markdown-to-HTML cleanup.
      const programmingKeywords = ['code', 'application', 'controller', 'rails' , 'PHP', 'java', 'javascript', 'script', 'console', 'python', 'programming', 'table'];

      const regex = new RegExp(`\\b(${programmingKeywords.join('|')})\\b`, 'gi');
      const matches = parsedData.match(regex);
      if (!matches) {
        // Newlines -> <br>, strip **bold** and ### heading markers.
        var replaceTags = (parsedData.replace(/(?:\r\n|\r|\n)/g, '<br>')).replace(/\*\*(.*?)\*\*/g, '$1').replace(/###\s(.+)/g, '$1');
      } else {
          // NOTE(review): these first three replace() calls only affect the
          // FIRST occurrence, and the opened <code> tag is never closed —
          // confirm against how ChatBox renders this HTML.
          replaceTags = (parsedData
            .replace(':',':<code>')
            .replace('<?','&#60;?')
            .replace('?>','?&#62;')
            .replace(/\n/g, '<br>')
            .replace(/\*\*(.*?)\*\*/g, '$1')
            .replace(/###\s(.+)/g, '$1')
          )
      }

      // Typewriter effect: reveal one more character every 5ms and keep the
      // chat log scrolled to the bottom.
      for (let i = 0; i < replaceTags.length; i++) {
        setTimeout(() => {
          const parsedMsg = replaceTags.slice(0, i + 1);
          updateLastMessage(parsedMsg);
          var scrollToTheBottomChatLog = document.getElementsByClassName("chat-log")[0];
          scrollToTheBottomChatLog.scrollTop = scrollToTheBottomChatLog.scrollHeight;
        }, i * 5);
      }

      // Replaces the trailing "gpt" bubble (the placeholder) with the
      // partially revealed message; appends a new bubble otherwise.
      function updateLastMessage(parsedMsg) {
        setChatLog(prevChatLog => {
          const lastMsg = prevChatLog[prevChatLog.length - 1];
          if (lastMsg && lastMsg.user === "gpt") {
            return [...prevChatLog.slice(0, prevChatLog.length - 1), { user: lastMsg.user, message: parsedMsg }];
          } else {
            return [...prevChatLog, { user: "gpt", message: parsedMsg }];
          }
        });
      }

    } catch (error) {
      // Network/parse failure: replace the transcript with the user's message
      // plus a styled error bubble. NOTE(review): the interval is not cleared
      // on this path either — confirm.
      console.log(error)
      const errorMsg = "We apologize for any inconvenience caused due to the delay in the response time. Please try again.";
      setChatLog([...chatLogNew, { user: "gpt", message: `<div class="errormsg"><span>i</span><div class="msg">${errorMsg}</div></div>`} ])
    }

    // Cycles the placeholder bubble through "." ".." "..." every 500ms by
    // writing directly into the last rendered .message element.
    function startInterval() {
      return setInterval(function() {
        if (userMessage.message.length === 3) {
          userMessage.message = ".";
        } else if (userMessage.message.length === 1) {
          userMessage.message = "..";
        } else {
          userMessage.message = "...";
        }
        var thinkingDots = document.getElementsByClassName("message");
        var thinkingDot = thinkingDots[thinkingDots.length - 1];
        thinkingDot.innerHTML = userMessage.message;
      }, 500);
    }
  }

  // Clamps the requested sampling temperature into the supported [0, 1]
  // range before storing it in state.
  function handleTemp(temp) {
    setTemperature(Math.min(1, Math.max(0, temp)));
  }

  // Re-attach the Mixpanel click tracker whenever the selected model
  // changes, so the tracked `upgrade` property stays current.
  useEffect(() => {
    const ctaButton = document.querySelector('#modal-container #modal-cta')
    if (!ctaButton) return

    const trackUpgradeClick = () => {
      if (window.mixpanel) {
        window.mixpanel.track('chatbot-plus-upgrade-modal', {
          'click-location': 'upgrade-modal',
          'upgrade': currentModel
        });
      }
    }

    ctaButton.addEventListener('click', trackUpgradeClick)
    // Cleanup must remove the exact same handler reference.
    return () => {
      ctaButton.removeEventListener('click', trackUpgradeClick)
    }
  }, [currentModel])
  

  // Tracks whether the user has sent anything yet (drives ChatBox intro UI).
  const [startedInteraction, setStartedInteraction] = useState(false);
  return (
    <div className="App">
      {/* Model picker, temperature slider, and clear-chat controls */}
      <SideMenu
        currentModel={currentModel}
        setCurrentModel={setCurrentModel}
        models={models}
        setTemperature={handleTemp}
        temperature={temperature}
        clearChat={clearChat}
      />

      {/* Conversation view plus the prompt input form */}
      <ChatBox
        chatInput={chatInput}
        chatLog={chatLog}
        setChatInput={setChatInput}
        startedInteraction={startedInteraction}
        setStartedInteraction={setStartedInteraction}
        handleSubmit={handleSubmit} />
    </div>
  );
}


export default App;