e6687917 by Janis

Merge branch 'fix_master_updated_ui' into '1DEVT'

fix_master_updated_ui

See merge request !5
2 parents 21eeded6 b22f9a7b
1 # ChatGPT Server
2
3 ## Installation
4
5 Run `npm install` in the root directory.
6
7 ## Create ENV Variables
8
9 Create an .env file in the root directory of your application.
10 Duplicate the env-template and rename to .env
11 In the .env file, define the environment variables you want to use in your application, e.g.:
12
13 OPENAI_API_KEY="sk-xxxxxxxx"
14 OPENAI_API_ORG="org-xxxxxxx"
15 SERVER_URL="http://localhost:3080/"
16
17 ## Run server
18
19 Run the server from the root directory:
20
21 `node index.js`
...@@ -13,6 +13,7 @@ ...@@ -13,6 +13,7 @@
13 13
14 # misc 14 # misc
15 .DS_Store 15 .DS_Store
16 .env
16 .env.local 17 .env.local
17 .env.development.local 18 .env.development.local
18 .env.test.local 19 .env.test.local
......
1 # ChatGPT Client
1 # Getting Started with Create React App 2 # Getting Started with Create React App
2
3 This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app). 3 This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app).
4 4
5 ## Installation
6
7 Run `npm install` in the /client directory.
8
5 ## Available Scripts 9 ## Available Scripts
6 10
7 In the project directory, you can run: 11 In the /client directory, you can run:
8 12
9 ### `npm start` 13 ### `npm start`
10 14
......
...@@ -26,7 +26,7 @@ function App() { ...@@ -26,7 +26,7 @@ function App() {
26 } 26 }
27 27
28 function getEngines(){ 28 function getEngines(){
29 fetch("http://localhost:3080/models") 29 fetch(process.env.SERVER_URL + "/models")
30 .then(res => res.json()) 30 .then(res => res.json())
31 .then(data => { 31 .then(data => {
32 console.log(data.models.data) 32 console.log(data.models.data)
...@@ -48,7 +48,7 @@ function App() { ...@@ -48,7 +48,7 @@ function App() {
48 // fetch response to the api combining the chat log array of messages and sending it as a message to localhost:3000 as a post 48 // fetch response to the api combining the chat log array of messages and sending it as a message to localhost:3000 as a post
49 const messages = chatLogNew.map((message) => message.message).join("\n") 49 const messages = chatLogNew.map((message) => message.message).join("\n")
50 50
51 const response = await fetch("http://localhost:3080/", { 51 const response = await fetch(process.env.SERVER_URL, {
52 method: "POST", 52 method: "POST",
53 headers: { 53 headers: {
54 "Content-Type": "application/json" 54 "Content-Type": "application/json"
......
1 OPENAI_API_ORG=
2 OPENAI_API_KEY=
3 SERVER_URL=
...\ No newline at end of file ...\ No newline at end of file
...@@ -5,8 +5,8 @@ const cors = require('cors') ...@@ -5,8 +5,8 @@ const cors = require('cors')
5 5
6 // Open AI Configuration 6 // Open AI Configuration
7 const configuration = new Configuration({ 7 const configuration = new Configuration({
8 organization: "org-organization", 8 organization: process.env.OPENAI_API_ORG,
9 apiKey: "sk-apiKey", 9 apiKey: process.env.OPENAI_API_KEY,
10 }); 10 });
11 const openai = new OpenAIApi(configuration); 11 const openai = new OpenAIApi(configuration);
12 12
...@@ -22,12 +22,12 @@ app.use(require('morgan')('dev')) ...@@ -22,12 +22,12 @@ app.use(require('morgan')('dev'))
22 // Routing 22 // Routing
23 23
24 // Primary Open AI Route 24 // Primary Open AI Route
25 app.post('/', async (req, res) => { 25 app.post('/api', async (req, res) => {
26 const { message, currentModel, temperature } = req.body; 26 const { message, currentModel, temperature } = req.body;
27 const response = await openai.createCompletion({ 27 const response = await openai.createCompletion({
28 model: `${currentModel}`,// "text-davinci-003", 28 model: `${currentModel}`,// "text-davinci-003",
29 prompt: `${message}`, 29 prompt: `${message}`,
30 max_tokens: 100, 30 max_tokens: 2500,
31 temperature, 31 temperature,
32 }); 32 });
33 res.json({ 33 res.json({
......
Styling with Markdown is supported
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!