Skip to content

Commit

Permalink
Merge pull request #37 from gluneau/chatgpt
Browse files Browse the repository at this point in the history
simple chatgpt interface
  • Loading branch information
zcpua authored Jun 12, 2023
2 parents bfc944d + a4269df commit 0f0d82e
Show file tree
Hide file tree
Showing 8 changed files with 337 additions and 3 deletions.
3 changes: 2 additions & 1 deletion .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,5 @@ SALAI_TOKEN="Token of the Account from which you paid MidJourney"
SERVER_ID="Server id here"
CHANNEL_ID="Channel in which commands are sent"
NEXT_PUBLIC_IMAGE_PREFIX="/"
HUGGINGFACE_TOKEN="huggingface token here https://huggingface.co/docs/hub/security-tokens"
HUGGINGFACE_TOKEN="huggingface token here https://huggingface.co/docs/hub/security-tokens"
OPENAI_API_KEY="openai api key here"
183 changes: 183 additions & 0 deletions components/Form.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,183 @@
'use client'
import { useRef, useState } from 'react'
import useSWR from 'swr'

/**
 * Descriptor for one entry of OpenAI's engine/model listing.
 * NOTE(review): field semantics are inferred from the names only — this
 * component never populates it from a live response, so confirm the shape
 * against the actual API before relying on it.
 */
interface ModelType {
  id: string
  object: 'engine'
  owner: string
  ready: boolean
  created: string
  permissions: null
}

/**
 * Chat form: posts the typed message to /api/response and streams the
 * model's answer back, rendering the conversation as alternating bubbles.
 */
const Form = () => {
  const messageInput = useRef<HTMLTextAreaElement | null>(null)
  const [response, setResponse] = useState<string[]>([])
  const [isLoading, setIsLoading] = useState<boolean>(false)
  // NOTE(review): models/setModels and handleModelChange are not wired to
  // any UI yet — kept for a future model-selector <select>.
  const [models, setModels] = useState<ModelType[]>([])
  const [currentModel, setCurrentModel] = useState<string>('gpt-4')

  // Submit on plain Enter; Shift+Enter now inserts a newline (the original
  // submitted on any Enter, making multi-line input impossible).
  const handleEnter = (
    e: React.KeyboardEvent<HTMLTextAreaElement> &
      React.FormEvent<HTMLFormElement>
  ) => {
    if (e.key === 'Enter' && !e.shiftKey && !isLoading) {
      e.preventDefault()
      handleSubmit(e)
    }
  }

  const handleSubmit = async (e: React.FormEvent<HTMLFormElement>) => {
    e.preventDefault()
    const message = messageInput.current?.value
    if (!message) {
      return
    }
    // Echo the user's message immediately and clear the input.
    setResponse((prev) => [...prev, message])
    messageInput.current!.value = ''

    // Set here (not only in handleEnter) so button-click submits are also
    // guarded while a request is in flight.
    setIsLoading(true)
    try {
      const res = await fetch('/api/response', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({
          message,
          currentModel,
        }),
      })
      console.log('Edge function returned.')

      console.log(res)

      if (!res.ok) {
        throw new Error(res.statusText)
      }

      const data = res.body
      if (!data) {
        return
      }

      const reader = data.getReader()
      const decoder = new TextDecoder()
      let done = false

      // Empty placeholder entry that the streaming loop overwrites. (The
      // original pushed the user message again here, briefly duplicating it.)
      setResponse((prev) => [...prev, ''])

      let currentResponse: string[] = []
      while (!done) {
        const { value, done: doneReading } = await reader.read()
        done = doneReading
        // BUG FIX: { stream: true } keeps multi-byte UTF-8 sequences that
        // are split across chunks from being decoded as garbage.
        const chunkValue = decoder.decode(value, { stream: true })
        currentResponse = [...currentResponse, chunkValue]
        setResponse((prev) => [...prev.slice(0, -1), currentResponse.join('')])
      }
      // breaks text indent on refresh due to streaming
      // localStorage.setItem('response', JSON.stringify(currentResponse));
    } finally {
      // BUG FIX: the original never reset this flag, so after the first
      // Enter-submission the send button stayed disabled forever.
      setIsLoading(false)
    }
  }

  // Clears both the persisted history and the on-screen conversation.
  const handleReset = () => {
    localStorage.removeItem('response')
    setResponse([])
  }

  // Restore a previously persisted conversation once on mount.
  useSWR('fetchingResponse', async () => {
    const storedResponse = localStorage.getItem('response')
    if (storedResponse) {
      setResponse(JSON.parse(storedResponse))
    }
  })

  const handleModelChange = (e: React.ChangeEvent<HTMLSelectElement>) => {
    setCurrentModel(e.target.value)
  }

  return (
    <div className='flex justify-center'>
      <button
        onClick={handleReset}
        type='reset'
        className='fixed top-5 right-5 p-4 rounded-md bg-white text-gray-500 dark:hover:text-gray-400 dark:hover:bg-gray-900 disabled:hover:bg-transparent dark:disabled:hover:bg-transparent'
      >
        Clear History
      </button>
      <div className='w-full mx-2 flex flex-col items-start gap-3 pt-6 last:mb-6 md:mx-auto md:max-w-3xl'>
        {/* The original duplicated this map in two identical ternary
            branches (isLoading / !isLoading); one map suffices. Even
            indices are user messages, odd are model replies. */}
        {response.map((item: string, index: number) => (
          <div
            key={index}
            className={`${
              index % 2 === 0 ? 'bg-blue-500' : 'bg-gray-500'
            } p-3 rounded-lg`}
          >
            <p>{item}</p>
          </div>
        ))}
      </div>
      <form
        onSubmit={handleSubmit}
        className="absolute z-10 w-3/4 xl:w-3/5 right-0 bottom-10 left-0 mx-auto "
      >
        <textarea
          name='Message'
          placeholder='Type your query'
          ref={messageInput}
          onKeyDown={handleEnter}
          className='w-full'
          style={{ paddingRight: 30 }}
        />
        <button
          disabled={isLoading}
          type='submit'
          title="Send"
          style={{
            position: "absolute",
            bottom: 0,
            right: 0,
            background: "transparent",
            border: "none",
            boxShadow: "none",
          }}
        >
          <svg
            stroke='currentColor'
            fill='currentColor'
            strokeWidth='0'
            viewBox='0 0 20 20'
            className='h-4 w-4 rotate-90'
            height='1em'
            width='1em'
            xmlns='http://www.w3.org/2000/svg'
          >
            <path d='M10.894 2.553a1 1 0 00-1.788 0l-7 14a1 1 0 001.169 1.409l5-1.429A1 1 0 009 15.571V11a1 1 0 112 0v4.571a1 1 0 00.725.962l5 1.428a1 1 0 001.17-1.408l-7-14z'></path>
          </svg>
        </button>
      </form>
    </div>
  )
}

export default Form
6 changes: 6 additions & 0 deletions layouts/main.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import React, { useEffect, useState } from 'react';
import {
SmileOutlined,
GithubFilled,
WechatOutlined,
PictureFilled,

} from '@ant-design/icons'
Expand All @@ -23,6 +24,11 @@ const ROUTES: Route = {
name: 'MidJourney',
icon: <SmileOutlined />,
},
{
path: '/chatgpt',
name: 'ChatGPT',
icon: <WechatOutlined />,
},
],
}

Expand Down
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,8 @@
"axios": "^1.3.6",
"eventsource-parser": "^1.0.0",
"midjourney": "^2.4.49",
"next": "^13.4.4"
"next": "^13.4.4",
"openai": "^3.2.1"
},
"keywords": [
"midjourney-api",
Expand Down
36 changes: 36 additions & 0 deletions pages/api/response.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
import { OpenAIStream, OpenAIStreamPayload } from '../../utils/OpenAIStream'

// Shape of the JSON body POSTed by components/Form.tsx.
type RequestData = {
  currentModel: string
  message: string
}

// Fail fast at module load if the key is missing, rather than erroring on
// the first request.
if (!process.env.OPENAI_API_KEY) {
  throw new Error('Missing env var from OpenAI')
}

// Next.js route segment config: run this API route on the Edge runtime.
export const runtime = 'edge'

/**
 * Edge API route: validates the request, builds a streaming chat-completion
 * payload, and proxies the OpenAI SSE stream back to the client.
 *
 * @param request JSON body matching RequestData ({ currentModel, message }).
 * @returns 400 when no message is supplied; otherwise a streamed Response.
 */
export default async function handler(request: Request) {
  const { currentModel, message } = (await request.json()) as RequestData

  if (!message) {
    return new Response('No message in the request', { status: 400 })
  }

  const payload: OpenAIStreamPayload = {
    // BUG FIX: honor the model the client selected — the original
    // hard-coded 'gpt-4' and ignored currentModel (the commented-out line
    // showed the intent). Falls back to gpt-4 when absent.
    model: currentModel || 'gpt-4',
    messages: [{ role: 'user', content: message }],
    temperature: 0.7,
    top_p: 1,
    frequency_penalty: 0,
    presence_penalty: 0,
    max_tokens: 2048,
    stream: true,
    n: 1,
  }

  const stream = await OpenAIStream(payload)
  return new Response(stream)
}
12 changes: 12 additions & 0 deletions pages/chatgpt.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
import { Inter } from 'next/font/google';
import Form from '../components/Form';

// Inter webfont, subset to latin glyphs.
const inter = Inter({ subsets: ['latin'] });

/**
 * Page shell for the /chatgpt route: applies the Inter font class and
 * renders the chat <Form />.
 */
export default function Home() {
  const fontClass = inter.className;

  return (
    <main className={fontClass}>
      <Form />
    </main>
  );
}
80 changes: 80 additions & 0 deletions utils/OpenAIStream.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
import {
createParser,
ParsedEvent,
ReconnectInterval,
} from 'eventsource-parser';

/**
 * Author role for a chat message. GENERALIZED: the chat completions API also
 * defines the 'assistant' role (needed to send conversation history), which
 * the original union omitted; adding it is backward-compatible.
 */
export type ChatGPTAgent = 'user' | 'system' | 'assistant';

/** One message in the chat completion conversation. */
export interface ChatGPTMessage {
  role: ChatGPTAgent;
  content: string;
}

/** Request body for POST https://api.openai.com/v1/chat/completions. */
export interface OpenAIStreamPayload {
  model: string;
  messages: ChatGPTMessage[];
  temperature: number;
  top_p: number;
  frequency_penalty: number;
  presence_penalty: number;
  max_tokens: number;
  stream: boolean;
  n: number;
}

/**
 * Calls the OpenAI chat completions endpoint with `stream: true` and returns
 * a ReadableStream of the plain-text delta content, with SSE framing and the
 * terminal "[DONE]" sentinel stripped.
 *
 * @param payload request body; `payload.stream` should be true.
 * @throws Error when the API responds with a non-2xx status or an empty body.
 */
export async function OpenAIStream(payload: OpenAIStreamPayload) {
  const encoder = new TextEncoder();
  const decoder = new TextDecoder();

  // Counts enqueued chunks so the leading newline prefix can be dropped
  // (see the counter < 2 check below).
  let counter = 0;

  const res = await fetch('https://api.openai.com/v1/chat/completions', {
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${process.env.OPENAI_API_KEY ?? ''}`,
    },
    method: 'POST',
    body: JSON.stringify(payload),
  });

  // BUG FIX: surface API errors here. The original fed an error-JSON body
  // (non-SSE) straight into the parser, which never fired an event, leaving
  // the consumer's stream hanging forever.
  if (!res.ok || !res.body) {
    throw new Error(`OpenAI API error: ${res.status} ${res.statusText}`);
  }

  const stream = new ReadableStream({
    async start(controller) {
      // Fired once per complete SSE event assembled by the parser.
      function onParse(event: ParsedEvent | ReconnectInterval) {
        if (event.type === 'event') {
          const data = event.data;
          // https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream
          if (data === '[DONE]') {
            controller.close();
            return;
          }
          try {
            const json = JSON.parse(data);
            const text = json.choices[0].delta?.content || '';
            if (counter < 2 && (text.match(/\n/) || []).length) {
              // this is a prefix character (i.e., "\n\n"), do nothing
              return;
            }
            const queue = encoder.encode(text);
            controller.enqueue(queue);
            counter++;
          } catch (e) {
            // JSON.parse failed on a malformed event — abort the stream.
            controller.error(e);
          }
        }
      }

      // stream response (SSE) from OpenAI may be fragmented into multiple chunks
      // this ensures we properly read chunks and invoke an event for each SSE event stream
      const parser = createParser(onParse);
      // https://web.dev/streams/#asynchronous-iteration
      for await (const chunk of res.body as any) {
        // BUG FIX: { stream: true } keeps multi-byte UTF-8 sequences that
        // span chunk boundaries from being decoded as replacement chars.
        parser.feed(decoder.decode(chunk, { stream: true }));
      }
    },
  });

  return stream;
}
17 changes: 16 additions & 1 deletion yarn.lock
Original file line number Diff line number Diff line change
Expand Up @@ -915,6 +915,13 @@ axe-core@^4.6.2:
resolved "https://registry.npmjs.org/axe-core/-/axe-core-4.7.0.tgz"
integrity sha512-M0JtH+hlOL5pLQwHOLNYZaXuhqmvS8oExsqB1SBYgA4Dk7u/xx+YdGHXaK5pyUfed5mYXdlYiphWq3G8cRi5JQ==

axios@^0.26.0:
version "0.26.1"
resolved "https://registry.yarnpkg.com/axios/-/axios-0.26.1.tgz#1ede41c51fcf51bbbd6fd43669caaa4f0495aaa9"
integrity sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==
dependencies:
follow-redirects "^1.14.8"

axios@^1.3.6:
version "1.3.6"
resolved "https://registry.npmjs.org/axios/-/axios-1.3.6.tgz"
Expand Down Expand Up @@ -1624,7 +1631,7 @@ flatted@^3.1.0:
resolved "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz"
integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==

follow-redirects@^1.15.0:
follow-redirects@^1.14.8, follow-redirects@^1.15.0:
version "1.15.2"
resolved "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz"
integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==
Expand Down Expand Up @@ -2435,6 +2442,14 @@ open@^8.4.0:
is-docker "^2.1.1"
is-wsl "^2.2.0"

openai@^3.2.1:
version "3.2.1"
resolved "https://registry.yarnpkg.com/openai/-/openai-3.2.1.tgz#1fa35bdf979cbde8453b43f2dd3a7d401ee40866"
integrity sha512-762C9BNlJPbjjlWZi4WYK9iM2tAVAv0uUp1UmI34vb0CN5T2mjB/qM6RYBmNKMh/dN9fC+bxqPwWJZUTWW052A==
dependencies:
axios "^0.26.0"
form-data "^4.0.0"

optionator@^0.9.1:
version "0.9.1"
resolved "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz"
Expand Down

1 comment on commit 0f0d82e

@vercel
Copy link

@vercel vercel bot commented on 0f0d82e Jun 12, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please sign in to comment.