From c016645b35a9b06c68404688b01361c1a6f461d2 Mon Sep 17 00:00:00 2001 From: Benson Wally Tran Date: Sat, 18 Nov 2023 16:50:29 -0600 Subject: [PATCH] route.ts --- app/api/open-ai/chat.ts | 31 ------------- app/api/open-ai/chat/route.ts | 44 +++++++++++++++++++ app/pdf/page.tsx | 12 ----- app/sandbox/page.tsx | 17 +++++++ .../generate/GenerateStoryComponent.tsx | 29 ++++++++++++ operations/chatOperations.ts | 1 + operations/fetch.ts | 2 +- 7 files changed, 92 insertions(+), 44 deletions(-) delete mode 100644 app/api/open-ai/chat.ts create mode 100644 app/api/open-ai/chat/route.ts delete mode 100644 app/pdf/page.tsx create mode 100644 app/sandbox/page.tsx create mode 100644 components/generate/GenerateStoryComponent.tsx diff --git a/app/api/open-ai/chat.ts b/app/api/open-ai/chat.ts deleted file mode 100644 index 20a1ff11d..000000000 --- a/app/api/open-ai/chat.ts +++ /dev/null @@ -1,31 +0,0 @@ -import OpenAI from 'openai'; - -export default async function handler(req, res) { - if (req.method === 'POST') { - const openai = new OpenAI({ - apiKey: process.env.OPENAI_API_KEY - }); - - const { model, messages, functions } = req.body.payload; - - try { - const chatCompletion = await openai.chat.completions.create({ - model: model, - messages: messages, - functions: functions - }); - - const message = chatCompletion.choices[0].message; - res.status(200).json({ text: message }); - } catch (error) { - if (error instanceof OpenAI.APIError) { - res.status(error.status).json({ error: error.message }); - } else { - res.status(500).json({ error: error.message }); - } - } - } else { - res.setHeader('Allow', ['POST']); - res.status(405).end(`Method ${req.method} Not Allowed`); - } -} diff --git a/app/api/open-ai/chat/route.ts b/app/api/open-ai/chat/route.ts new file mode 100644 index 000000000..f181bdc99 --- /dev/null +++ b/app/api/open-ai/chat/route.ts @@ -0,0 +1,44 @@ +import { NextRequest, NextResponse } from 'next/server'; +import OpenAI from 'openai'; + +export async function 
POST(req: NextRequest): Promise<NextResponse> {
+  if (req.method === 'POST') {
+    const openai = new OpenAI({
+      apiKey: process.env.OPENAI_API_KEY
+    });
+
+    const { model, messages, functions } = await req.json();
+
+    try {
+      const chatCompletion = await openai.chat.completions.create({
+        model: model,
+        messages: messages,
+        functions: functions
+      });
+
+      const message = chatCompletion.choices[0].message;
+      return new NextResponse(JSON.stringify({ text: message }), {
+        status: 200
+      });
+    } catch (error) {
+      if (error instanceof OpenAI.APIError) {
+        return new NextResponse(JSON.stringify({ error: error.message }), {
+          status: 400
+        });
+      }
+      return new NextResponse(JSON.stringify({ error: error.message }), {
+        status: 500
+      });
+    }
+  }
+  const response = NextResponse.next();
+  response.headers.set('Allow', 'POST');
+
+  // Return a 405 response with a custom message
+  return new NextResponse(`Method ${req.method} Not Allowed`, {
+    status: 405,
+    headers: {
+      Allow: 'POST'
+    }
+  });
+}
diff --git a/app/pdf/page.tsx b/app/pdf/page.tsx
deleted file mode 100644
index e035f0e50..000000000
--- a/app/pdf/page.tsx
+++ /dev/null
@@ -1,12 +0,0 @@
-import dynamic from 'next/dynamic';
-
-// Dynamically import PDFTest with no SSR
-const DynamicPDFTest = dynamic(() => import('components/pdf/PDFTest'), {
-  ssr: false
-});
-
-export const runtime = 'edge';
-
-export default function Test() {
-  return <DynamicPDFTest />;
-}
diff --git a/app/sandbox/page.tsx b/app/sandbox/page.tsx
new file mode 100644
index 000000000..40a5de41d
--- /dev/null
+++ b/app/sandbox/page.tsx
@@ -0,0 +1,17 @@
+'use client';
+
+import dynamic from 'next/dynamic';
+
+// const DynamicPDFTest = dynamic(() => import('components/pdf/PDFTest'), {
+//   ssr: false
+// });
+
+const GenerateStoryComponent = dynamic(() => import('components/generate/GenerateStoryComponent'), {
+  ssr: false
+});
+
+export const runtime = 'edge';
+
+export default function Test() {
+  return <GenerateStoryComponent />;
+}
diff --git a/components/generate/GenerateStoryComponent.tsx
b/components/generate/GenerateStoryComponent.tsx
new file mode 100644
index 000000000..27564d0cd
--- /dev/null
+++ b/components/generate/GenerateStoryComponent.tsx
@@ -0,0 +1,29 @@
+'use client';
+import chatOperations from 'operations/chatOperations';
+import { useState } from 'react';
+
+export default function GenerateStory() {
+  const [loading, setLoading] = useState(false);
+  const [data, setData] = useState();
+
+  const getStory = async () => {
+    setLoading(true);
+    const data = await chatOperations.createStoryAsync();
+    setData(data);
+    setLoading(false);
+  };
+
+  return (
+    <div>
+      <button onClick={getStory}>Generate</button>
+      {loading ? <p>
+        Loading...
+      </p> : null}
+      {JSON.stringify(data)}
+    </div>
+ ); +} diff --git a/operations/chatOperations.ts b/operations/chatOperations.ts index 1760909f8..dc05d8922 100644 --- a/operations/chatOperations.ts +++ b/operations/chatOperations.ts @@ -41,6 +41,7 @@ async function createStoryAsync( } ]; const data = await post('/api/open-ai/chat', generateRequestPayload(messages)); + // const data = await post('/api/revalidate', generateRequestPayload(messages)); return getFunctionCallArguments(data); } diff --git a/operations/fetch.ts b/operations/fetch.ts index df650dced..0c22bf7bc 100644 --- a/operations/fetch.ts +++ b/operations/fetch.ts @@ -5,7 +5,7 @@ async function post(path: string, payload?: any): Promise { headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ payload }) + body: JSON.stringify(payload) }); if (!response.ok) {