Skip to content

Commit

Permalink
Merge pull request #148 from tegnike/feature/fix-several-external-service-endpoint
Browse files Browse the repository at this point in the history

いくつかのエンドポイント不具合を対応
  • Loading branch information
tegnike authored Aug 27, 2024
2 parents 144ac08 + daa5296 commit 25ccdf9
Show file tree
Hide file tree
Showing 6 changed files with 107 additions and 48 deletions.
4 changes: 2 additions & 2 deletions src/features/chat/difyChat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ export async function getDifyChatResponseStream(
files: [],
})

const response = await fetch(url, {
const response = await fetch(url.replace(/\/$/, ''), {
method: 'POST',
headers: headers,
body: body,
Expand All @@ -48,7 +48,7 @@ export async function getDifyChatResponseStream(
.filter((line) => line.startsWith('data:'))
messages.forEach((message) => {
const data = JSON.parse(message.slice(5)) // Remove 'data:' prefix
if (data.event === 'message') {
if (data.event === 'agent_message' || data.event === 'message') {
controller.enqueue(data.answer)
settingsStore.setState({
difyConversationId: data.conversation_id,
Expand Down
2 changes: 1 addition & 1 deletion src/features/chat/localLLMChat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ export async function getLocalLLMChatResponseStream(
model?: string
) {
const response = await axios.post(
localLlmUrl,
localLlmUrl.replace(/\/$/, ''),
{
model: model,
messages: messages,
Expand Down
98 changes: 55 additions & 43 deletions src/features/chat/openAiChat.ts
Original file line number Diff line number Diff line change
@@ -1,69 +1,81 @@
import { OpenAI } from 'openai'
import { Message } from '../messages/messages'
import { ChatCompletionMessageParam } from 'openai/resources'

export async function getOpenAIChatResponse(
messages: Message[],
apiKey: string,
model: string
) {
if (!apiKey) {
throw new Error('Invalid API Key')
}

const openai = new OpenAI({
apiKey: apiKey,
dangerouslyAllowBrowser: true,
})

const data = await openai.chat.completions.create({
model: model,
messages: messages as ChatCompletionMessageParam[],
const response = await fetch('/api/openai', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ messages, apiKey, model }),
})

const [aiRes] = data.choices
const message = aiRes.message?.content || '回答生成時にエラーが発生しました。'

return { message: message }
const data = await response.json()
return { message: data.message }
}

export async function getOpenAIChatResponseStream(
messages: Message[],
apiKey: string,
model: string
) {
if (!apiKey) {
throw new Error('Invalid API Key')
const response = await fetch('/api/openai', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ messages, apiKey, model, stream: true }),
})

if (!response.ok) {
throw new Error('OpenAI APIリクエストに失敗しました')
}

const openai = new OpenAI({
apiKey: apiKey,
dangerouslyAllowBrowser: true,
})
if (!response.body) {
throw new Error('OpenAI APIレスポンスが空です')
}

const stream = await openai.chat.completions.create({
model: model,
messages: messages as ChatCompletionMessageParam[],
stream: true,
max_tokens: 200,
})
const reader = response.body.getReader()
const decoder = new TextDecoder('utf-8')

const res = new ReadableStream({
async start(controller: ReadableStreamDefaultController) {
try {
for await (const chunk of stream) {
const messagePiece = chunk.choices[0].delta.content
if (!!messagePiece) {
controller.enqueue(messagePiece)
return new ReadableStream({
async start(controller) {
while (true) {
const { done, value } = await reader.read()

if (done) {
break
}

const chunk = decoder.decode(value)
const lines = chunk.split('\n')

for (const line of lines) {
if (line.startsWith('data:')) {
const data = line.substring(5).trim()
if (data !== '[DONE]') {
const event = JSON.parse(data)
switch (event.type) {
case 'content_block_delta':
controller.enqueue(event.text)
break
case 'error':
throw new Error(
`OpenAI API error: ${JSON.stringify(event.error)}`
)
case 'message_stop':
controller.close()
return
}
}
}
}
} catch (error) {
controller.error(error)
} finally {
controller.close()
}

controller.close()
},
})

return res
}
2 changes: 1 addition & 1 deletion src/features/messages/speakCharacter.ts
Original file line number Diff line number Diff line change
Expand Up @@ -271,7 +271,7 @@ export const fetchAudioVoiceGSVIApi = async (
speed: number
): Promise<ArrayBuffer> => {
const style = talk.style !== 'talk' ? talk.style : 'default'
const response = await fetch(url, {
const response = await fetch(url.replace(/\/$/, ''), {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Expand Down
47 changes: 47 additions & 0 deletions src/pages/api/openai.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
import { NextApiRequest, NextApiResponse } from 'next'
import OpenAI from 'openai'
import { Message } from '@/features/messages/messages'

/**
 * Proxy endpoint for OpenAI chat completions.
 *
 * Accepts POST { messages, apiKey, model, stream? }.
 * - stream: true  → responds as a Server-Sent-Events stream emitting
 *   `content_block_delta` events (text pieces), an `error` event on
 *   upstream failure, and a final `message_stop` event.
 * - stream: false → responds 200 with JSON { message }.
 */
export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  // Only POST carries a chat payload; reject anything else explicitly.
  if (req.method !== 'POST') {
    res.setHeader('Allow', ['POST'])
    res.status(405).json({ error: 'Method Not Allowed' })
    return
  }

  const { messages, apiKey, model, stream } = req.body

  // Fail fast with a clear 4xx instead of letting the OpenAI client
  // produce an opaque authentication error downstream.
  if (!apiKey) {
    res.status(400).json({ error: 'Invalid API Key' })
    return
  }

  const client = new OpenAI({ apiKey })

  if (stream) {
    res.writeHead(200, {
      'Content-Type': 'text/event-stream',
      'Cache-Control': 'no-cache',
      Connection: 'keep-alive',
    })

    try {
      // Renamed from `stream` to avoid shadowing the request flag above.
      const completionStream = await client.chat.completions.create({
        model: model,
        messages: messages,
        stream: true,
        max_tokens: 200,
      })

      for await (const chunk of completionStream) {
        // choices/delta can be absent on terminal chunks; guard before use.
        const messagePiece = chunk.choices[0]?.delta?.content
        if (messagePiece) {
          res.write(
            `data: ${JSON.stringify({ type: 'content_block_delta', text: messagePiece })}\n\n`
          )
        }
      }
    } catch (error: unknown) {
      // Surface upstream failures as an SSE `error` event — the client's
      // stream reader switches on event.type === 'error'.
      res.write(
        `data: ${JSON.stringify({
          type: 'error',
          error: error instanceof Error ? error.message : String(error),
        })}\n\n`
      )
    }

    // Always terminate the stream so the client reader can close cleanly.
    res.write(`data: ${JSON.stringify({ type: 'message_stop' })}\n\n`)
    res.end()
  } else {
    try {
      const response = await client.chat.completions.create({
        model: model,
        messages: messages,
        max_tokens: 200,
      })

      res.status(200).json({ message: response.choices[0].message.content })
    } catch (error: unknown) {
      // Map upstream failures to a 500 with a readable message instead of
      // letting the rejection escape as an unhandled promise.
      res.status(500).json({
        error: error instanceof Error ? error.message : 'OpenAI API error',
      })
    }
  }
}
2 changes: 1 addition & 1 deletion src/pages/api/stylebertvits2.ts
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ export default async function handler(

try {
const voice = await fetch(
`${stylebertvits2ServerUrl}/voice?${queryParams}`,
`${stylebertvits2ServerUrl.replace(/\/$/, '')}/voice?${queryParams}`,
{
method: 'GET',
headers: {
Expand Down

0 comments on commit 25ccdf9

Please sign in to comment.