Skip to content

Commit

Permalink
♻️ Log request and response when raising an error
Browse files Browse the repository at this point in the history
  • Loading branch information
zuisong committed Jan 26, 2024
1 parent 2b0bddd commit c98335b
Show file tree
Hide file tree
Showing 4 changed files with 20 additions and 11 deletions.
14 changes: 8 additions & 6 deletions src/log.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,23 +6,25 @@ enum LogLevel {
}
type Any = Parameters<typeof console.log>[0]

const currentlevel = LogLevel.debug
// 设置日志级别为 error
const currentlevel = LogLevel.error

export function gen_logger(id: string) {
return mapValues(LogLevel, (value, name) => {
return (msg: Any) => {
outFunc(name, value, `${id} ${msg}`)
outFunc(name, value, `${id} ${JSON.stringify(msg)}`)
}
})
}

export type Logger = ReturnType<typeof gen_logger>

function outFunc(levelName: string, levelValue: number, msg: string) {
// if (levelValue > currentlevel) {
// return
// }
// console.log(`${Date.now().toLocaleString()} ${levelName} ${msg}`)
if (levelValue > currentlevel) {
// 仅打印大于等于当前日志级别的日志
return
}
console.log(`${Date.now().toLocaleString()} ${levelName} ${msg}`)
}

function mapValues<
Expand Down
2 changes: 1 addition & 1 deletion src/v1/chat/completions/ChatProxyHandler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ export const chatProxyHandler: Handler = async (c: ContextWithLogger) => {
const log = c.var.log

const req = await c.req.json<OpenAI.Chat.ChatCompletionCreateParams>()
log.debug(JSON.stringify(req))
log.debug(req)
const headers = c.req.header()
const apiKey = getToken(headers)
if (apiKey == null) {
Expand Down
9 changes: 7 additions & 2 deletions src/v1/chat/completions/NonStreamingChatProxyHandler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,9 +14,14 @@ export const nonStreamingChatProxyHandler: ChatProxyHandlerType = async (
contents: openAiMessageToGeminiMessage(req.messages),
})
.then((it) => it.response.text())
.catch((err) => err?.message ?? err.toString())
.catch((err) => {
// 出现异常时打印请求参数和响应,以便调试
log.error(req)
log.error(err)
return err?.message ?? err.toString()
})

log.debug(JSON.stringify(geminiResp))
log.debug(geminiResp)

const resp: OpenAI.Chat.ChatCompletion = {
id: "chatcmpl-abc123",
Expand Down
6 changes: 4 additions & 2 deletions src/v1/chat/completions/StreamingChatProxyHandler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -41,13 +41,15 @@ export const streamingChatProxyHandler: ChatProxyHandlerType = async (
data: JSON.stringify(genOpenAiResp("", true)),
})
const geminiResult = (await response).text()
log.info(JSON.stringify(geminiResult))
log.info(geminiResult)
})
.catch(async (e) => {
await sseStream.writeSSE({
data: JSON.stringify(genOpenAiResp(e.toString(), true)),
})
log.info(e)
// 出现异常时打印请求参数和响应,以便调试
log.error(req)
log.error(e)
})

await sseStream.writeSSE({ data: "[DONE]" })
Expand Down

0 comments on commit c98335b

Please sign in to comment.