Skip to content

Commit

Permalink
Merge pull request #35 from upstash/fix-logger
Browse files Browse the repository at this point in the history
fix: missing debug info
  • Loading branch information
ogzhanolguncu authored Aug 6, 2024
2 parents a944046 + 5b80d68 commit fa70ac4
Show file tree
Hide file tree
Showing 3 changed files with 34 additions and 16 deletions.
2 changes: 1 addition & 1 deletion src/context-service/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ export class ContextService {

const clonedContext = structuredClone(originalContext);
const modifiedContext = await optionsWithDefault.onContextFetched?.(clonedContext);
await debug?.endRetrieveContext(modifiedContext);
await debug?.endRetrieveContext(modifiedContext ?? originalContext);

return {
formattedContext: formatFacts((modifiedContext ?? originalContext).map(({ data }) => data)),
Expand Down
13 changes: 12 additions & 1 deletion src/logger.ts
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,18 @@ export class ChatLogger {
/**
 * Pretty-prints a single chat log entry to the console as indented JSON.
 *
 * When the entry carries a positive `latency` value, it is rewritten as a
 * human-readable `"<n>ms"` string in the emitted JSON; otherwise the entry
 * is serialized unchanged (spreading `undefined` into an object literal is
 * a no-op).
 *
 * NOTE(review): the diff residue here contained both the superseded
 * single-line `console.log(JSON.stringify(logEntry, ...))` and its
 * replacement — kept as-is the method would log every entry twice. The
 * superseded statement has been removed.
 *
 * @param logEntry - the structured log entry to serialize and print
 */
private async writeToConsole(logEntry: ChatLogEntry): Promise<void> {
  const JSON_SPACING = 2;
  // eslint-disable-next-line no-console
  console.log(
    JSON.stringify(
      {
        ...logEntry,
        // Only positive latencies get the "ms" suffix treatment; a missing
        // or zero latency leaves the original field (if any) untouched.
        ...(logEntry.latency && logEntry.latency > 0
          ? { latency: `${logEntry.latency}ms` }
          : undefined),
      },
      undefined,
      JSON_SPACING
    )
  );
}

private shouldLog(level: LogLevel): boolean {
Expand Down
35 changes: 21 additions & 14 deletions src/rag-chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,8 @@ export class RAGChat {
const question = sanitizeQuestion(input);
const { formattedContext: context, metadata } = await this.context._getContext<TMetadata>(
optionsWithDefault,
input
input,
this.debug
);
const formattedHistory = await this.getChatHistory(optionsWithDefault);

Expand All @@ -104,20 +105,26 @@ export class RAGChat {
);

// Either calls streaming or non-streaming function from RAGChatBase. Streaming function returns AsyncIterator and allows callbacks like onComplete.
const llmResult = await this.llm.callLLM<TChatOptions>(optionsWithDefault, prompt, options, {
onChunk: optionsWithDefault.onChunk,
onComplete: async (output) => {
await this.debug?.endLLMResponse(output);
await this.history.addMessage({
message: {
content: output,
metadata: optionsWithDefault.metadata,
role: "assistant",
},
sessionId: optionsWithDefault.sessionId,
});
const llmResult = await this.llm.callLLM<TChatOptions>(
optionsWithDefault,
prompt,
options,
{
onChunk: optionsWithDefault.onChunk,
onComplete: async (output) => {
await this.debug?.endLLMResponse(output);
await this.history.addMessage({
message: {
content: output,
metadata: optionsWithDefault.metadata,
role: "assistant",
},
sessionId: optionsWithDefault.sessionId,
});
},
},
});
this.debug
);

return {
...llmResult,
Expand Down

0 comments on commit fa70ac4

Please sign in to comment.