import type { ChatMessage } from "gpt-tokenizer/GptEncoding";
import { addLogEntry } from "./logEntries";
import { generateChatResponse } from "./textGeneration";

export interface FollowUpQuestionParams {
  /** The original question or topic the user asked about. */
  topic: string;
  /** The assistant's existing response to that topic. */
  currentContent: string;
}

/**
 * Generates a single follow-up question based on the original topic and the
 * assistant's current response. Returns an empty string if no valid question
 * can be produced.
 */
export async function generateFollowUpQuestion({
  topic,
  currentContent,
}: FollowUpQuestionParams): Promise<string> {
  try {
    addLogEntry("Generating a follow-up question");

    // Replay the exchange so far, then ask the model for one concise
    // follow-up question in the same language as the conversation.
    const promptMessages: ChatMessage[] = [
      {
        role: "user",
        content: topic,
      },
      {
        role: "assistant",
        content: currentContent,
      },
      {
        role: "user",
        content:
          "Based on the previous question and your response, generate a single, " +
          "concise follow-up question that explores an important unexplored aspect " +
          "of the topic. The question should be 1-2 sentences maximum and end with a question mark. " +
          "Respond with just the question, no additional text or explanations. " +
          "Generate it using the same language as the previous question and your response.",
      },
    ];

    const response = await generateChatResponse(promptMessages, () => {});

    // Split the response into trimmed, non-empty lines.
    const lines = response
      .trim()
      .split("\n")
      .map((line) => line.trim())
      .filter((line) => line.length > 0);

    // Take the last line that ends with a question mark, skipping any
    // preamble the model may have added despite the instructions.
    const questionLine = lines.reverse().find((line) => line.endsWith("?"));

    if (!questionLine) {
      addLogEntry("No valid follow-up question generated");
      return "";
    }

    addLogEntry("Generated follow-up question successfully");
    return questionLine;
  } catch (error) {
    addLogEntry(`Error generating follow-up question: ${error}`);
    return "";
  }
}
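
// --- Usage sketch (illustrative) ---
// A minimal example of how this helper might be called from another module in
// the same project. The import path "./followUpQuestions", the function name
// "suggestNextQuestion", and the sample strings are assumptions for
// illustration only, not taken from the repository.
//
// import { generateFollowUpQuestion } from "./followUpQuestions";
//
// async function suggestNextQuestion(): Promise<void> {
//   const question = await generateFollowUpQuestion({
//     topic: "How does retrieval-augmented generation work?",
//     currentContent: "Retrieval-augmented generation combines a retriever with a generator...",
//   });
//   if (question) {
//     console.log("Follow-up:", question);
//   }
// }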