// InfoFlow/src/llm/gemini.ts — Gemini summarization helper
// (original paste metadata: 50 lines, 1.7 KiB, TypeScript)
/**
 * Summarizes a batch of WeChat articles with the Gemini API.
 *
 * Joins the article bodies with `---` separators and asks the model (prompt
 * is in Chinese) for a summary of at most 300 characters; when the content
 * looks investment-related the prompt additionally requests a short/mid/long
 * term strategy weighted toward the short term.
 *
 * @param articleContent article bodies to summarize
 * @returns the model's summary text
 * @throws Error when the API_KEY environment variable is not set, or when
 *         the underlying SDK call rejects (network/auth failure)
 */
async function callGeminiAPI(articleContent: string[]): Promise<string> {
  // Lazy require keeps module load cheap; the SDK is only needed here.
  // (HarmCategory/HarmBlockThreshold were previously destructured but unused.)
  const { GoogleGenerativeAI } = require("@google/generative-ai");

  const apiKey = process.env.API_KEY;
  if (!apiKey) {
    // Fail fast instead of sending an unauthenticated request with "".
    throw new Error("API_KEY environment variable is not set");
  }
  // SECURITY: never log the key itself — it is a secret credential.

  const genAI = new GoogleGenerativeAI(apiKey);
  const model = genAI.getGenerativeModel({
    model: "gemini-2.0-flash-lite-preview-02-05",
  });

  const generationConfig = {
    temperature: 1,
    topP: 0.95,
    topK: 40,
    maxOutputTokens: 8192,
    responseMimeType: "text/plain",
  };

  // Seed the chat with the summarization instructions; the articles
  // themselves are sent as the follow-up message below.
  const chatSession = model.startChat({
    generationConfig,
    history: [
      {
        role: "user",
        parts: [
          { text: "这里有几篇文章,用 --- 分割,帮我简要汇总一下,控制在 300 字以内。" },
          { text: "如果你认为是投资主题,我希望你能额外帮我总结和设计出一个短中长线的投资策略,要求详略得当,短线权重大一些,长线一笔带过就好" },
        ],
      },
    ],
  });

  const result = await chatSession.sendMessage(articleContent.join("\n---\n"));
  return result.response.text();
}
export default callGeminiAPI;