// T1ckbase — first commit (90989cc)
import '@std/dotenv/load';
import { createOpenAICompatible } from '@ai-sdk/openai-compatible';
import { generateText, wrapLanguageModel } from 'ai';
import { extractWeirdReasoningMiddleware } from './extract-weird-reasoning-middleware.ts';
import { imagePromptGeneratorPrompt, systemPrompt } from './prompt.ts';
// Hugging Face inference endpoint; TEST_URL env var overrides it for local/staging runs.
const HF_ENDPOINT = Deno.env.get('TEST_URL') || 'https://api-inference.huggingface.co';
// Request payload for the HF text-to-image inference API (`POST /models/{model}`).
type ImageGenerationParams = {
  // Text prompt describing the image to generate.
  inputs: string;
  // Optional diffusion parameters; all fields pass through to the API unchanged.
  parameters?: {
    guidance_scale?: number;
    negative_prompt?: string;
    num_inference_steps?: number;
    width?: number;
    height?: number;
    scheduler?: string;
    seed?: number;
  };
};
// OpenAI-compatible provider pointed at the HF endpoint's /v1 chat-completions API.
export const huggingface = createOpenAICompatible({
  baseURL: `${HF_ENDPOINT}/v1`,
  name: 'huggingface',
});
// Previous setup: DeepSeek-R1 distill wrapped with reasoning-tag extraction middleware.
// export const model = wrapLanguageModel({
// model: huggingface('deepseek-ai/DeepSeek-R1-Distill-Qwen-32B'),
// middleware: extractWeirdReasoningMiddleware({ tagName: 'think', onlyClosingTag: true }),
// });
// Default chat model used by generatePrompt below.
export const model = huggingface('meta-llama/Llama-3.2-11B-Vision-Instruct');
/**
 * Generates text from `prompt` using the module-level default `model`.
 *
 * @param prompt user prompt forwarded to the model
 * @param systemPrompt system instruction for this call
 *   (NOTE: this parameter shadows the `systemPrompt` imported from './prompt.ts'
 *   inside the function body — callers must pass one explicitly)
 * @returns the generated text
 * @throws whatever `generateText` throws after 3 failed retries
 */
export async function generatePrompt(prompt: string, systemPrompt: string): Promise<string> {
  const { text } = await generateText({
    model,
    system: systemPrompt,
    prompt,
    maxRetries: 3, // retry transient endpoint failures
    maxTokens: 1024,
  });
  return text;
}
/**
 * Generates an image via the Hugging Face inference API and returns the raw
 * image bytes.
 *
 * NOTE: the `model` parameter shadows the module-level chat `model` export
 * inside this function body.
 *
 * @param model one of the supported text-to-image model IDs
 * @param params prompt and optional diffusion parameters, sent as the JSON body
 * @returns the response body (image bytes) as an ArrayBuffer
 * @throws Error when the response status is not OK
 */
export async function generateImage(model: 'black-forest-labs/FLUX.1-dev' | 'black-forest-labs/FLUX.1-schnell' | 'stabilityai/stable-diffusion-3.5-large', params: ImageGenerationParams): Promise<ArrayBuffer> {
  const res = await fetch(`${HF_ENDPOINT}/models/${model}`, {
    method: 'POST',
    headers: {
      // The body is JSON — without this, fetch defaults to text/plain.
      'Content-Type': 'application/json',
      // Bypass HF's inference cache so repeated identical prompts yield fresh images.
      'x-use-cache': 'false',
    },
    body: JSON.stringify(params),
  });
  if (!res.ok) throw new Error(`Failed to generate image ${res.statusText} ${res.status}`);
  return await res.arrayBuffer();
}
// CLI entry point: generate one image prompt and print it as a single line.
if (import.meta.main) {
  const generated = await generatePrompt(imagePromptGeneratorPrompt, systemPrompt);
  console.log(generated.split('\n').join(' '));
}