/**
 * Hugging Face helpers: an OpenAI-compatible provider for prompt generation and
 * a thin fetch wrapper around the Inference API for image generation.
 * Environment variables are loaded from .env via @std/dotenv.
 */
import '@std/dotenv/load';
import { createOpenAICompatible } from '@ai-sdk/openai-compatible';
// wrapLanguageModel and extractWeirdReasoningMiddleware are only needed by the
// commented-out reasoning model below; kept so it can be re-enabled quickly.
import { generateText, wrapLanguageModel } from 'ai';
import { extractWeirdReasoningMiddleware } from './extract-weird-reasoning-middleware.ts';
import { imagePromptGeneratorPrompt, systemPrompt } from './prompt.ts';

// TEST_URL (from .env) lets tests point at a local mock instead of the public API.
const HF_ENDPOINT = Deno.env.get('TEST_URL') || 'https://api-inference.huggingface.co';
// Assumption: a Hugging Face access token is provided as HF_TOKEN in .env; the
// public Inference API rejects unauthenticated requests to these models.
const HF_TOKEN = Deno.env.get('HF_TOKEN');

/** Request body for the Inference API's text-to-image task. */
type ImageGenerationParams = {
  inputs: string;
  parameters?: {
    guidance_scale?: number;
    negative_prompt?: string;
    num_inference_steps?: number;
    width?: number;
    height?: number;
    scheduler?: string;
    seed?: number;
  };
};
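
// Illustrative payload (sketch): the values below are examples only; accepted
// ranges and defaults depend on the chosen model.
// const exampleParams: ImageGenerationParams = {
//   inputs: 'a watercolor fox in a misty birch forest',
//   parameters: { width: 1024, height: 768, num_inference_steps: 4, seed: 42 },
// };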

export const huggingface = createOpenAICompatible({
  baseURL: `${HF_ENDPOINT}/v1`,
  name: 'huggingface',
  apiKey: HF_TOKEN, // sent as an `Authorization: Bearer` header when set
});

// Alternative: the DeepSeek-R1 distill, wrapped so stray `<think>` reasoning is
// separated from the generated text (see extract-weird-reasoning-middleware.ts).
// Swap the active line below to use it instead.
// export const model = wrapLanguageModel({
//   model: huggingface('deepseek-ai/DeepSeek-R1-Distill-Qwen-32B'),
//   middleware: extractWeirdReasoningMiddleware({ tagName: 'think', onlyClosingTag: true }),
// });
export const model = huggingface('meta-llama/Llama-3.2-11B-Vision-Instruct');

/** Generates text with the configured chat model; `system` sets the system prompt. */
export async function generatePrompt(prompt: string, system: string) {
  const { text } = await generateText({
    model,
    system,
    prompt,
    maxRetries: 3,
    maxTokens: 1024,
  });
  return text;
}

/** Calls the Inference API's text-to-image endpoint and returns the raw image bytes. */
export async function generateImage(model: 'black-forest-labs/FLUX.1-dev' | 'black-forest-labs/FLUX.1-schnell' | 'stabilityai/stable-diffusion-3.5-large', params: ImageGenerationParams) {
  const res = await fetch(`${HF_ENDPOINT}/models/${model}`, {
    method: 'POST',
    headers: {
      'content-type': 'application/json',
      'x-use-cache': 'false', // always render a fresh image rather than a cached one
      // Assumption: the public endpoint expects a bearer token; omitted when HF_TOKEN is unset.
      ...(HF_TOKEN ? { authorization: `Bearer ${HF_TOKEN}` } : {}),
    },
    body: JSON.stringify(params),
  });
  if (!res.ok) throw new Error(`Failed to generate image with ${model}: ${res.status} ${res.statusText}`);
  return await res.arrayBuffer();
}

// Quick manual check when run directly: print one generated image prompt on a single line.
if (import.meta.main) {
  const prompt = await generatePrompt(imagePromptGeneratorPrompt, systemPrompt);
  console.log(prompt.replaceAll('\n', ' '));
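  // Sketch: feed the generated prompt to the image endpoint and save the result.
  // Assumes HF_TOKEN is set and the script runs with --allow-write; uncomment to try.
  // const image = await generateImage('black-forest-labs/FLUX.1-schnell', {
  //   inputs: prompt,
  //   parameters: { width: 1024, height: 1024, num_inference_steps: 4 },
  // });
  // await Deno.writeFile('image.png', new Uint8Array(image));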
}