import { debug } from 'debug';
import { createFallback } from 'ai-fallback';
import { streamText } from 'ai';
import { createOpenAI } from '@ai-sdk/openai';
import * as fs from 'node:fs/promises';
import * as path from 'node:path';

import { makeCacheFileHelper } from './path';

const log = debug('llms-txt-gen:rewrite');

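// Rewritten output is cached on disk, keyed by URL, so repeated runs skip inference.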
const cacheDir = path.join(process.cwd(), '.cache/rewrite');
await fs.mkdir(cacheDir, { recursive: true });
const getCacheFile = makeCacheFileHelper(cacheDir, '.txt');

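// System message sent with every rewrite request; it constrains the model to
// faithful reformatting of the input rather than writing new content.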
const SYSTEM_PROMPT = `
Reformat markdown content you're given into an llms-full.txt file, also in markdown format
- Reformat for an AI and paraphrase where necessary, but be faithful to the original
- Keep code snippets and keep them in TypeScript or TypeScript typings format
- For markdown tables, keep all relevant information in the table
- Don't mention other content, pages, or external content
- Don't write your own content
- Don't add or use any knowledge you may have on the subject
- Don't add your own interpretation or notes and only reinterpret the input content
- Don't wrap the output in a markdown code block
Only return the reformatted markdown content and stop when you've processed all input markdown content
`;

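// OpenAI-compatible remote endpoint configured via environment variables; used
// as the fallback model below (the @cf/... model id suggests Cloudflare Workers AI).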
const ai = createOpenAI({
  apiKey: process.env.OPENAI_API_KEY,
  baseURL: process.env.OPENAI_API_URL,
});

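// Local Ollama server, reached through its OpenAI-compatible API; tried first
// in the fallback chain below.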
const ollama = createOpenAI({
  baseURL: 'http://localhost:11434/v1',
});

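/**
 * Rewrites a page's markdown into llms-full.txt-style markdown, returning a
 * cached result when one already exists for the URL.
 */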
export async function rewriteMarkdown(url: URL, input: string) {
  const cacheFile = await getCacheFile(url);
  let content: string;
  try {
    content = await fs.readFile(cacheFile, 'utf-8');
    if (content) {
      log('prompt output from cache', url.pathname);
      return content;
    }
  } catch {} // cache miss or unreadable file; fall through to inference
  log('prompting to rewrite', url.pathname);
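  // Near-zero temperature keeps the rewrite close to the source text;
  // experimental_continueSteps lets long pages continue across up to maxSteps
  // generation steps instead of truncating at the model's output limit.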
  const { textStream } = streamText({
    temperature: 0.05,
    maxSteps: 5,
    experimental_continueSteps: true,
    model: createFallback({
      models: [
        ollama('phi4:14b'),
        ai('@cf/mistralai/mistral-small-3.1-24b-instruct'),
      ],
      onError(error, modelId) {
        log(`error using model ${modelId}`, error);
      },
    }),
    onStepFinish({ finishReason, text }) {
      if (finishReason !== 'stop')
        log(`inference step (length: ${text.length})`, finishReason);
    },
    messages: [
      {
        role: 'system',
        content: SYSTEM_PROMPT.trim(),
      },
      {
        role: 'user',
        content: input,
      },
    ],
  });
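  // Drain the stream into a single string, then persist it to the cache.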
  const output: string[] = [];
  for await (const chunk of textStream)
    output.push(chunk);
  const text = output.join('');
  await fs.writeFile(cacheFile, text, 'utf-8');
  return text;
}
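
// Minimal usage sketch (hypothetical caller; assumes the page markdown was
// fetched elsewhere, e.g. by a hypothetical fetchPageMarkdown helper):
//
//   const markdown = await fetchPageMarkdown(url);
//   const text = await rewriteMarkdown(new URL('https://example.com/docs'), markdown);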