import { env } from '$env/dynamic/private'
import { get, writable } from 'svelte/store'
import robotsParser, { type Robot } from 'robots-parser'
import { PUBLIC_BASE_URL } from '$env/static/public'

// Module-level caches: the raw robots.txt text, its parsed form, and the
// timestamp of the last successful fetch from the Dark Visitors API.
const cachedParsedRobots = writable<Robot | null>(null)
const cachedRobots = writable<string>("")
const lastFetched = writable<number>(Date.now())

// Fetch a generated robots.txt from the Dark Visitors API, disallowing "/"
// for the listed AI agent types.
const fetchRobotsTxt = async () => {
    const robotsTxtResp = await fetch(
        "https://api.darkvisitors.com/robots-txts",
        {
            method: "POST",
            headers: {
                "Authorization": `Bearer ${env.DARK_VISITORS_TOKEN}`,
                "Content-Type": "application/json"
            },
            body: JSON.stringify({
                agent_types: [
                    "AI Assistant",
                    "AI Data Scraper",
                    "AI Search Crawler",
                    "Undocumented AI Agent",
                ],
                disallow: "/"
            })
        }
    )
    // Note: non-2xx responses are not handled here; whatever body comes back is cached.
    const robotsTxt = await robotsTxtResp.text()
    lastFetched.set(Date.now())
    return robotsTxt
}

// Return the cached robots.txt, refreshing it when the cache is empty or older
// than 24 hours.
export const getRobotsTxt = async () => {
    let robotsTxt = get(cachedRobots)
    if (robotsTxt.length === 0 || Date.now() - get(lastFetched) > 1000 * 60 * 60 * 24) {
        robotsTxt = await fetchRobotsTxt()
        cachedRobots.set(robotsTxt)
        // Keep the parsed copy in sync with the raw text.
        cachedParsedRobots.set(robotsParser(`${PUBLIC_BASE_URL}/robots.txt`, robotsTxt))
    }
    return robotsTxt
}
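// Example (sketch, not part of this module): getRobotsTxt can back a SvelteKit
// endpoint so the generated file is served at /robots.txt. The route path and
// the import path below are assumptions about the surrounding project.
//
//   // src/routes/robots.txt/+server.ts
//   import type { RequestHandler } from '@sveltejs/kit'
//   import { getRobotsTxt } from '$lib/server/robots'
//
//   export const GET: RequestHandler = async () => {
//       return new Response(await getRobotsTxt(), {
//           headers: { "Content-Type": "text/plain" }
//       })
//   }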

// Check whether a user agent is allowed to access a URL under the cached
// robots.txt rules. An empty user agent is treated as not allowed.
export const testUa = async (url: string, ua: string) => {
    if (ua.length === 0) return false
    let parsedRobots = get(cachedParsedRobots)
    if (parsedRobots === null) {
        // Cold cache: fetch (or reuse) the raw text and parse it once.
        parsedRobots = robotsParser(`${PUBLIC_BASE_URL}/robots.txt`, await getRobotsTxt())
        cachedParsedRobots.set(parsedRobots)
    }
    // robots-parser may return undefined for URLs outside the scope of this robots.txt.
    return parsedRobots.isAllowed(url, ua)
}
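
// Example (sketch, not part of this module): testUa could be wired into a server
// hook to turn the robots.txt rules into hard blocks. The hook, its file path, and
// the import path are assumptions; blocking only on an explicit `false` means
// out-of-scope URLs (where isAllowed returns undefined) pass through.
//
//   // src/hooks.server.ts
//   import type { Handle } from '@sveltejs/kit'
//   import { testUa } from '$lib/server/robots'
//
//   export const handle: Handle = async ({ event, resolve }) => {
//       const ua = event.request.headers.get('user-agent') ?? ''
//       // Block only when the parsed rules explicitly disallow this UA for this URL.
//       if (ua && (await testUa(event.url.href, ua)) === false) {
//           return new Response('Forbidden', { status: 403 })
//       }
//       return resolve(event)
//   }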