tracks lexicons and how many times they appeared on the jetstream

Compare changes


+12 -3
README.md
···
-
a webapp and server that monitors the jetstream and tracks the different lexicons as they are created or deleted.
-
it shows you which collections are most active on the network.
the backend uses rust with fjall as the db, and the frontend is built with sveltekit.
see [here](https://gaze.systems/nsid-tracker) for a hosted instance of it.
## running
### with nix
-
- run the server: `nix run git+https://tangled.sh/@poor.dog/nsid-tracker#server`
- build the client: `nix build git+https://tangled.sh/@poor.dog/nsid-tracker#client`
### manually
···
+
a webapp and server that monitors the jetstream and tracks the different
+
lexicons as they are created or deleted. it shows you which collections are most
+
active on the network.
the backend uses rust with fjall as the db, and the frontend is built with sveltekit.
see [here](https://gaze.systems/nsid-tracker) for a hosted instance of it.
+
## performance / storage
+
+
it uses about 50MB of space for 620M recorded events (an event being just a
+
timestamp in seconds and a deleted boolean for now), and takes around 50-60ms to
+
query 300-400k events.
+
+
this is on a machine with an AMD EPYC 7281 (32) @ 2.100GHz.
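
620M events in roughly 50MB works out to well under a byte per event. that is only
plausible because events are packed into blocks keyed by a start timestamp, encoded
with varints, and compressed (the server code in this diff uses `ordered_varint` and
fjall's `Miniz(9)` compression). the sketch below is a rough illustration under the
assumption of delta-encoded timestamps — the struct and helper names are made up, not
the actual on-disk format — but it shows why most events cost only a byte or two even
before compression:

```rust
// illustrative sketch, not the real format: the actual server uses
// `ordered_varint` and Miniz-compressed fjall blocks; names here are made up.

/// one recorded event: second-resolution timestamp plus a deleted flag.
struct Event {
    timestamp: u64, // seconds
    deleted: bool,
}

/// LEB128-style unsigned varint, standing in for the real varint codec.
fn push_varint(mut v: u64, out: &mut Vec<u8>) {
    loop {
        let byte = (v & 0x7f) as u8;
        v >>= 7;
        if v == 0 {
            out.push(byte);
            break;
        }
        out.push(byte | 0x80);
    }
}

/// delta-encode a block of events against its start timestamp.
/// consecutive events are usually 0-2 seconds apart, so the delta (with the
/// deleted flag packed into the low bit) fits in a single byte, and the
/// highly repetitive output compresses further on disk.
fn encode_block(start: u64, events: &[Event]) -> Vec<u8> {
    let mut out = Vec::new();
    let mut prev = start;
    for e in events {
        let delta = e.timestamp - prev;
        push_varint((delta << 1) | e.deleted as u64, &mut out);
        prev = e.timestamp;
    }
    out
}

fn main() {
    let events = [
        Event { timestamp: 1_700_000_000, deleted: false },
        Event { timestamp: 1_700_000_001, deleted: false },
        Event { timestamp: 1_700_000_003, deleted: true },
    ];
    let block = encode_block(1_700_000_000, &events);
    println!("{} events -> {} bytes before compression", events.len(), block.len());
}
```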
+
## running
### with nix
+
- build the server: `nix build git+https://tangled.sh/@poor.dog/nsid-tracker#server`
- build the client: `nix build git+https://tangled.sh/@poor.dog/nsid-tracker#client`
### manually
+9
client/bun.lock
···
"name": "nsid-tracker",
"dependencies": {
"@number-flow/svelte": "^0.3.9",
},
"devDependencies": {
"@eslint/compat": "^1.2.5",
···
"globals": ["globals@16.3.0", "", {}, "sha512-bqWEnJ1Nt3neqx2q5SFfGS8r/ahumIakg3HcwtNlrVlwXIeNumWn/c7Pn/wKzGhf6SaW6H6uWXLqC30STCMchQ=="],
"graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="],
"graphemer": ["graphemer@1.4.0", "", {}, "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag=="],
···
"svelte": ["svelte@5.36.8", "", { "dependencies": { "@ampproject/remapping": "^2.3.0", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "esm-env": "^1.2.1", "esrap": "^2.1.0", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-8JbZWQu96hMjH/oYQPxXW6taeC6Awl6muGHeZzJTxQx7NGRQ/J9wN1hkzRKLOlSDlbS2igiFg7p5xyTp5uXG3A=="],
"svelte-check": ["svelte-check@4.3.0", "", { "dependencies": { "@jridgewell/trace-mapping": "^0.3.25", "chokidar": "^4.0.1", "fdir": "^6.2.0", "picocolors": "^1.0.0", "sade": "^1.7.4" }, "peerDependencies": { "svelte": "^4.0.0 || ^5.0.0-next.0", "typescript": ">=5.0.0" }, "bin": { "svelte-check": "bin/svelte-check" } }, "sha512-Iz8dFXzBNAM7XlEIsUjUGQhbEE+Pvv9odb9+0+ITTgFWZBGeJRRYqHUUglwe2EkLD5LIsQaAc4IUJyvtKuOO5w=="],
"svelte-eslint-parser": ["svelte-eslint-parser@1.3.0", "", { "dependencies": { "eslint-scope": "^8.2.0", "eslint-visitor-keys": "^4.0.0", "espree": "^10.0.0", "postcss": "^8.4.49", "postcss-scss": "^4.0.9", "postcss-selector-parser": "^7.0.0" }, "peerDependencies": { "svelte": "^3.37.0 || ^4.0.0 || ^5.0.0" }, "optionalPeers": ["svelte"] }, "sha512-VCgMHKV7UtOGcGLGNFSbmdm6kEKjtzo5nnpGU/mnx4OsFY6bZ7QwRF5DUx+Hokw5Lvdyo8dpk8B1m8mliomrNg=="],
···
"tapable": ["tapable@2.2.2", "", {}, "sha512-Re10+NauLTMCudc7T5WLFLAwDhQ0JWdrMK+9B2M8zR5hRExKmsRDCBA7/aV/pNJFltmBFO5BAMlQFi/vq3nKOg=="],
"tar": ["tar@7.4.3", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.0.1", "mkdirp": "^3.0.1", "yallist": "^5.0.0" } }, "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw=="],
"tinyglobby": ["tinyglobby@0.2.14", "", { "dependencies": { "fdir": "^6.4.4", "picomatch": "^4.0.2" } }, "sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ=="],
···
"name": "nsid-tracker",
"dependencies": {
"@number-flow/svelte": "^0.3.9",
+
"svelte-adapter-bun": "^0.5.2",
},
"devDependencies": {
"@eslint/compat": "^1.2.5",
···
"globals": ["globals@16.3.0", "", {}, "sha512-bqWEnJ1Nt3neqx2q5SFfGS8r/ahumIakg3HcwtNlrVlwXIeNumWn/c7Pn/wKzGhf6SaW6H6uWXLqC30STCMchQ=="],
+
"globalyzer": ["globalyzer@0.1.0", "", {}, "sha512-40oNTM9UfG6aBmuKxk/giHn5nQ8RVz/SS4Ir6zgzOv9/qC3kKZ9v4etGTcJbEl/NyVQH7FGU7d+X1egr57Md2Q=="],
+
+
"globrex": ["globrex@0.1.2", "", {}, "sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg=="],
+
"graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="],
"graphemer": ["graphemer@1.4.0", "", {}, "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag=="],
···
"svelte": ["svelte@5.36.8", "", { "dependencies": { "@ampproject/remapping": "^2.3.0", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "esm-env": "^1.2.1", "esrap": "^2.1.0", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-8JbZWQu96hMjH/oYQPxXW6taeC6Awl6muGHeZzJTxQx7NGRQ/J9wN1hkzRKLOlSDlbS2igiFg7p5xyTp5uXG3A=="],
+
"svelte-adapter-bun": ["svelte-adapter-bun@0.5.2", "", { "dependencies": { "tiny-glob": "^0.2.9" } }, "sha512-xEtFgaal6UgrCwwkSIcapO9kopoFNUYCYqyKCikdqxX9bz2TDYnrWQZ7qBnkunMxi1HOIERUCvTcebYGiarZLA=="],
+
"svelte-check": ["svelte-check@4.3.0", "", { "dependencies": { "@jridgewell/trace-mapping": "^0.3.25", "chokidar": "^4.0.1", "fdir": "^6.2.0", "picocolors": "^1.0.0", "sade": "^1.7.4" }, "peerDependencies": { "svelte": "^4.0.0 || ^5.0.0-next.0", "typescript": ">=5.0.0" }, "bin": { "svelte-check": "bin/svelte-check" } }, "sha512-Iz8dFXzBNAM7XlEIsUjUGQhbEE+Pvv9odb9+0+ITTgFWZBGeJRRYqHUUglwe2EkLD5LIsQaAc4IUJyvtKuOO5w=="],
"svelte-eslint-parser": ["svelte-eslint-parser@1.3.0", "", { "dependencies": { "eslint-scope": "^8.2.0", "eslint-visitor-keys": "^4.0.0", "espree": "^10.0.0", "postcss": "^8.4.49", "postcss-scss": "^4.0.9", "postcss-selector-parser": "^7.0.0" }, "peerDependencies": { "svelte": "^3.37.0 || ^4.0.0 || ^5.0.0" }, "optionalPeers": ["svelte"] }, "sha512-VCgMHKV7UtOGcGLGNFSbmdm6kEKjtzo5nnpGU/mnx4OsFY6bZ7QwRF5DUx+Hokw5Lvdyo8dpk8B1m8mliomrNg=="],
···
"tapable": ["tapable@2.2.2", "", {}, "sha512-Re10+NauLTMCudc7T5WLFLAwDhQ0JWdrMK+9B2M8zR5hRExKmsRDCBA7/aV/pNJFltmBFO5BAMlQFi/vq3nKOg=="],
"tar": ["tar@7.4.3", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.0.1", "mkdirp": "^3.0.1", "yallist": "^5.0.0" } }, "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw=="],
+
+
"tiny-glob": ["tiny-glob@0.2.9", "", { "dependencies": { "globalyzer": "0.1.0", "globrex": "^0.1.2" } }, "sha512-g/55ssRPUjShh+xkfx9UPDXqhckHEsHr4Vd9zX55oSdGZc/MD0m3sferOkwWtp98bv+kcVfEHtRJgBVJzelrzg=="],
"tinyglobby": ["tinyglobby@0.2.14", "", { "dependencies": { "fdir": "^6.4.4", "picomatch": "^4.0.2" } }, "sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ=="],
+2 -1
client/package.json
···
},
"type": "module",
"dependencies": {
-
"@number-flow/svelte": "^0.3.9"
}
}
···
},
"type": "module",
"dependencies": {
+
"@number-flow/svelte": "^0.3.9",
+
"svelte-adapter-bun": "^0.5.2"
}
}
+4
client/src/app.css
···
overflow-y: overlay;
overflow-y: auto; /* Fallback for browsers that don't support overlay */
}
···
overflow-y: overlay;
overflow-y: auto; /* Fallback for browsers that don't support overlay */
}
+
+
.wsbadge {
+
@apply text-sm font-semibold mt-1.5 px-2.5 py-0.5 rounded-full border;
+
}
+9 -9
client/src/app.html
···
<!doctype html>
<html lang="en">
-
<head>
-
<meta charset="utf-8" />
-
<link rel="icon" href="%sveltekit.assets%/favicon.svg" />
-
<meta name="viewport" content="width=device-width, initial-scale=1" />
-
%sveltekit.head%
-
</head>
-
<body data-sveltekit-preload-data="hover">
-
<div style="display: contents">%sveltekit.body%</div>
-
</body>
</html>
···
<!doctype html>
<html lang="en">
+
<head>
+
<meta charset="utf-8" />
+
<link rel="icon" href="%sveltekit.assets%/favicon.svg" />
+
<meta name="viewport" content="width=device-width, initial-scale=1" />
+
%sveltekit.head%
+
</head>
+
<body class="bg-white dark:bg-gray-900" data-sveltekit-preload-data="hover">
+
<div style="display: contents">%sveltekit.body%</div>
+
</body>
</html>
+2 -9
client/src/lib/components/BskyToggle.svelte
···
<!-- svelte-ignore a11y_no_static_element_interactions -->
<button
onclick={onBskyToggle}
-
class="wsbadge !mt-0 !font-normal bg-yellow-100 hover:bg-yellow-200 border-yellow-300"
>
<input checked={dontShowBsky} type="checkbox" />
-
<span class="ml-0.5"> hide app.bsky.* </span>
</button>
-
-
<style lang="postcss">
-
@reference "../../app.css";
-
.wsbadge {
-
@apply text-sm font-semibold mt-1.5 px-2.5 py-0.5 rounded-full border;
-
}
-
</style>
···
<!-- svelte-ignore a11y_no_static_element_interactions -->
<button
onclick={onBskyToggle}
+
class="wsbadge !mt-0 !font-normal bg-blue-100 dark:bg-blue-900 hover:bg-blue-200 dark:hover:bg-blue-800 border-blue-300 dark:border-blue-700"
>
<input checked={dontShowBsky} type="checkbox" />
+
<span class="ml-0.5 text-black dark:text-gray-200"> hide app.bsky.* </span>
</button>
+8 -5
client/src/lib/components/EventCard.svelte
···
</script>
<div
-
class="group flex flex-col gap-2 p-1.5 md:p-3 min-h-64 bg-white border border-gray-200 rounded-lg hover:shadow-lg md:hover:-translate-y-1 transition-all duration-200 transform"
class:has-activity={isAnimating}
style="--border-thickness: {borderThickness}px"
>
<div class="flex items-start gap-2">
<div
-
class="text-sm font-bold text-blue-600 bg-blue-100 px-3 py-1 rounded-full"
>
#{index + 1}
</div>
<div
title={event.nsid}
-
class="font-mono text-sm text-gray-700 mt-0.5 leading-relaxed rounded-full text-nowrap text-ellipsis overflow-hidden group-hover:overflow-visible group-hover:bg-gray-50 border-gray-100 group-hover:border transition-all px-1"
>
{event.nsid}
</div>
···
</div>
</div>
-
<style>
.has-activity {
position: relative;
transition: all 0.2s ease-out;
}
.has-activity::before {
content: "";
position: absolute;
top: calc(-1 * var(--border-thickness));
left: calc(-1 * var(--border-thickness));
right: calc(-1 * var(--border-thickness));
bottom: calc(-1 * var(--border-thickness));
-
border: var(--border-thickness) solid rgba(59, 130, 246, 0.8);
border-radius: calc(0.5rem + var(--border-thickness));
pointer-events: none;
transition: all 0.3s ease-out;
···
</script>
<div
+
class="group flex flex-col gap-2 p-1.5 md:p-3 min-h-64 bg-white dark:bg-gray-800/50 border border-gray-200 dark:border-gray-950 rounded-lg hover:shadow-lg md:hover:-translate-y-1 transition-all duration-200 transform"
class:has-activity={isAnimating}
style="--border-thickness: {borderThickness}px"
>
<div class="flex items-start gap-2">
<div
+
class="text-sm font-bold text-blue-600 bg-blue-100 dark:bg-indigo-950 px-3 py-1 rounded-full"
>
#{index + 1}
</div>
<div
title={event.nsid}
+
class="font-mono text-sm text-gray-700 dark:text-gray-300 mt-0.5 leading-relaxed rounded-full text-nowrap text-ellipsis overflow-hidden group-hover:overflow-visible group-hover:bg-gray-50 dark:group-hover:bg-gray-700 border-gray-100 dark:border-gray-900 group-hover:border transition-all px-1"
>
{event.nsid}
</div>
···
</div>
</div>
+
<style lang="postcss">
.has-activity {
position: relative;
transition: all 0.2s ease-out;
}
.has-activity::before {
+
@reference "../../app.css";
+
@apply border-blue-500 dark:border-blue-800;
content: "";
position: absolute;
top: calc(-1 * var(--border-thickness));
left: calc(-1 * var(--border-thickness));
right: calc(-1 * var(--border-thickness));
bottom: calc(-1 * var(--border-thickness));
+
border-width: var(--border-thickness);
+
border-style: solid;
border-radius: calc(0.5rem + var(--border-thickness));
pointer-events: none;
transition: all 0.3s ease-out;
+5 -10
client/src/lib/components/FilterControls.svelte
···
</script>
<div
-
class="wsbadge !pl-2 !px-1 !mt-0 !font-normal bg-blue-100 hover:bg-blue-200 border-blue-300"
>
-
<label for="filter-regex" class="text-blue-800 mr-1"> filter: </label>
<input
id="filter-regex"
value={filterRegex}
oninput={(e) => onFilterChange((e.target as HTMLInputElement).value)}
type="text"
placeholder="regex..."
-
class="bg-blue-50 text-blue-900 placeholder-blue-400 border border-blue-200 rounded-full px-1 outline-none focus:bg-white focus:border-blue-400 min-w-0 w-24"
/>
</div>
-
-
<style lang="postcss">
-
@reference "../../app.css";
-
.wsbadge {
-
@apply text-sm font-semibold mt-1.5 px-2.5 py-0.5 rounded-full border;
-
}
-
</style>
···
</script>
<div
+
class="wsbadge !pl-2 !px-1 !mt-0 !font-normal bg-blue-100 dark:bg-blue-900 hover:bg-blue-200 dark:hover:bg-blue-800 border-blue-300 dark:border-blue-700"
>
+
<label for="filter-regex" class="text-blue-800 dark:text-gray-200 mr-1">
+
filter:
+
</label>
<input
id="filter-regex"
value={filterRegex}
oninput={(e) => onFilterChange((e.target as HTMLInputElement).value)}
type="text"
placeholder="regex..."
+
class="bg-blue-50 dark:bg-blue-950 text-blue-900 dark:text-gray-400 placeholder-blue-400 dark:placeholder-blue-700 border border-blue-200 dark:border-blue-700 rounded-full px-1 outline-none focus:border-blue-400 min-w-0 w-24"
/>
</div>
+6 -11
client/src/lib/components/RefreshControl.svelte
···
</script>
<div
-
class="wsbadge !pl-2 !px-1 !mt-0 !font-normal bg-green-100 hover:bg-green-200 border-green-300"
>
-
<label for="refresh-rate" class="text-green-800 mr-1">refresh:</label>
<input
id="refresh-rate"
value={refreshRate}
···
pattern="[0-9]*"
min="0"
placeholder="real-time"
-
class="bg-green-50 text-green-900 placeholder-green-400 border border-green-200 rounded-full px-1 outline-none focus:bg-white focus:border-green-400 min-w-0 w-20"
/>
-
<span class="text-green-700">s</span>
</div>
-
-
<style lang="postcss">
-
@reference "../../app.css";
-
.wsbadge {
-
@apply text-sm font-semibold mt-1.5 px-2.5 py-0.5 rounded-full border;
-
}
-
</style>
···
</script>
<div
+
class="wsbadge !pl-2 !px-1 !mt-0 !font-normal bg-lime-100 dark:bg-lime-900 dark:hover:bg-lime-800 hover:bg-lime-200 border-lime-300 dark:border-lime-700"
>
+
<label for="refresh-rate" class="text-lime-800 dark:text-lime-200 mr-1"
+
>refresh:</label
+
>
<input
id="refresh-rate"
value={refreshRate}
···
pattern="[0-9]*"
min="0"
placeholder="real-time"
+
class="bg-green-50 dark:bg-green-900 text-lime-900 dark:text-lime-200 placeholder-lime-600 dark:placeholder-lime-400 border border-lime-200 dark:border-lime-700 rounded-full px-1 outline-none focus:border-lime-400 min-w-0 w-20"
/>
+
<span class="text-lime-800 dark:text-lime-200">s</span>
</div>
+31
client/src/lib/components/ShowControls.svelte
···
···
+
<script lang="ts">
+
import type { ShowOption } from "$lib/types";
+
+
interface Props {
+
show: ShowOption;
+
onShowChange: (value: ShowOption) => void;
+
}
+
+
let { show, onShowChange }: Props = $props();
+
+
const showOptions: ShowOption[] = ["server init", "stream start"];
+
</script>
+
+
<div
+
class="wsbadge !pl-2 !px-1 !mt-0 !font-normal bg-pink-100 dark:bg-pink-800 hover:bg-pink-200 dark:hover:bg-pink-700 border-pink-300 dark:border-pink-700"
+
>
+
<label for="show" class="text-pink-800 dark:text-pink-100 mr-1">
+
show since:
+
</label>
+
<select
+
id="show"
+
value={show}
+
onchange={(e) =>
+
onShowChange((e.target as HTMLSelectElement).value as ShowOption)}
+
class="bg-pink-50 dark:bg-pink-900 text-pink-900 dark:text-pink-100 border border-pink-200 dark:border-pink-700 rounded-full px-1 outline-none focus:border-pink-400 min-w-0"
+
>
+
{#each showOptions as option}
+
<option value={option}>{option}</option>
+
{/each}
+
</select>
+
</div>
+5 -10
client/src/lib/components/SortControls.svelte
···
</script>
<div
-
class="wsbadge !pl-2 !px-1 !mt-0 !font-normal bg-purple-100 hover:bg-purple-200 border-purple-300"
>
-
<label for="sort-by" class="text-purple-800 mr-1"> sort by: </label>
<select
id="sort-by"
value={sortBy}
onchange={(e) =>
onSortChange((e.target as HTMLSelectElement).value as SortOption)}
-
class="bg-purple-50 text-purple-900 border border-purple-200 rounded-full px-1 outline-none focus:bg-white focus:border-purple-400 min-w-0"
>
{#each sortOptions as option}
<option value={option.value}>{option.label}</option>
{/each}
</select>
</div>
-
-
<style lang="postcss">
-
@reference "../../app.css";
-
.wsbadge {
-
@apply text-sm font-semibold mt-1.5 px-2.5 py-0.5 rounded-full border;
-
}
-
</style>
···
</script>
<div
+
class="wsbadge !pl-2 !px-1 !mt-0 !font-normal bg-purple-100 dark:bg-purple-800 hover:bg-purple-200 dark:hover:bg-purple-700 border-purple-300 dark:border-purple-700"
>
+
<label for="sort-by" class="text-purple-800 dark:text-purple-300 mr-1">
+
sort by:
+
</label>
<select
id="sort-by"
value={sortBy}
onchange={(e) =>
onSortChange((e.target as HTMLSelectElement).value as SortOption)}
+
class="bg-purple-50 dark:bg-purple-900 text-purple-900 dark:text-purple-300 border border-purple-200 dark:border-purple-700 rounded-full px-1 outline-none focus:border-purple-400 min-w-0"
>
{#each sortOptions as option}
<option value={option.value}>{option.label}</option>
{/each}
</select>
</div>
+17 -18
client/src/lib/components/StatsCard.svelte
···
<script lang="ts">
import { formatNumber } from "$lib/format";
-
import NumberFlow from "@number-flow/svelte";
const colorClasses = {
green: {
-
bg: "from-green-50 to-green-100",
-
border: "border-green-200",
-
titleText: "text-green-700",
-
valueText: "text-green-900",
},
red: {
-
bg: "from-red-50 to-red-100",
-
border: "border-red-200",
-
titleText: "text-red-700",
-
valueText: "text-red-900",
},
turqoise: {
-
bg: "from-teal-50 to-teal-100",
-
border: "border-teal-200",
-
titleText: "text-teal-700",
-
valueText: "text-teal-900",
},
orange: {
-
bg: "from-orange-50 to-orange-100",
-
border: "border-orange-200",
-
titleText: "text-orange-700",
-
valueText: "text-orange-900",
},
};
···
{title}
</h3>
<p class="text-xl md:text-2xl font-bold {colors.valueText}">
-
<NumberFlow {value} />
</p>
</div>
···
<script lang="ts">
import { formatNumber } from "$lib/format";
const colorClasses = {
green: {
+
bg: "from-green-50 to-green-100 dark:from-green-900 dark:to-green-800",
+
border: "border-green-200 dark:border-green-800",
+
titleText: "text-green-700 dark:text-green-400",
+
valueText: "text-green-900 dark:text-green-200",
},
red: {
+
bg: "from-red-50 to-red-100 dark:from-red-900 dark:to-red-800",
+
border: "border-red-200 dark:border-red-800",
+
titleText: "text-red-700 dark:text-red-400",
+
valueText: "text-red-900 dark:text-red-200",
},
turqoise: {
+
bg: "from-teal-50 to-teal-100 dark:from-teal-900 dark:to-teal-800",
+
border: "border-teal-200 dark:border-teal-800",
+
titleText: "text-teal-700 dark:text-teal-400",
+
valueText: "text-teal-900 dark:text-teal-200",
},
orange: {
+
bg: "from-orange-50 to-orange-100 dark:from-orange-900 dark:to-orange-800",
+
border: "border-orange-200 dark:border-orange-800",
+
titleText: "text-orange-700 dark:text-orange-400",
+
valueText: "text-orange-900 dark:text-orange-200",
},
};
···
{title}
</h3>
<p class="text-xl md:text-2xl font-bold {colors.valueText}">
+
{formatNumber(value)}
</p>
</div>
+18 -14
client/src/lib/components/StatusBadge.svelte
···
const statusConfig = {
connected: {
text: "stream live",
-
classes: "bg-green-100 text-green-800 border-green-200",
},
connecting: {
text: "stream connecting",
-
classes: "bg-yellow-100 text-yellow-800 border-yellow-200",
},
error: {
text: "stream errored",
-
classes: "bg-red-100 text-red-800 border-red-200",
},
disconnected: {
text: "stream offline",
-
classes: "bg-gray-100 text-gray-800 border-gray-200",
},
};
const config = $derived(statusConfig[status]);
</script>
-
<span class="wsbadge {config.classes}">
-
{config.text}
-
</span>
-
-
<style lang="postcss">
-
@reference "../../app.css";
-
.wsbadge {
-
@apply text-sm font-semibold mt-1.5 px-2.5 py-0.5 rounded-full border;
-
}
-
</style>
···
const statusConfig = {
connected: {
text: "stream live",
+
classes:
+
"bg-green-100 dark:bg-green-900 text-green-800 dark:text-green-200 border-green-200 dark:border-green-800",
},
connecting: {
text: "stream connecting",
+
classes:
+
"bg-yellow-100 dark:bg-yellow-900 text-yellow-800 dark:text-yellow-200 border-yellow-200 dark:border-yellow-800",
},
error: {
text: "stream errored",
+
classes:
+
"bg-red-100 dark:bg-red-900 text-red-800 dark:text-red-200 border-red-200 dark:border-red-800",
},
disconnected: {
text: "stream offline",
+
classes:
+
"bg-gray-100 dark:bg-gray-900 text-gray-800 dark:text-gray-200 border-gray-200 dark:border-gray-800",
},
};
const config = $derived(statusConfig[status]);
</script>
+
<div class="flex flex-row items-center gap-2 wsbadge {config.classes}">
+
<!-- connecting spinner -->
+
{#if status === "connecting"}
+
<div
+
class="animate-spin rounded-full h-4 w-4 border-b-2 border-yellow-800 dark:border-yellow-200"
+
></div>
+
{/if}
+
<!-- status text -->
+
<span>{config.text}</span>
+
</div>
+5 -1
client/src/lib/format.ts
···
return num.toLocaleString();
};
export const formatTimestamp = (timestamp: number): string => {
-
return new Date(timestamp / 1000).toLocaleString();
};
···
return num.toLocaleString();
};
+
const isValidDate = (d: Date) => d instanceof Date && !isNaN(d.getTime());
export const formatTimestamp = (timestamp: number): string => {
+
const date = new Date(timestamp * 1000);
+
return isValidDate(date)
+
? date.toLocaleString()
+
: new Date(timestamp / 1000).toLocaleString();
};
+1
client/src/lib/types.ts
···
};
export type SortOption = "total" | "created" | "deleted" | "date";
···
};
export type SortOption = "total" | "created" | "deleted" | "date";
+
export type ShowOption = "server init" | "stream start";
+3 -2
client/src/routes/+layout.ts
···
-
export const prerender = true;
-
export const ssr = false;
···
+
export const prerender = false;
+
export const ssr = true;
+
export const csr = true;
+7
client/src/routes/+page.server.ts
···
···
+
import { fetchEvents, fetchTrackingSince } from "$lib/api";
+
+
export const load = async () => {
+
const events = await fetchEvents();
+
const trackingSince = await fetchTrackingSince();
+
return { events, trackingSince };
+
};
+109 -51
client/src/routes/+page.svelte
···
<script lang="ts">
import { dev } from "$app/environment";
-
import type { EventRecord, NsidCount, SortOption } from "$lib/types";
import { onMount, onDestroy } from "svelte";
-
import { writable } from "svelte/store";
import { PUBLIC_API_URL } from "$env/static/public";
import { fetchEvents, fetchTrackingSince } from "$lib/api";
import { createRegexFilter } from "$lib/filter";
···
import BskyToggle from "$lib/components/BskyToggle.svelte";
import RefreshControl from "$lib/components/RefreshControl.svelte";
import { formatTimestamp } from "$lib/format";
-
const events = writable(new Map<string, EventRecord>());
const pendingUpdates = new Map<string, EventRecord>();
-
let eventsList: NsidCount[] = $state([]);
let updateTimer: NodeJS.Timeout | null = null;
-
events.subscribe((value) => {
-
eventsList = value
-
.entries()
-
.map(([nsid, event]) => ({
-
nsid,
-
...event,
-
}))
-
.toArray();
-
});
-
let per_second = $state(0);
-
let tracking_since = $state(0);
let all: EventRecord = $derived(
eventsList.reduce(
(acc, event) => {
···
},
),
);
-
let error: string | null = $state(null);
-
let filterRegex = $state("");
-
let dontShowBsky = $state(false);
-
let sortBy: SortOption = $state("total");
-
let refreshRate = $state("");
-
let changedByUser = $state(false);
let websocket: WebSocket | null = null;
let isStreamOpen = $state(false);
···
};
websocket.onmessage = async (event) => {
const jsonData = JSON.parse(event.data);
-
-
if (jsonData.per_second > 0) {
-
per_second = jsonData.per_second;
-
}
-
-
// Store updates in pending map if refresh rate is set
if (refreshRate) {
for (const [nsid, event] of Object.entries(jsonData.events)) {
pendingUpdates.set(nsid, event as EventRecord);
}
} else {
-
// Apply updates immediately if no refresh rate
-
events.update((map) => {
-
for (const [nsid, event] of Object.entries(
-
jsonData.events,
-
)) {
-
map.set(nsid, event as EventRecord);
-
}
-
return map;
-
});
}
};
websocket.onerror = (error) => {
···
error = null;
const data = await fetchEvents();
per_second = data.per_second;
-
events.update((map) => {
-
for (const [nsid, event] of Object.entries(data.events)) {
-
map.set(nsid, event);
-
}
-
return map;
-
});
tracking_since = (await fetchTrackingSince()).since;
} catch (err) {
error =
···
/>
</svelte:head>
-
<header class="border-gray-300 border-b mb-4 pb-2">
<div
class="px-2 md:ml-[19vw] mx-auto flex flex-wrap items-center text-center"
>
-
<h1 class="text-4xl font-bold mr-4 text-gray-900">lexicon tracker</h1>
-
<p class="text-lg mt-1 text-gray-600">
tracks lexicons seen on the jetstream {tracking_since === 0
? ""
: `(since: ${formatTimestamp(tracking_since)})`}
</p>
</div>
</header>
-
<div class="md:max-w-[61vw] mx-auto p-2">
<div class="min-w-fit grid grid-cols-2 xl:grid-cols-4 gap-2 2xl:gap-6 mb-8">
<StatsCard
title="total creation"
···
{#if error}
<div
-
class="bg-red-100 border border-red-300 text-red-700 px-4 py-3 rounded-lg mb-6"
>
<p>Error: {error}</p>
</div>
···
{#if eventsList.length > 0}
<div class="mb-8">
<div class="flex flex-wrap items-center gap-3 mb-3">
-
<h2 class="text-2xl font-bold text-gray-900">seen lexicons</h2>
<StatusBadge status={websocketStatus} />
</div>
<div class="flex flex-wrap items-center gap-1.5 mb-6">
···
refreshRate = "";
}}
/>
<RefreshControl
{refreshRate}
onRefreshChange={(value) => {
···
{/if}
</div>
-
<footer class="py-2 border-t border-gray-200 text-center">
-
<p class="text-gray-600 text-sm">
source code <a
href="https://tangled.sh/@poor.dog/nsid-tracker"
target="_blank"
rel="noopener noreferrer"
-
class="text-blue-600 hover:text-blue-800 underline"
>@poor.dog/nsid-tracker</a
>
</p>
···
<script lang="ts">
import { dev } from "$app/environment";
+
import type {
+
EventRecord,
+
Events,
+
NsidCount,
+
ShowOption,
+
Since,
+
SortOption,
+
} from "$lib/types";
import { onMount, onDestroy } from "svelte";
+
import { get, writable } from "svelte/store";
import { PUBLIC_API_URL } from "$env/static/public";
import { fetchEvents, fetchTrackingSince } from "$lib/api";
import { createRegexFilter } from "$lib/filter";
···
import BskyToggle from "$lib/components/BskyToggle.svelte";
import RefreshControl from "$lib/components/RefreshControl.svelte";
import { formatTimestamp } from "$lib/format";
+
import ShowControls from "$lib/components/ShowControls.svelte";
+
+
type Props = {
+
data: { events: Events; trackingSince: Since };
+
};
+
const { data }: Props = $props();
+
+
const events = writable(
+
new Map<string, EventRecord>(Object.entries(data.events.events)),
+
);
+
const eventsStart = new Map<string, EventRecord>(
+
Object.entries(data.events.events),
+
);
const pendingUpdates = new Map<string, EventRecord>();
+
let updateTimer: NodeJS.Timeout | null = null;
+
let per_second = $state(data.events.per_second);
+
let tracking_since = $state(data.trackingSince.since);
+
const diffEvents = (
+
oldEvents: Map<string, EventRecord>,
+
newEvents: Map<string, EventRecord>,
+
): NsidCount[] => {
+
const nsidCounts: NsidCount[] = [];
+
for (const [nsid, event] of newEvents.entries()) {
+
const oldEvent = oldEvents.get(nsid);
+
if (oldEvent) {
+
const counts = {
+
nsid,
+
count: event.count - oldEvent.count,
+
deleted_count: event.deleted_count - oldEvent.deleted_count,
+
last_seen: event.last_seen,
+
};
+
if (counts.count > 0 || counts.deleted_count > 0)
+
nsidCounts.push(counts);
+
} else {
+
nsidCounts.push({
+
nsid,
+
...event,
+
});
+
}
+
}
+
return nsidCounts;
+
};
+
const applyEvents = (newEvents: Record<string, EventRecord>) => {
+
events.update((map) => {
+
for (const [nsid, event] of Object.entries(newEvents)) {
+
map.set(nsid, event);
+
}
+
return map;
+
});
+
};
+
+
let error: string | null = $state(null);
+
let filterRegex = $state("");
+
let dontShowBsky = $state(false);
+
let sortBy: SortOption = $state("total");
+
let refreshRate = $state("");
+
let changedByUser = $state(false);
+
let show: ShowOption = $state("server init");
+
let eventsList: NsidCount[] = $state([]);
+
let updateEventsList = $derived((value: Map<string, EventRecord>) => {
+
switch (show) {
+
case "server init":
+
eventsList = value
+
.entries()
+
.map(([nsid, event]) => ({
+
nsid,
+
...event,
+
}))
+
.toArray();
+
break;
+
case "stream start":
+
eventsList = diffEvents(eventsStart, value);
+
break;
+
}
+
});
+
events.subscribe((value) => updateEventsList(value));
let all: EventRecord = $derived(
eventsList.reduce(
(acc, event) => {
···
},
),
);
let websocket: WebSocket | null = null;
let isStreamOpen = $state(false);
···
};
websocket.onmessage = async (event) => {
const jsonData = JSON.parse(event.data);
+
per_second = jsonData.per_second;
if (refreshRate) {
for (const [nsid, event] of Object.entries(jsonData.events)) {
pendingUpdates.set(nsid, event as EventRecord);
}
} else {
+
applyEvents(jsonData.events);
}
};
websocket.onerror = (error) => {
···
error = null;
const data = await fetchEvents();
per_second = data.per_second;
+
applyEvents(data.events);
tracking_since = (await fetchTrackingSince()).since;
} catch (err) {
error =
···
/>
</svelte:head>
+
<header
+
class="bg-white dark:bg-gray-900 border-gray-300 dark:border-gray-950 border-b mb-4 pb-2"
+
>
<div
class="px-2 md:ml-[19vw] mx-auto flex flex-wrap items-center text-center"
>
+
<h1 class="text-4xl font-bold mr-4 text-gray-900 dark:text-gray-200">
+
lexicon tracker
+
</h1>
+
<p class="text-lg mt-1 text-gray-600 dark:text-gray-300">
tracks lexicons seen on the jetstream {tracking_since === 0
? ""
: `(since: ${formatTimestamp(tracking_since)})`}
</p>
</div>
</header>
+
<div class="bg-white dark:bg-gray-900 md:max-w-[61vw] mx-auto p-2">
<div class="min-w-fit grid grid-cols-2 xl:grid-cols-4 gap-2 2xl:gap-6 mb-8">
<StatsCard
title="total creation"
···
{#if error}
<div
+
class="bg-red-100 dark:bg-red-900 border border-red-300 dark:border-red-700 text-red-700 dark:text-red-200 px-4 py-3 rounded-lg mb-6"
>
<p>Error: {error}</p>
</div>
···
{#if eventsList.length > 0}
<div class="mb-8">
<div class="flex flex-wrap items-center gap-3 mb-3">
+
<h2 class="text-2xl font-bold text-gray-900 dark:text-gray-200">
+
seen lexicons
+
</h2>
<StatusBadge status={websocketStatus} />
</div>
<div class="flex flex-wrap items-center gap-1.5 mb-6">
···
refreshRate = "";
}}
/>
+
<ShowControls
+
{show}
+
onShowChange={(value: ShowOption) => {
+
show = value;
+
updateEventsList(get(events));
+
}}
+
/>
<RefreshControl
{refreshRate}
onRefreshChange={(value) => {
···
{/if}
</div>
+
<footer class="py-2 border-t border-gray-200 dark:border-gray-800 text-center">
+
<p class="text-gray-600 dark:text-gray-200 text-sm">
source code <a
href="https://tangled.sh/@poor.dog/nsid-tracker"
target="_blank"
rel="noopener noreferrer"
+
class="text-blue-600 dark:text-blue-400 hover:text-blue-800 dark:hover:text-blue-600 underline"
>@poor.dog/nsid-tracker</a
>
</p>
+1 -1
client/svelte.config.js
···
-
import adapter from "@sveltejs/adapter-static";
import { vitePreprocess } from "@sveltejs/vite-plugin-svelte";
/** @type {import('@sveltejs/kit').Config} */
···
+
import adapter from "svelte-adapter-bun";
import { vitePreprocess } from "@sveltejs/vite-plugin-svelte";
/** @type {import('@sveltejs/kit').Config} */
+1 -1
nix/client-modules.nix
···
src = ../client;
-
outputHash = "sha256-t8PJFo+3XGkzmMNbw9Rf9cS5Ob5YtI8ucX3ay+u9a3M=";
outputHashAlgo = "sha256";
outputHashMode = "recursive";
···
src = ../client;
+
outputHash = "sha256-njwXk3u0NUsYWLv9EOdCltgQOjTVkcfu+D+0COSw/6I=";
outputHashAlgo = "sha256";
outputHashMode = "recursive";
+10 -2
nix/client.nix
···
{
stdenv,
makeBinaryWrapper,
bun,
···
'';
buildPhase = ''
runHook preBuild
-
bun --prefer-offline run --bun build
runHook postBuild
'';
installPhase = ''
runHook preInstall
-
mkdir -p $out
cp -R ./build/* $out
runHook postInstall
'';
}
···
{
+
lib,
stdenv,
makeBinaryWrapper,
bun,
···
'';
buildPhase = ''
runHook preBuild
+
bun --prefer-offline run build
runHook postBuild
'';
installPhase = ''
runHook preInstall
+
+
mkdir -p $out/bin
cp -R ./build/* $out
+
cp -R ./node_modules $out
+
+
makeBinaryWrapper ${bun}/bin/bun $out/bin/website \
+
--prefix PATH : ${lib.makeBinPath [ bun ]} \
+
--add-flags "run --bun --no-install --cwd $out start"
+
runHook postInstall
'';
}
+42 -28
server/Cargo.lock
···
checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
[[package]]
name = "aho-corasick"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
···
version = "1.0.98"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487"
[[package]]
name = "async-compression"
···
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
-
-
[[package]]
-
name = "cmake"
-
version = "0.1.54"
-
source = "registry+https://github.com/rust-lang/crates.io-index"
-
checksum = "e7caa3f9de89ddbe2c607f4101924c5abec803763ae9534e4f4d7d8f84aa81f0"
-
dependencies = [
-
"cc",
-
]
[[package]]
name = "combine"
···
name = "server"
version = "0.1.0"
dependencies = [
"anyhow",
"async-trait",
"axum",
"axum-tws",
···
"serde",
"serde_json",
"smol_str",
-
"snmalloc-rs",
"threadpool",
"tikv-jemallocator",
"tokio",
···
dependencies = [
"borsh",
"serde",
-
]
-
-
[[package]]
-
name = "snmalloc-rs"
-
version = "0.3.8"
-
source = "registry+https://github.com/rust-lang/crates.io-index"
-
checksum = "eb317153089fdfa4d8a2eec059d40a5a23c3bde43995ea23b19121c3f621e74a"
-
dependencies = [
-
"snmalloc-sys",
-
]
-
-
[[package]]
-
name = "snmalloc-sys"
-
version = "0.3.8"
-
source = "registry+https://github.com/rust-lang/crates.io-index"
-
checksum = "065fea53d32bb77bc36cca466cb191f2e5216ebfd0ed360b1d64889ee6e559ea"
-
dependencies = [
-
"cmake",
]
[[package]]
···
version = "0.8.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fdd20c5420375476fbd4394763288da7eb0cc0b8c11deed431a91562af7335d3"
[[package]]
name = "zeroize"
···
checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
[[package]]
+
name = "ahash"
+
version = "0.8.12"
+
source = "registry+https://github.com/rust-lang/crates.io-index"
+
checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75"
+
dependencies = [
+
"cfg-if",
+
"getrandom 0.3.3",
+
"once_cell",
+
"serde",
+
"version_check",
+
"zerocopy",
+
]
+
+
[[package]]
name = "aho-corasick"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
···
version = "1.0.98"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487"
+
+
[[package]]
+
name = "arc-swap"
+
version = "1.7.1"
+
source = "registry+https://github.com/rust-lang/crates.io-index"
+
checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
[[package]]
name = "async-compression"
···
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]]
name = "combine"
···
name = "server"
version = "0.1.0"
dependencies = [
+
"ahash",
"anyhow",
+
"arc-swap",
"async-trait",
"axum",
"axum-tws",
···
"serde",
"serde_json",
"smol_str",
"threadpool",
"tikv-jemallocator",
"tokio",
···
dependencies = [
"borsh",
"serde",
]
[[package]]
···
version = "0.8.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fdd20c5420375476fbd4394763288da7eb0cc0b8c11deed431a91562af7335d3"
+
+
[[package]]
+
name = "zerocopy"
+
version = "0.8.26"
+
source = "registry+https://github.com/rust-lang/crates.io-index"
+
checksum = "1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f"
+
dependencies = [
+
"zerocopy-derive",
+
]
+
+
[[package]]
+
name = "zerocopy-derive"
+
version = "0.8.26"
+
source = "registry+https://github.com/rust-lang/crates.io-index"
+
checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181"
+
dependencies = [
+
"proc-macro2",
+
"quote",
+
"syn",
+
]
[[package]]
name = "zeroize"
+2 -2
server/Cargo.toml
···
rayon = "1.10.0"
parking_lot = { version = "0.12", features = ["send_guard", "hardware-lock-elision"] }
rclite = "0.2.7"
-
[target.'cfg(target_env = "msvc")'.dependencies]
-
snmalloc-rs = "0.3.8"
[target.'cfg(not(target_env = "msvc"))'.dependencies]
tikv-jemallocator = "0.6"
···
rayon = "1.10.0"
parking_lot = { version = "0.12", features = ["send_guard", "hardware-lock-elision"] }
rclite = "0.2.7"
+
arc-swap = "1.7.1"
+
ahash = { version = "0.8.12", features = ["serde"] }
[target.'cfg(not(target_env = "msvc"))'.dependencies]
tikv-jemallocator = "0.6"
+16 -18
server/src/api.rs
···
use std::{
-
collections::HashMap,
fmt::Display,
net::SocketAddr,
ops::{Bound, Deref, RangeBounds},
time::Duration,
};
use anyhow::anyhow;
use axum::{
Json, Router,
···
#[derive(Serialize)]
struct Events {
per_second: usize,
-
events: HashMap<SmolStr, NsidCount>,
}
async fn events(db: State<Arc<Db>>) -> AppResult<Json<Events>> {
-
let mut events = HashMap::new();
for result in db.get_counts() {
let (nsid, counts) = result?;
events.insert(
···
) -> AppResult<Json<Vec<Hit>>> {
let from = params.to.map(Bound::Included).unwrap_or(Bound::Unbounded);
let to = params.from.map(Bound::Included).unwrap_or(Bound::Unbounded);
-
let maybe_hits = db
-
.get_hits(&params.nsid, HitsRange { from, to })
-
.take(MAX_HITS);
-
let mut hits = Vec::with_capacity(maybe_hits.size_hint().0);
-
for maybe_hit in maybe_hits {
-
let hit = maybe_hit?;
-
let hit_data = hit.deser()?;
-
-
hits.push(Hit {
-
timestamp: hit.timestamp,
-
deleted: hit_data.deleted,
-
});
-
}
-
Ok(Json(hits))
}
async fn stream_events(db: State<Arc<Db>>, ws: WebSocketUpgrade) -> Response {
···
(async move {
let mut listener = db.new_listener();
let mut data = Events {
-
events: HashMap::<SmolStr, NsidCount>::with_capacity(10),
per_second: 0,
};
let mut updates = 0;
···
use std::{
fmt::Display,
net::SocketAddr,
ops::{Bound, Deref, RangeBounds},
time::Duration,
};
+
use ahash::AHashMap;
use anyhow::anyhow;
use axum::{
Json, Router,
···
#[derive(Serialize)]
struct Events {
per_second: usize,
+
events: AHashMap<SmolStr, NsidCount>,
}
async fn events(db: State<Arc<Db>>) -> AppResult<Json<Events>> {
+
let mut events = AHashMap::new();
for result in db.get_counts() {
let (nsid, counts) = result?;
events.insert(
···
) -> AppResult<Json<Vec<Hit>>> {
let from = params.to.map(Bound::Included).unwrap_or(Bound::Unbounded);
let to = params.from.map(Bound::Included).unwrap_or(Bound::Unbounded);
+
db.get_hits(&params.nsid, HitsRange { from, to }, MAX_HITS)
+
.take(MAX_HITS)
+
.try_fold(Vec::with_capacity(MAX_HITS), |mut acc, hit| {
+
let hit = hit?;
+
let hit_data = hit.deser()?;
+
acc.push(Hit {
+
timestamp: hit.timestamp,
+
deleted: hit_data.deleted,
+
});
+
Ok(acc)
+
})
+
.map(Json)
}
async fn stream_events(db: State<Arc<Db>>, ws: WebSocketUpgrade) -> Response {
···
(async move {
let mut listener = db.new_listener();
let mut data = Events {
+
events: AHashMap::<SmolStr, NsidCount>::with_capacity(10),
per_second: 0,
};
let mut updates = 0;
+37 -18
server/src/db/handle.rs
···
use std::{
fmt::Debug,
io::Cursor,
-
ops::{Bound, Deref, RangeBounds},
sync::atomic::{AtomicU64, Ordering as AtomicOrdering},
time::Duration,
};
use byteview::ByteView;
-
use fjall::{Keyspace, Partition, PartitionCreateOptions, Slice};
use itertools::Itertools;
use parking_lot::Mutex;
use rayon::iter::{IntoParallelIterator, ParallelIterator};
···
use crate::{
db::{EventRecord, NsidHit, block},
-
error::AppResult,
-
utils::{CLOCK, DefaultRateTracker, RateTracker, ReadVariableExt, varints_unsigned_encoded},
};
pub type ItemDecoder = block::ItemDecoder<Cursor<Slice>, NsidHit>;
···
}
pub struct LexiconHandle {
-
tree: Partition,
nsid: SmolStr,
buf: Arc<Mutex<Vec<EventRecord>>>,
last_insert: AtomicU64, // relaxed
···
}
}
-
impl Deref for LexiconHandle {
-
type Target = Partition;
-
-
fn deref(&self) -> &Self::Target {
-
&self.tree
-
}
-
}
-
impl LexiconHandle {
pub fn new(keyspace: &Keyspace, nsid: &str) -> Self {
let opts = PartitionCreateOptions::default()
-
.block_size(1024 * 128)
.compression(fjall::CompressionType::Miniz(9));
Self {
-
tree: keyspace.open_partition(nsid, opts).unwrap(),
nsid: nsid.into(),
buf: Default::default(),
last_insert: AtomicU64::new(0),
···
}
}
pub fn span(&self) -> tracing::Span {
tracing::info_span!("handle", nsid = %self.nsid)
}
pub fn nsid(&self) -> &SmolStr {
&self.nsid
}
pub fn item_count(&self) -> usize {
self.buf.lock().len()
}
···
let end_key = varints_unsigned_encoded([end_limit]);
let blocks_to_compact = self
-
.tree
.range(start_key..end_key)
.collect::<Result<Vec<_>, _>>()?;
if blocks_to_compact.len() < 2 {
···
let end_blocks_size = new_blocks.len();
for key in keys_to_delete {
-
self.tree.remove(key.clone())?;
}
for block in new_blocks {
-
self.tree.insert(block.key, block.data)?;
}
let reduction =
···
);
Ok(())
}
pub fn encode_block_from_items(
···
use std::{
fmt::Debug,
io::Cursor,
+
ops::{Bound, RangeBounds},
sync::atomic::{AtomicU64, Ordering as AtomicOrdering},
time::Duration,
};
use byteview::ByteView;
+
use fjall::{Keyspace, Partition, PartitionCreateOptions, Slice, Snapshot};
use itertools::Itertools;
use parking_lot::Mutex;
use rayon::iter::{IntoParallelIterator, ParallelIterator};
···
use crate::{
db::{EventRecord, NsidHit, block},
+
error::{AppError, AppResult},
+
utils::{
+
ArcRefCnt, ArcliteSwap, CLOCK, DefaultRateTracker, RateTracker, ReadVariableExt,
+
varints_unsigned_encoded,
+
},
};
pub type ItemDecoder = block::ItemDecoder<Cursor<Slice>, NsidHit>;
···
}
pub struct LexiconHandle {
+
write_tree: Partition,
+
read_tree: ArcliteSwap<Snapshot>,
nsid: SmolStr,
buf: Arc<Mutex<Vec<EventRecord>>>,
last_insert: AtomicU64, // relaxed
···
}
}
impl LexiconHandle {
pub fn new(keyspace: &Keyspace, nsid: &str) -> Self {
let opts = PartitionCreateOptions::default()
+
.block_size(1024 * 48)
.compression(fjall::CompressionType::Miniz(9));
+
let write_tree = keyspace.open_partition(nsid, opts).unwrap();
+
let read_tree = ArcliteSwap::new(ArcRefCnt::new(write_tree.snapshot()));
Self {
+
write_tree,
+
read_tree,
nsid: nsid.into(),
buf: Default::default(),
last_insert: AtomicU64::new(0),
···
}
}
+
#[inline(always)]
+
pub fn read(&self) -> arc_swap::Guard<ArcRefCnt<Snapshot>> {
+
self.read_tree.load()
+
}
+
+
#[inline(always)]
+
pub fn update_tree(&self) {
+
self.read_tree
+
.store(ArcRefCnt::new(self.write_tree.snapshot()));
+
}
+
+
#[inline(always)]
pub fn span(&self) -> tracing::Span {
tracing::info_span!("handle", nsid = %self.nsid)
}
+
#[inline(always)]
pub fn nsid(&self) -> &SmolStr {
&self.nsid
}
+
#[inline(always)]
pub fn item_count(&self) -> usize {
self.buf.lock().len()
}
···
let end_key = varints_unsigned_encoded([end_limit]);
let blocks_to_compact = self
+
.read()
.range(start_key..end_key)
.collect::<Result<Vec<_>, _>>()?;
if blocks_to_compact.len() < 2 {
···
let end_blocks_size = new_blocks.len();
for key in keys_to_delete {
+
self.write_tree.remove(key.clone())?;
}
for block in new_blocks {
+
self.write_tree.insert(block.key, block.data)?;
}
let reduction =
···
);
Ok(())
+
}
+
+
pub fn insert_block(&self, block: Block) -> AppResult<()> {
+
self.write_tree
+
.insert(block.key, block.data)
+
.map_err(AppError::from)
}
pub fn encode_block_from_items(
+115 -56
server/src/db/mod.rs
···
use std::{
-
collections::HashMap,
fmt::Debug,
io::Cursor,
ops::{Bound, Deref, RangeBounds},
path::Path,
time::Duration,
};
use byteview::StrView;
use fjall::{Keyspace, Partition, PartitionCreateOptions};
use itertools::{Either, Itertools};
···
}
pub struct DbInfo {
-
pub nsids: HashMap<SmolStr, Vec<usize>>,
pub disk_size: u64,
}
···
impl Default for DbConfig {
fn default() -> Self {
Self {
-
ks_config: fjall::Config::default(),
min_block_size: 1000,
-
max_block_size: 1_000_000,
max_last_activity: Duration::from_secs(10),
}
}
···
pub cfg: DbConfig,
pub ks: Keyspace,
counts: Partition,
-
hits: scc::HashIndex<SmolStr, Arc<LexiconHandle>>,
sync_pool: threadpool::ThreadPool,
event_broadcaster: broadcast::Sender<(SmolStr, NsidCounts)>,
eps: RateTracker<100>, // 100 millis buckets
···
pub fn sync(&self, all: bool) -> AppResult<()> {
let start = CLOCK.now();
// prepare all the data
-
let mut data = Vec::with_capacity(self.hits.len());
let _guard = scc::ebr::Guard::new();
-
for (_, handle) in self.hits.iter(&_guard) {
let mut nsid_data = Vec::with_capacity(2);
-
let mut total_count = 0;
let is_too_old = handle.since_last_activity() > self.cfg.max_last_activity;
// if we disconnect for a long time, we want to sync all of what we
// have to avoid having many small blocks (even if we run compaction
···
if count > 0 && (all || data_count > 0 || is_too_old) {
for _ in 0..data_count {
nsid_data.push((handle.clone(), block_size));
-
total_count += block_size;
}
// only sync remainder if we haven't met block size
let remainder = count % block_size;
if (all || data_count == 0) && remainder > 0 {
nsid_data.push((handle.clone(), remainder));
-
total_count += remainder;
}
}
let _span = handle.span().entered();
-
tracing::info!(
-
{blocks = %nsid_data.len(), count = %total_count},
-
"will encode & sync",
-
);
-
data.push(nsid_data);
}
drop(_guard);
···
for (block, handle) in chunk {
self.sync_pool.execute(move || {
let _span = handle.span().entered();
-
match handle.insert(block.key, block.data) {
Ok(_) => {
-
tracing::info!({count = %block.written}, "synced")
}
Err(err) => tracing::error!({ err = %err }, "failed to sync block"),
}
···
AppResult::Ok(())
})?;
self.sync_pool.join();
tracing::info!(time = %start.elapsed().as_secs_f64(), "synced all blocks");
Ok(())
···
let Some(handle) = self.get_handle(nsid) else {
return Ok(());
};
-
handle.compact(max_count, range, sort)
}
pub fn compact_all(
···
pub fn major_compact(&self) -> AppResult<()> {
self.compact_all(self.cfg.max_block_size, .., true)?;
-
let _guard = scc::ebr::Guard::new();
-
for (_, handle) in self.hits.iter(&_guard) {
-
handle.deref().major_compact()?;
-
}
Ok(())
}
···
}
pub fn ingest_events(&self, events: impl Iterator<Item = EventRecord>) -> AppResult<()> {
for (key, chunk) in events.chunk_by(|event| event.nsid.clone()).into_iter() {
let mut counts = self.get_count(&key)?;
-
let mut count = 0;
self.ensure_handle(&key).queue(chunk.inspect(|e| {
// increment count
counts.last_seen = e.timestamp;
···
} else {
counts.count += 1;
}
-
count += 1;
}));
-
self.eps.observe(count);
self.insert_count(&key, &counts)?;
if self.event_broadcaster.receiver_count() > 0 {
let _ = self.event_broadcaster.send((key, counts));
}
}
Ok(())
}
···
}
pub fn info(&self) -> AppResult<DbInfo> {
-
let mut nsids = HashMap::new();
for nsid in self.get_nsids() {
let Some(handle) = self.get_handle(&nsid) else {
continue;
};
-
let block_lens = handle.iter().rev().try_fold(Vec::new(), |mut acc, item| {
-
let (key, value) = item?;
-
let mut timestamps = Cursor::new(key);
-
let start_timestamp = timestamps.read_varint()?;
-
let decoder = ItemDecoder::new(Cursor::new(value), start_timestamp)?;
-
acc.push(decoder.item_count());
-
AppResult::Ok(acc)
-
})?;
nsids.insert(nsid.to_smolstr(), block_lens);
}
Ok(DbInfo {
···
&self,
nsid: &str,
range: impl RangeBounds<u64> + std::fmt::Debug,
) -> impl Iterator<Item = AppResult<handle::Item>> {
let start_limit = match range.start_bound().cloned() {
Bound::Included(start) => start,
···
return Either::Right(std::iter::empty());
};
-
let map_block = move |(key, val)| {
let mut key_reader = Cursor::new(key);
let start_timestamp = key_reader.read_varint::<u64>()?;
if start_timestamp < start_limit {
-
return Ok(None);
}
-
let items = handle::ItemDecoder::new(Cursor::new(val), start_timestamp)?
-
.take_while(move |item| {
-
item.as_ref().map_or(true, |item| {
-
item.timestamp <= end_limit && item.timestamp >= start_limit
-
})
-
})
-
.map(|res| res.map_err(AppError::from));
-
Ok(Some(items))
};
-
Either::Left(
-
handle
-
.range(..end_key)
-
.rev()
-
.map_while(move |res| res.map_err(AppError::from).and_then(map_block).transpose())
-
.collect::<Vec<_>>()
-
.into_iter()
-
.rev()
-
.flatten()
-
.flatten(),
-
)
}
pub fn tracking_since(&self) -> AppResult<u64> {
···
let Some(handle) = self.get_handle("app.bsky.feed.like") else {
return Ok(0);
};
-
let Some((timestamps_raw, _)) = handle.first_key_value()? else {
return Ok(0);
};
let mut timestamp_reader = Cursor::new(timestamps_raw);
···
use std::{
fmt::Debug,
io::Cursor,
ops::{Bound, Deref, RangeBounds},
path::Path,
time::Duration,
+
u64,
};
+
use ahash::{AHashMap, AHashSet};
use byteview::StrView;
use fjall::{Keyspace, Partition, PartitionCreateOptions};
use itertools::{Either, Itertools};
···
}
pub struct DbInfo {
+
pub nsids: AHashMap<SmolStr, Vec<usize>>,
pub disk_size: u64,
}
···
impl Default for DbConfig {
fn default() -> Self {
Self {
+
ks_config: fjall::Config::default()
+
.cache_size(1024 * 1024 * 512)
+
.max_write_buffer_size(u64::MAX),
min_block_size: 1000,
+
max_block_size: 250_000,
max_last_activity: Duration::from_secs(10),
}
}
···
pub cfg: DbConfig,
pub ks: Keyspace,
counts: Partition,
+
hits: scc::HashIndex<SmolStr, Arc<LexiconHandle>, ahash::RandomState>,
sync_pool: threadpool::ThreadPool,
event_broadcaster: broadcast::Sender<(SmolStr, NsidCounts)>,
eps: RateTracker<100>, // 100 millis buckets
···
pub fn sync(&self, all: bool) -> AppResult<()> {
let start = CLOCK.now();
// prepare all the data
+
let nsids_len = self.hits.len();
+
let mut data = Vec::with_capacity(nsids_len);
+
let mut nsids = AHashSet::with_capacity(nsids_len);
let _guard = scc::ebr::Guard::new();
+
for (nsid, handle) in self.hits.iter(&_guard) {
let mut nsid_data = Vec::with_capacity(2);
+
// let mut total_count = 0;
let is_too_old = handle.since_last_activity() > self.cfg.max_last_activity;
// if we disconnect for a long time, we want to sync all of what we
// have to avoid having many small blocks (even if we run compaction
···
if count > 0 && (all || data_count > 0 || is_too_old) {
for _ in 0..data_count {
nsid_data.push((handle.clone(), block_size));
+
// total_count += block_size;
}
// only sync remainder if we haven't met block size
let remainder = count % block_size;
if (all || data_count == 0) && remainder > 0 {
nsid_data.push((handle.clone(), remainder));
+
// total_count += remainder;
}
}
let _span = handle.span().entered();
+
if nsid_data.len() > 0 {
+
// tracing::info!(
+
// {blocks = %nsid_data.len(), count = %total_count},
+
// "will encode & sync",
+
// );
+
nsids.insert(nsid.clone());
+
data.push(nsid_data);
+
}
}
drop(_guard);
···
for (block, handle) in chunk {
self.sync_pool.execute(move || {
let _span = handle.span().entered();
+
let written = block.written;
+
match handle.insert_block(block) {
Ok(_) => {
+
tracing::info!({count = %written}, "synced")
}
Err(err) => tracing::error!({ err = %err }, "failed to sync block"),
}
···
AppResult::Ok(())
})?;
self.sync_pool.join();
+
+
// update snapshots for all (changed) handles
+
for nsid in nsids {
+
self.hits.peek_with(&nsid, |_, handle| handle.update_tree());
+
}
+
tracing::info!(time = %start.elapsed().as_secs_f64(), "synced all blocks");
Ok(())
···
let Some(handle) = self.get_handle(nsid) else {
return Ok(());
};
+
handle.compact(max_count, range, sort)?;
+
handle.update_tree();
+
Ok(())
}
pub fn compact_all(
···
pub fn major_compact(&self) -> AppResult<()> {
self.compact_all(self.cfg.max_block_size, .., true)?;
Ok(())
}
···
}
pub fn ingest_events(&self, events: impl Iterator<Item = EventRecord>) -> AppResult<()> {
+
let mut seen_events = 0;
for (key, chunk) in events.chunk_by(|event| event.nsid.clone()).into_iter() {
let mut counts = self.get_count(&key)?;
self.ensure_handle(&key).queue(chunk.inspect(|e| {
// increment count
counts.last_seen = e.timestamp;
···
} else {
counts.count += 1;
}
+
seen_events += 1;
}));
self.insert_count(&key, &counts)?;
if self.event_broadcaster.receiver_count() > 0 {
let _ = self.event_broadcaster.send((key, counts));
}
}
+
self.eps.observe(seen_events);
Ok(())
}
···
}
pub fn info(&self) -> AppResult<DbInfo> {
+
let mut nsids = AHashMap::new();
for nsid in self.get_nsids() {
let Some(handle) = self.get_handle(&nsid) else {
continue;
};
+
let block_lens = handle
+
.read()
+
.iter()
+
.rev()
+
.try_fold(Vec::new(), |mut acc, item| {
+
let (key, value) = item?;
+
let mut timestamps = Cursor::new(key);
+
let start_timestamp = timestamps.read_varint()?;
+
let decoder = ItemDecoder::new(Cursor::new(value), start_timestamp)?;
+
acc.push(decoder.item_count());
+
AppResult::Ok(acc)
+
})?;
nsids.insert(nsid.to_smolstr(), block_lens);
}
Ok(DbInfo {
···
&self,
nsid: &str,
range: impl RangeBounds<u64> + std::fmt::Debug,
+
max_items: usize,
) -> impl Iterator<Item = AppResult<handle::Item>> {
let start_limit = match range.start_bound().cloned() {
Bound::Included(start) => start,
···
return Either::Right(std::iter::empty());
};
+
// let mut ts = CLOCK.now();
+
let map_block = move |(res, current_item_count)| -> AppResult<(Option<_>, usize)> {
+
if current_item_count >= max_items {
+
return Ok((None, current_item_count));
+
}
+
let (key, val) = res?;
let mut key_reader = Cursor::new(key);
let start_timestamp = key_reader.read_varint::<u64>()?;
+
// let end_timestamp = key_reader.read_varint::<u64>()?;
if start_timestamp < start_limit {
+
// tracing::info!(
+
// "stopped at block with timestamps {start_timestamp}..{end_timestamp} because {start_limit} is greater"
+
// );
+
return Ok((None, current_item_count));
}
+
let decoder = handle::ItemDecoder::new(Cursor::new(val), start_timestamp)?;
+
let current_item_count = current_item_count + decoder.item_count();
+
// tracing::info!(
+
// "took {}ns to get block with size {}",
+
// ts.elapsed().as_nanos(),
+
// decoder.item_count()
+
// );
+
// ts = CLOCK.now();
+
Ok((
+
Some(
+
decoder
+
.take_while(move |item| {
+
item.as_ref().map_or(true, |item| {
+
item.timestamp <= end_limit && item.timestamp >= start_limit
+
})
+
})
+
.map(|res| res.map_err(AppError::from)),
+
),
+
current_item_count,
+
))
};
+
let (blocks, _counted) = handle
+
.read()
+
.range(..end_key)
+
.map(|res| res.map_err(AppError::from))
+
.rev()
+
.fold_while(
+
(Vec::with_capacity(20), 0),
+
|(mut blocks, current_item_count), res| {
+
use itertools::FoldWhile::*;
+
+
match map_block((res, current_item_count)) {
+
Ok((Some(block), current_item_count)) => {
+
blocks.push(Ok(block));
+
Continue((blocks, current_item_count))
+
}
+
Ok((None, current_item_count)) => Done((blocks, current_item_count)),
+
Err(err) => {
+
blocks.push(Err(err));
+
Done((blocks, current_item_count))
+
}
+
}
+
},
+
)
+
.into_inner();
+
+
// tracing::info!(
+
// "got blocks with size {}, item count {counted}",
+
// blocks.len()
+
// );
+
+
Either::Left(blocks.into_iter().rev().flatten().flatten())
}
pub fn tracking_since(&self) -> AppResult<u64> {
···
let Some(handle) = self.get_handle("app.bsky.feed.like") else {
return Ok(0);
};
+
let Some((timestamps_raw, _)) = handle.read().first_key_value()? else {
return Ok(0);
};
let mut timestamp_reader = Cursor::new(timestamps_raw);
+21 -11
server/src/jetstream.rs
···
pub struct JetstreamClient {
stream: Option<WebSocketStream<MaybeTlsStream<TcpStream>>>,
tls_connector: tokio_websockets::Connector,
-
url: SmolStr,
}
impl JetstreamClient {
-
pub fn new(url: &str) -> AppResult<Self> {
Ok(Self {
stream: None,
tls_connector: tokio_websockets::Connector::new()?,
-
url: SmolStr::new(url),
})
}
pub async fn connect(&mut self) -> AppResult<()> {
-
let (stream, _) = ClientBuilder::new()
-
.connector(&self.tls_connector)
-
.uri(&self.url)?
-
.connect()
-
.await?;
-
self.stream = Some(stream);
-
tracing::info!("connected to jetstream ({})", self.url);
-
Ok(())
}
// automatically retries connection, only returning error if it fails many times
···
pub struct JetstreamClient {
stream: Option<WebSocketStream<MaybeTlsStream<TcpStream>>>,
tls_connector: tokio_websockets::Connector,
+
urls: Vec<SmolStr>,
}
impl JetstreamClient {
+
pub fn new(urls: impl IntoIterator<Item = impl Into<SmolStr>>) -> AppResult<Self> {
Ok(Self {
stream: None,
tls_connector: tokio_websockets::Connector::new()?,
+
urls: urls.into_iter().map(Into::into).collect(),
})
}
pub async fn connect(&mut self) -> AppResult<()> {
+
for uri in &self.urls {
+
let conn_result = ClientBuilder::new()
+
.connector(&self.tls_connector)
+
.uri(uri)?
+
.connect()
+
.await;
+
match conn_result {
+
Ok((stream, _)) => {
+
self.stream = Some(stream);
+
tracing::info!("connected to jetstream {}", uri);
+
return Ok(());
+
}
+
Err(err) => {
+
tracing::error!("failed to connect to jetstream {uri}: {err}");
+
}
+
};
+
}
+
Err(anyhow!("failed to connect to any jetstream server").into())
}
// automatically retries connection, only returning error if it fails many times
+40 -16
server/src/main.rs
···
-
use std::{ops::Deref, time::Duration, u64};
use itertools::Itertools;
use rclite::Arc;
···
#[global_allocator]
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
-
#[cfg(target_env = "msvc")]
-
#[global_allocator]
-
static ALLOC: snmalloc_rs::SnMalloc = snmalloc_rs::SnMalloc;
-
#[tokio::main]
async fn main() {
tracing_subscriber::fmt::fmt()
···
debug();
return;
}
Some(x) => {
tracing::error!("unknown command: {}", x);
return;
···
.install_default()
.expect("cant install rustls crypto provider");
-
let mut jetstream =
-
match JetstreamClient::new("wss://jetstream2.us-west.bsky.network/subscribe") {
-
Ok(client) => client,
-
Err(err) => {
-
tracing::error!("can't create jetstream client: {err}");
-
return;
-
}
-
};
let (event_tx, mut event_rx) = tokio::sync::mpsc::channel(1000);
let consume_events = tokio::spawn({
···
move || {
let mut buffer = Vec::new();
loop {
-
let read = event_rx.blocking_recv_many(&mut buffer, 100);
if let Err(err) = db.ingest_events(buffer.drain(..)) {
tracing::error!("failed to ingest events: {}", err);
}
···
if db.is_shutting_down() {
return;
}
-
let end = get_time() - compact_period / 2;
let start = end - compact_period;
let range = start.as_secs()..end.as_secs();
tracing::info!(
···
db.sync(true).expect("cant sync db");
}
fn debug() {
let db = Db::new(DbConfig::default(), CancellationToken::new()).expect("couldnt create db");
let info = db.info().expect("cant get db info");
···
threads.push(std::thread::spawn(move || {
tracing::info!("{}: migrating...", nsid.deref());
let mut count = 0_u64;
-
for hits in from.get_hits(&nsid, ..).chunks(100000).into_iter() {
to.ingest_events(hits.map(|hit| {
count += 1;
let hit = hit.expect("cant decode hit");
···
+
use std::{ops::Deref, time::Duration, u64, usize};
use itertools::Itertools;
use rclite::Arc;
···
#[global_allocator]
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
#[tokio::main]
async fn main() {
tracing_subscriber::fmt::fmt()
···
debug();
return;
}
+
Some("print") => {
+
print_all();
+
return;
+
}
Some(x) => {
tracing::error!("unknown command: {}", x);
return;
···
.install_default()
.expect("cant install rustls crypto provider");
+
let urls = [
+
"wss://jetstream1.us-west.bsky.network/subscribe",
+
"wss://jetstream2.us-west.bsky.network/subscribe",
+
"wss://jetstream2.fr.hose.cam/subscribe",
+
"wss://jetstream.fire.hose.cam/subscribe",
+
];
+
let mut jetstream = match JetstreamClient::new(urls) {
+
Ok(client) => client,
+
Err(err) => {
+
tracing::error!("can't create jetstream client: {err}");
+
return;
+
}
+
};
let (event_tx, mut event_rx) = tokio::sync::mpsc::channel(1000);
let consume_events = tokio::spawn({
···
move || {
let mut buffer = Vec::new();
loop {
+
let read = event_rx.blocking_recv_many(&mut buffer, 500);
if let Err(err) = db.ingest_events(buffer.drain(..)) {
tracing::error!("failed to ingest events: {}", err);
}
···
if db.is_shutting_down() {
return;
}
+
let end = get_time();
let start = end - compact_period;
let range = start.as_secs()..end.as_secs();
tracing::info!(
···
db.sync(true).expect("cant sync db");
}
+
fn print_all() {
+
let db = Db::new(DbConfig::default(), CancellationToken::new()).expect("couldnt create db");
+
let nsids = db.get_nsids().collect::<Vec<_>>();
+
let mut count = 0_usize;
+
for nsid in nsids {
+
println!("{}:", nsid.deref());
+
for hit in db.get_hits(&nsid, .., usize::MAX) {
+
let hit = hit.expect("cant decode hit");
+
println!("{} {}", hit.timestamp, hit.deser().unwrap().deleted);
+
count += 1;
+
}
+
}
+
println!("total hits: {}", count);
+
}
+
fn debug() {
let db = Db::new(DbConfig::default(), CancellationToken::new()).expect("couldnt create db");
let info = db.info().expect("cant get db info");
···
threads.push(std::thread::spawn(move || {
tracing::info!("{}: migrating...", nsid.deref());
let mut count = 0_u64;
+
for hits in from
+
.get_hits(&nsid, .., usize::MAX)
+
.chunks(100000)
+
.into_iter()
+
{
to.ingest_events(hits.map(|hit| {
count += 1;
let hit = hit.expect("cant decode hit");
+66
server/src/utils.rs
···
use std::io::{self, Read, Write};
use std::sync::atomic::{AtomicU64, Ordering};
use std::time::Duration;
use byteview::ByteView;
use ordered_varint::Variable;
pub fn get_time() -> Duration {
std::time::SystemTime::now()
···
}
}
}
···
use std::io::{self, Read, Write};
+
use std::ops::Deref;
use std::sync::atomic::{AtomicU64, Ordering};
use std::time::Duration;
+
use arc_swap::RefCnt;
use byteview::ByteView;
use ordered_varint::Variable;
+
use rclite::Arc;
pub fn get_time() -> Duration {
std::time::SystemTime::now()
···
}
}
}
+
+
pub type ArcliteSwap<T> = arc_swap::ArcSwapAny<ArcRefCnt<T>>;
+
+
pub struct ArcRefCnt<T>(Arc<T>);
+
+
impl<T> ArcRefCnt<T> {
+
pub fn new(value: T) -> Self {
+
Self(Arc::new(value))
+
}
+
}
+
+
impl<T> Deref for ArcRefCnt<T> {
+
type Target = T;
+
+
fn deref(&self) -> &Self::Target {
+
&self.0
+
}
+
}
+
+
impl<T> Clone for ArcRefCnt<T> {
+
fn clone(&self) -> Self {
+
Self(self.0.clone())
+
}
+
}
+
+
// SAFETY: mirrors the `RefCnt` implementation for `Arc` in arc_swap.
+
unsafe impl<T> RefCnt for ArcRefCnt<T> {
+
type Base = T;
+
+
fn into_ptr(me: Self) -> *mut Self::Base {
+
Arc::into_raw(me.0) as *mut T
+
}
+
+
fn as_ptr(me: &Self) -> *mut Self::Base {
+
// Slightly convoluted way to do this, but this avoids stacked borrows violations. The same
+
// intention as
+
//
+
// me as &T as *const T as *mut T
+
//
+
// We first create a "shallow copy" of me - one that doesn't really own its ref count
+
// (that's OK, me _does_ own it, so it can't be destroyed in the meantime).
+
// Then we can use into_raw (which preserves not having the ref count).
+
//
+
// We need to "revert" the changes we did. In current std implementation, the combination
+
// of from_raw and forget is no-op. But formally, into_raw shall be paired with from_raw
+
// and that read shall be paired with forget to properly "close the brackets". In future
+
// versions of STD, these may become something else that's not really no-op (unlikely, but
+
// possible), so we future-proof it a bit.
+
+
// SAFETY: &T cast to *const T will always be aligned, initialised and valid for reads
+
let ptr = Arc::into_raw(unsafe { std::ptr::read(&me.0) });
+
let ptr = ptr as *mut T;
+
+
// SAFETY: We got the pointer from into_raw just above
+
std::mem::forget(unsafe { Arc::from_raw(ptr) });
+
+
ptr
+
}
+
+
unsafe fn from_ptr(ptr: *const Self::Base) -> Self {
+
Self(unsafe { Arc::from_raw(ptr) })
+
}
+
}