From 2823d4b119c3d38e0afd71fc4a8be5ccceb3fac9 Mon Sep 17 00:00:00 2001
From: "Thomas G. Lopes" <26071571+TGlide@users.noreply.github.com>
Date: Sun, 15 Jun 2025 22:23:04 +0100
Subject: [PATCH] cached query babyyy
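
Adds a useCachedQuery wrapper around convex-svelte's useQuery: results are
mirrored into a sessionStorage-backed LRU cache with debounced writes, so a
cache hit renders immediately while the live Convex query revalidates.

Rough usage sketch (import paths, the api.messages.list reference, and the
chatId argument are illustrative placeholders, not endpoints in this repo):

    import { useCachedQuery } from '$lib/cache/cached-query.svelte';
    import { api } from '../convex/_generated/api';

    const messages = useCachedQuery(
        api.messages.list,
        { chatId },
        {
            ttl: 60 * 60 * 1000,        // expire cached entries after an hour
            staleWhileRevalidate: true, // show cached data while the query refetches
        }
    );

    // messages.data    -> cached value first, then the fresh Convex result
    // messages.isStale -> true while cached data is shown and a refetch is in flight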
---
README.md | 3 +-
src/app.html | 3 +-
src/lib/cache/cached-query.svelte.ts | 96 +++++++++++
src/lib/cache/lru-cache.ts | 136 +++++++++++++++
src/lib/cache/session-cache.ts | 160 ++++++++++++++++++
src/routes/+layout.server.ts | 3 +
src/routes/account/api-keys/+page.svelte | 3 -
.../account/api-keys/provider-card.svelte | 17 +-
src/routes/account/models/+page.svelte | 15 +-
9 files changed, 413 insertions(+), 23 deletions(-)
create mode 100644 src/lib/cache/cached-query.svelte.ts
create mode 100644 src/lib/cache/lru-cache.ts
create mode 100644 src/lib/cache/session-cache.ts
diff --git a/README.md b/README.md
index 9fc2ddb..0203e70 100644
--- a/README.md
+++ b/README.md
@@ -29,10 +29,11 @@ IDK, calm down
## TODO
- [x] Login & Auth
+- [x] useCachedQuery
- [ ] Convex schemas for chats
- [ ] Actual fucking UI for chat
- [ ] Providers (BYOK)
- - [ ] Openrouter
+ - [x] Openrouter
- [ ] HuggingFace
- [ ] OpenAI
- [ ] File upload
diff --git a/src/app.html b/src/app.html
index 77a5ff5..179171a 100644
--- a/src/app.html
+++ b/src/app.html
@@ -4,9 +4,10 @@
+
Thom Chat
%sveltekit.head%
-
+
%sveltekit.body%
diff --git a/src/lib/cache/cached-query.svelte.ts b/src/lib/cache/cached-query.svelte.ts
new file mode 100644
index 0000000..4f2756f
--- /dev/null
+++ b/src/lib/cache/cached-query.svelte.ts
@@ -0,0 +1,96 @@
+import { useQuery as convexUseQuery } from 'convex-svelte';
+import { SessionStorageCache } from './session-cache.js';
+import { getFunctionName, type FunctionReference, type OptionalRestArgs } from 'convex/server';
+import { watch } from 'runed';
+
+interface CachedQueryOptions {
+ cacheKey?: string;
+ ttl?: number;
+ staleWhileRevalidate?: boolean;
+ enabled?: boolean;
+}
+
+interface QueryResult<T> {
+ data: T | undefined;
+ error: Error | undefined;
+ isLoading: boolean;
+ isStale: boolean;
+}
+
+const globalCache = new SessionStorageCache('convex-query-cache');
+
+export function useCachedQuery<
+ Query extends FunctionReference<'query'>,
+ Args extends OptionalRestArgs,
+>(
+ query: Query,
+ ...args: Args extends undefined ? [] : [Args[0], CachedQueryOptions?]
+): QueryResult<Query['_returnType']> {
+ const [queryArgs, options = {}] = args as [Args[0]?, CachedQueryOptions?];
+
+ const {
+ cacheKey,
+ ttl = 7 * 24 * 60 * 60 * 1000, // 1 week default
+ staleWhileRevalidate = true,
+ enabled = true,
+ } = options;
+
+ // Generate cache key from query reference and args
+ const key = cacheKey || `${getFunctionName(query)}:${JSON.stringify(queryArgs || {})}`;
+
+ // Get cached data
+ const cachedData = enabled ? (globalCache.get(key) as Query['_returnType'] | undefined) : undefined;
+
+ // Convex query, used as soon as possible
+ const convexResult = convexUseQuery(query, queryArgs, {
+ // enabled: enabled && (!cachedData || !staleWhileRevalidate),
+ });
+
+ const shouldUseCached = $derived(
+ // Use the cached value only until the live query returns fresh data
+ cachedData !== undefined &&
+ convexResult.data === undefined &&
+ (staleWhileRevalidate || convexResult.isLoading)
+ );
+
+ // Cache fresh data when available
+ watch(
+ () => $state.snapshot(convexResult.data),
+ (data) => {
+ if (data === undefined || !enabled) return;
+ globalCache.set(key, data, ttl);
+ }
+ );
+
+ return {
+ get data() {
+ return shouldUseCached ? cachedData : convexResult.data;
+ },
+ get error() {
+ return convexResult.error;
+ },
+ get isLoading() {
+ return shouldUseCached ? false : convexResult.isLoading;
+ },
+ get isStale() {
+ return shouldUseCached && convexResult.isLoading;
+ },
+ };
+}
+
+export function invalidateQuery(query: FunctionReference<'query'>, queryArgs?: unknown): void {
+ const key = `${getFunctionName(query)}:${JSON.stringify(queryArgs || {})}`;
+ globalCache.delete(key);
+}
+
+export function invalidateQueriesMatching(pattern: string | RegExp): void {
+ // Note: This is a simplified implementation
+ // In a real implementation, you'd need to track all cache keys
+ console.warn(
+ 'invalidateQueriesMatching not fully implemented - consider using specific key invalidation'
+ );
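+ // Hypothetical sketch of what a full implementation could look like if
+ // SessionStorageCache exposed its keys (no such keys() method exists yet):
+ //
+ //   for (const key of globalCache.keys()) {
+ //     const matches = typeof pattern === 'string' ? key.startsWith(pattern) : pattern.test(key);
+ //     if (matches) globalCache.delete(key);
+ //   }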
+}
+
+export function clearQueryCache(): void {
+ globalCache.clear();
+}
+
+export { globalCache as queryCache };
+
diff --git a/src/lib/cache/lru-cache.ts b/src/lib/cache/lru-cache.ts
new file mode 100644
index 0000000..18334f7
--- /dev/null
+++ b/src/lib/cache/lru-cache.ts
@@ -0,0 +1,136 @@
+interface CacheNode<K, V> {
+ key: K;
+ value: V;
+ size: number;
+ prev: CacheNode<K, V> | null;
+ next: CacheNode<K, V> | null;
+}
+
+export class LRUCache<K, V> {
+ private capacity: number;
+ private currentSize = 0;
+ private cache = new Map<K, CacheNode<K, V>>();
+ private head: CacheNode<K, V> | null = null;
+ private tail: CacheNode<K, V> | null = null;
+
+ constructor(maxSizeBytes = 1024 * 1024) {
+ this.capacity = maxSizeBytes;
+ }
+
+ private calculateSize(value: V): number {
+ try {
+ return new Blob([JSON.stringify(value)]).size;
+ } catch {
+ return JSON.stringify(value).length * 2;
+ }
+ }
+
+ private removeNode(node: CacheNode<K, V>): void {
+ if (node.prev) {
+ node.prev.next = node.next;
+ } else {
+ this.head = node.next;
+ }
+
+ if (node.next) {
+ node.next.prev = node.prev;
+ } else {
+ this.tail = node.prev;
+ }
+ }
+
+ private addToHead(node: CacheNode<K, V>): void {
+ node.prev = null;
+ node.next = this.head;
+
+ if (this.head) {
+ this.head.prev = node;
+ }
+
+ this.head = node;
+
+ if (!this.tail) {
+ this.tail = node;
+ }
+ }
+
+ private evictLRU(): void {
+ while (this.tail && this.currentSize > this.capacity) {
+ const lastNode = this.tail;
+ this.removeNode(lastNode);
+ this.cache.delete(lastNode.key);
+ this.currentSize -= lastNode.size;
+ }
+ }
+
+ get(key: K): V | undefined {
+ const node = this.cache.get(key);
+ if (!node) return undefined;
+
+ this.removeNode(node);
+ this.addToHead(node);
+
+ return node.value;
+ }
+
+ set(key: K, value: V): void {
+ const size = this.calculateSize(value);
+
+ if (size > this.capacity) {
+ return;
+ }
+
+ const existingNode = this.cache.get(key);
+
+ if (existingNode) {
+ existingNode.value = value;
+ this.currentSize = this.currentSize - existingNode.size + size;
+ existingNode.size = size;
+ this.removeNode(existingNode);
+ this.addToHead(existingNode);
+ } else {
+ const newNode: CacheNode<K, V> = {
+ key,
+ value,
+ size,
+ prev: null,
+ next: null,
+ };
+
+ this.currentSize += size;
+ this.cache.set(key, newNode);
+ this.addToHead(newNode);
+ }
+
+ this.evictLRU();
+ }
+
+ delete(key: K): boolean {
+ const node = this.cache.get(key);
+ if (!node) return false;
+
+ this.removeNode(node);
+ this.cache.delete(key);
+ this.currentSize -= node.size;
+ return true;
+ }
+
+ clear(): void {
+ this.cache.clear();
+ this.head = null;
+ this.tail = null;
+ this.currentSize = 0;
+ }
+
+ get size(): number {
+ return this.cache.size;
+ }
+
+ get bytes(): number {
+ return this.currentSize;
+ }
+
+ has(key: K): boolean {
+ return this.cache.has(key);
+ }
+}
\ No newline at end of file
diff --git a/src/lib/cache/session-cache.ts b/src/lib/cache/session-cache.ts
new file mode 100644
index 0000000..445924f
--- /dev/null
+++ b/src/lib/cache/session-cache.ts
@@ -0,0 +1,160 @@
+import { LRUCache } from './lru-cache.js';
+
+interface CacheEntry<T> {
+ data: T;
+ timestamp: number;
+ ttl?: number;
+}
+
+export class SessionStorageCache<T = unknown> {
+ private memoryCache: LRUCache<string, CacheEntry<T>>;
+ private storageKey: string;
+ private writeTimeout: ReturnType<typeof setTimeout> | null = null;
+ private debounceMs: number;
+ private pendingWrites = new Set<string>();
+
+ constructor(
+ storageKey = 'query-cache',
+ maxSizeBytes = 1024 * 1024,
+ debounceMs = 300
+ ) {
+ this.storageKey = storageKey;
+ this.debounceMs = debounceMs;
+ this.memoryCache = new LRUCache<string, CacheEntry<T>>(maxSizeBytes);
+ this.loadFromSessionStorage();
+ }
+
+ private loadFromSessionStorage(): void {
+ try {
+ const stored = sessionStorage.getItem(this.storageKey);
+ if (!stored) return;
+
+ const data = JSON.parse(stored) as Record<string, CacheEntry<T>>;
+ const now = Date.now();
+
+ for (const [key, entry] of Object.entries(data)) {
+ if (entry.ttl && now - entry.timestamp > entry.ttl) {
+ continue;
+ }
+ this.memoryCache.set(key, entry);
+ }
+ } catch (error) {
+ console.warn('Failed to load cache from sessionStorage:', error);
+ }
+ }
+
+ private debouncedWrite(): void {
+ if (this.writeTimeout) {
+ clearTimeout(this.writeTimeout);
+ }
+
+ this.writeTimeout = setTimeout(() => {
+ this.writeToSessionStorage();
+ this.writeTimeout = null;
+ }, this.debounceMs);
+ }
+
+ private writeToSessionStorage(): void {
+ try {
+ const cacheData: Record<string, CacheEntry<T>> = {};
+ const now = Date.now();
+
+ for (const key of this.pendingWrites) {
+ const entry = this.memoryCache.get(key);
+ if (entry && (!entry.ttl || now - entry.timestamp < entry.ttl)) {
+ cacheData[key] = entry;
+ }
+ }
+
+ const existingData = sessionStorage.getItem(this.storageKey);
+ if (existingData) {
+ const existing = JSON.parse(existingData) as Record<string, CacheEntry<T>>;
+ for (const [key, entry] of Object.entries(existing)) {
+ if (!this.pendingWrites.has(key) && (!entry.ttl || now - entry.timestamp < entry.ttl)) {
+ cacheData[key] = entry;
+ }
+ }
+ }
+
+ sessionStorage.setItem(this.storageKey, JSON.stringify(cacheData));
+ this.pendingWrites.clear();
+ } catch (error) {
+ console.warn('Failed to write cache to sessionStorage:', error);
+ }
+ }
+
+ get(key: string): T | undefined {
+ const entry = this.memoryCache.get(key);
+ if (!entry) return undefined;
+
+ if (entry.ttl && Date.now() - entry.timestamp > entry.ttl) {
+ this.delete(key);
+ return undefined;
+ }
+
+ return entry.data;
+ }
+
+ set(key: string, data: T, ttlMs?: number): void {
+ const entry: CacheEntry<T> = {
+ data,
+ timestamp: Date.now(),
+ ttl: ttlMs,
+ };
+
+ this.memoryCache.set(key, entry);
+ this.pendingWrites.add(key);
+ this.debouncedWrite();
+ }
+
+ delete(key: string): boolean {
+ const deleted = this.memoryCache.delete(key);
+ if (deleted) {
+ this.pendingWrites.add(key);
+ this.debouncedWrite();
+ }
+ return deleted;
+ }
+
+ clear(): void {
+ this.memoryCache.clear();
+ try {
+ sessionStorage.removeItem(this.storageKey);
+ } catch (error) {
+ console.warn('Failed to clear sessionStorage:', error);
+ }
+ if (this.writeTimeout) {
+ clearTimeout(this.writeTimeout);
+ this.writeTimeout = null;
+ }
+ this.pendingWrites.clear();
+ }
+
+ has(key: string): boolean {
+ const entry = this.memoryCache.get(key);
+ if (!entry) return false;
+
+ if (entry.ttl && Date.now() - entry.timestamp > entry.ttl) {
+ this.delete(key);
+ return false;
+ }
+
+ return true;
+ }
+
+ get size(): number {
+ return this.memoryCache.size;
+ }
+
+ get bytes(): number {
+ return this.memoryCache.bytes;
+ }
+
+ forceWrite(): void {
+ if (this.writeTimeout) {
+ clearTimeout(this.writeTimeout);
+ this.writeTimeout = null;
+ }
+ this.writeToSessionStorage();
+ }
+}
\ No newline at end of file
diff --git a/src/routes/+layout.server.ts b/src/routes/+layout.server.ts
index e81c390..d736029 100644
--- a/src/routes/+layout.server.ts
+++ b/src/routes/+layout.server.ts
@@ -7,3 +7,6 @@ export const load: LayoutServerLoad = async ({ locals }) => {
session,
};
};
+
+// Makes caching easier, and tbf, we don't need SSR anyways here
+export const ssr = false;
diff --git a/src/routes/account/api-keys/+page.svelte b/src/routes/account/api-keys/+page.svelte
index 969aa54..037f394 100644
--- a/src/routes/account/api-keys/+page.svelte
+++ b/src/routes/account/api-keys/+page.svelte
@@ -1,8 +1,5 @@