cached query babyyy
This commit is contained in:
parent
d6b6377fd2
commit
2823d4b119
9 changed files with 413 additions and 23 deletions
|
|
@ -29,10 +29,11 @@ IDK, calm down
|
|||
## TODO
|
||||
|
||||
- [x] Login & Auth
|
||||
- [x] useCachedQuery
|
||||
- [ ] Convex schemas for chats
|
||||
- [ ] Actual fucking UI for chat
|
||||
- [ ] Providers (BYOK)
|
||||
- [ ] Openrouter
|
||||
- [x] Openrouter
|
||||
- [ ] HuggingFace
|
||||
- [ ] OpenAI
|
||||
- [ ] File upload
|
||||
|
|
|
|||
|
|
@ -4,9 +4,10 @@
|
|||
<meta charset="utf-8" />
|
||||
<link rel="icon" href="%sveltekit.assets%/favicon.png" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<title>Thom Chat</title>
|
||||
%sveltekit.head%
|
||||
</head>
|
||||
<body data-sveltekit-preload-data="hover">
|
||||
<body data-sveltekit-preload-data="hover" style="background: oklch(0.2409 0.0201 307.5346)">
|
||||
<div style="display: contents">%sveltekit.body%</div>
|
||||
</body>
|
||||
</html>
|
||||
|
|
|
|||
96
src/lib/cache/cached-query.svelte.ts
vendored
Normal file
96
src/lib/cache/cached-query.svelte.ts
vendored
Normal file
|
|
@ -0,0 +1,96 @@
|
|||
import { useQuery as convexUseQuery } from 'convex-svelte';
|
||||
import { SessionStorageCache } from './session-cache.js';
|
||||
import { getFunctionName, type FunctionReference, type OptionalRestArgs } from 'convex/server';
|
||||
import { watch } from 'runed';
|
||||
|
||||
/** Tuning knobs for useCachedQuery. */
interface CachedQueryOptions {
	// Overrides the auto-generated `${functionName}:${JSON(args)}` cache key.
	cacheKey?: string;
	// Entry lifetime in ms; useCachedQuery defaults this to one week.
	ttl?: number;
	// Whether a cached snapshot may be served while fresh data is fetched.
	staleWhileRevalidate?: boolean;
	// When false, the cache is neither read nor written for this query.
	enabled?: boolean;
}

/** Read-only, reactive result surface returned by useCachedQuery. */
interface QueryResult<T> {
	data: T | undefined;
	error: Error | undefined;
	isLoading: boolean;
	isStale: boolean;
}

// Module-wide cache shared by every useCachedQuery call site.
const globalCache = new SessionStorageCache('convex-query-cache');
|
||||
|
||||
export function useCachedQuery<
|
||||
Query extends FunctionReference<'query'>,
|
||||
Args extends OptionalRestArgs<Query>,
|
||||
>(
|
||||
query: Query,
|
||||
...args: Args extends undefined ? [] : [Args[0], CachedQueryOptions?]
|
||||
): QueryResult<Query['_returnType']> {
|
||||
const [queryArgs, options = {}] = args as [Args[0]?, CachedQueryOptions?];
|
||||
|
||||
const {
|
||||
cacheKey,
|
||||
ttl = 7 * 24 * 60 * 60 * 1000, // 1 week default
|
||||
staleWhileRevalidate = true,
|
||||
enabled = true,
|
||||
} = options;
|
||||
|
||||
// Generate cache key from query reference and args
|
||||
const key = cacheKey || `${getFunctionName(query)}:${JSON.stringify(queryArgs || {})}`;
|
||||
|
||||
// Get cached data
|
||||
const cachedData = enabled ? globalCache.get(key) : undefined;
|
||||
|
||||
// Convex query, used as soon as possible
|
||||
const convexResult = convexUseQuery(query, queryArgs, {
|
||||
// enabled: enabled && (!cachedData || !staleWhileRevalidate),
|
||||
});
|
||||
|
||||
const shouldUseCached = $derived(
|
||||
cachedData !== undefined && (staleWhileRevalidate || convexResult.isLoading)
|
||||
);
|
||||
|
||||
// Cache fresh data when available
|
||||
watch(
|
||||
() => $state.snapshot(convexResult.data),
|
||||
(data) => {
|
||||
if (data === undefined || !enabled) return;
|
||||
globalCache.set(key, data, ttl);
|
||||
}
|
||||
);
|
||||
|
||||
return {
|
||||
get data() {
|
||||
return shouldUseCached ? cachedData : convexResult.data;
|
||||
},
|
||||
get error() {
|
||||
return convexResult.error;
|
||||
},
|
||||
get isLoading() {
|
||||
return shouldUseCached ? false : convexResult.isLoading;
|
||||
},
|
||||
get isStale() {
|
||||
return shouldUseCached && convexResult.isLoading;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function invalidateQuery(query: FunctionReference<'query'>, queryArgs?: unknown): void {
|
||||
const key = `${getFunctionName(query)}:${JSON.stringify(queryArgs || {})}`;
|
||||
globalCache.delete(key);
|
||||
}
|
||||
|
||||
export function invalidateQueriesMatching(pattern: string | RegExp): void {
|
||||
// Note: This is a simplified implementation
|
||||
// In a real implementation, you'd need to track all cache keys
|
||||
console.warn(
|
||||
'invalidateQueriesMatching not fully implemented - consider using specific key invalidation'
|
||||
);
|
||||
}
|
||||
|
||||
/** Wipe the shared query cache: in-memory entries and the sessionStorage mirror. */
export function clearQueryCache(): void {
	globalCache.clear();
}

// Expose the shared cache instance for direct access (e.g. forceWrite()).
export { globalCache as queryCache };
|
||||
|
||||
136
src/lib/cache/lru-cache.ts
vendored
Normal file
136
src/lib/cache/lru-cache.ts
vendored
Normal file
|
|
@ -0,0 +1,136 @@
|
|||
interface CacheNode<K, V> {
|
||||
key: K;
|
||||
value: V;
|
||||
size: number;
|
||||
prev: CacheNode<K, V> | null;
|
||||
next: CacheNode<K, V> | null;
|
||||
}
|
||||
|
||||
export class LRUCache<K = string, V = unknown> {
|
||||
private capacity: number;
|
||||
private currentSize = 0;
|
||||
private cache = new Map<K, CacheNode<K, V>>();
|
||||
private head: CacheNode<K, V> | null = null;
|
||||
private tail: CacheNode<K, V> | null = null;
|
||||
|
||||
constructor(maxSizeBytes = 1024 * 1024) {
|
||||
this.capacity = maxSizeBytes;
|
||||
}
|
||||
|
||||
private calculateSize(value: V): number {
|
||||
try {
|
||||
return new Blob([JSON.stringify(value)]).size;
|
||||
} catch {
|
||||
return JSON.stringify(value).length * 2;
|
||||
}
|
||||
}
|
||||
|
||||
private removeNode(node: CacheNode<K, V>): void {
|
||||
if (node.prev) {
|
||||
node.prev.next = node.next;
|
||||
} else {
|
||||
this.head = node.next;
|
||||
}
|
||||
|
||||
if (node.next) {
|
||||
node.next.prev = node.prev;
|
||||
} else {
|
||||
this.tail = node.prev;
|
||||
}
|
||||
}
|
||||
|
||||
private addToHead(node: CacheNode<K, V>): void {
|
||||
node.prev = null;
|
||||
node.next = this.head;
|
||||
|
||||
if (this.head) {
|
||||
this.head.prev = node;
|
||||
}
|
||||
|
||||
this.head = node;
|
||||
|
||||
if (!this.tail) {
|
||||
this.tail = node;
|
||||
}
|
||||
}
|
||||
|
||||
private evictLRU(): void {
|
||||
while (this.tail && this.currentSize > this.capacity) {
|
||||
const lastNode = this.tail;
|
||||
this.removeNode(lastNode);
|
||||
this.cache.delete(lastNode.key);
|
||||
this.currentSize -= lastNode.size;
|
||||
}
|
||||
}
|
||||
|
||||
get(key: K): V | undefined {
|
||||
const node = this.cache.get(key);
|
||||
if (!node) return undefined;
|
||||
|
||||
this.removeNode(node);
|
||||
this.addToHead(node);
|
||||
|
||||
return node.value;
|
||||
}
|
||||
|
||||
set(key: K, value: V): void {
|
||||
const size = this.calculateSize(value);
|
||||
|
||||
if (size > this.capacity) {
|
||||
return;
|
||||
}
|
||||
|
||||
const existingNode = this.cache.get(key);
|
||||
|
||||
if (existingNode) {
|
||||
existingNode.value = value;
|
||||
this.currentSize = this.currentSize - existingNode.size + size;
|
||||
existingNode.size = size;
|
||||
this.removeNode(existingNode);
|
||||
this.addToHead(existingNode);
|
||||
} else {
|
||||
const newNode: CacheNode<K, V> = {
|
||||
key,
|
||||
value,
|
||||
size,
|
||||
prev: null,
|
||||
next: null,
|
||||
};
|
||||
|
||||
this.currentSize += size;
|
||||
this.cache.set(key, newNode);
|
||||
this.addToHead(newNode);
|
||||
}
|
||||
|
||||
this.evictLRU();
|
||||
}
|
||||
|
||||
delete(key: K): boolean {
|
||||
const node = this.cache.get(key);
|
||||
if (!node) return false;
|
||||
|
||||
this.removeNode(node);
|
||||
this.cache.delete(key);
|
||||
this.currentSize -= node.size;
|
||||
return true;
|
||||
}
|
||||
|
||||
clear(): void {
|
||||
this.cache.clear();
|
||||
this.head = null;
|
||||
this.tail = null;
|
||||
this.currentSize = 0;
|
||||
}
|
||||
|
||||
get size(): number {
|
||||
return this.cache.size;
|
||||
}
|
||||
|
||||
get bytes(): number {
|
||||
return this.currentSize;
|
||||
}
|
||||
|
||||
has(key: K): boolean {
|
||||
return this.cache.has(key);
|
||||
}
|
||||
}
|
||||
160
src/lib/cache/session-cache.ts
vendored
Normal file
160
src/lib/cache/session-cache.ts
vendored
Normal file
|
|
@ -0,0 +1,160 @@
|
|||
import { LRUCache } from './lru-cache.js';
|
||||
|
||||
interface CacheEntry<T> {
|
||||
data: T;
|
||||
timestamp: number;
|
||||
ttl?: number;
|
||||
}
|
||||
|
||||
export class SessionStorageCache<T = unknown> {
|
||||
private memoryCache: LRUCache<string, CacheEntry<T>>;
|
||||
private storageKey: string;
|
||||
private writeTimeout: ReturnType<typeof setTimeout> | null = null;
|
||||
private debounceMs: number;
|
||||
private pendingWrites = new Set<string>();
|
||||
|
||||
constructor(
|
||||
storageKey = 'query-cache',
|
||||
maxSizeBytes = 1024 * 1024,
|
||||
debounceMs = 300
|
||||
) {
|
||||
this.storageKey = storageKey;
|
||||
this.debounceMs = debounceMs;
|
||||
this.memoryCache = new LRUCache<string, CacheEntry<T>>(maxSizeBytes);
|
||||
this.loadFromSessionStorage();
|
||||
}
|
||||
|
||||
private loadFromSessionStorage(): void {
|
||||
try {
|
||||
const stored = sessionStorage.getItem(this.storageKey);
|
||||
if (!stored) return;
|
||||
|
||||
const data = JSON.parse(stored) as Record<string, CacheEntry<T>>;
|
||||
const now = Date.now();
|
||||
|
||||
for (const [key, entry] of Object.entries(data)) {
|
||||
if (entry.ttl && now - entry.timestamp > entry.ttl) {
|
||||
continue;
|
||||
}
|
||||
this.memoryCache.set(key, entry);
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn('Failed to load cache from sessionStorage:', error);
|
||||
}
|
||||
}
|
||||
|
||||
private debouncedWrite(): void {
|
||||
if (this.writeTimeout) {
|
||||
clearTimeout(this.writeTimeout);
|
||||
}
|
||||
|
||||
this.writeTimeout = setTimeout(() => {
|
||||
this.writeToSessionStorage();
|
||||
this.writeTimeout = null;
|
||||
}, this.debounceMs);
|
||||
}
|
||||
|
||||
private writeToSessionStorage(): void {
|
||||
try {
|
||||
const cacheData: Record<string, CacheEntry<T>> = {};
|
||||
const now = Date.now();
|
||||
|
||||
for (const key of this.pendingWrites) {
|
||||
const entry = this.memoryCache.get(key);
|
||||
if (entry && (!entry.ttl || now - entry.timestamp < entry.ttl)) {
|
||||
cacheData[key] = entry;
|
||||
}
|
||||
}
|
||||
|
||||
const existingData = sessionStorage.getItem(this.storageKey);
|
||||
if (existingData) {
|
||||
const existing = JSON.parse(existingData) as Record<string, CacheEntry<T>>;
|
||||
for (const [key, entry] of Object.entries(existing)) {
|
||||
if (!this.pendingWrites.has(key) && (!entry.ttl || now - entry.timestamp < entry.ttl)) {
|
||||
cacheData[key] = entry;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sessionStorage.setItem(this.storageKey, JSON.stringify(cacheData));
|
||||
this.pendingWrites.clear();
|
||||
} catch (error) {
|
||||
console.warn('Failed to write cache to sessionStorage:', error);
|
||||
}
|
||||
}
|
||||
|
||||
get(key: string): T | undefined {
|
||||
const entry = this.memoryCache.get(key);
|
||||
if (!entry) return undefined;
|
||||
|
||||
if (entry.ttl && Date.now() - entry.timestamp > entry.ttl) {
|
||||
this.delete(key);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return entry.data;
|
||||
}
|
||||
|
||||
set(key: string, data: T, ttlMs?: number): void {
|
||||
const entry: CacheEntry<T> = {
|
||||
data,
|
||||
timestamp: Date.now(),
|
||||
ttl: ttlMs,
|
||||
};
|
||||
|
||||
this.memoryCache.set(key, entry);
|
||||
this.pendingWrites.add(key);
|
||||
this.debouncedWrite();
|
||||
}
|
||||
|
||||
delete(key: string): boolean {
|
||||
const deleted = this.memoryCache.delete(key);
|
||||
if (deleted) {
|
||||
this.pendingWrites.add(key);
|
||||
this.debouncedWrite();
|
||||
}
|
||||
return deleted;
|
||||
}
|
||||
|
||||
clear(): void {
|
||||
this.memoryCache.clear();
|
||||
try {
|
||||
sessionStorage.removeItem(this.storageKey);
|
||||
} catch (error) {
|
||||
console.warn('Failed to clear sessionStorage:', error);
|
||||
}
|
||||
if (this.writeTimeout) {
|
||||
clearTimeout(this.writeTimeout);
|
||||
this.writeTimeout = null;
|
||||
}
|
||||
this.pendingWrites.clear();
|
||||
}
|
||||
|
||||
has(key: string): boolean {
|
||||
const entry = this.memoryCache.get(key);
|
||||
if (!entry) return false;
|
||||
|
||||
if (entry.ttl && Date.now() - entry.timestamp > entry.ttl) {
|
||||
this.delete(key);
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
get size(): number {
|
||||
return this.memoryCache.size;
|
||||
}
|
||||
|
||||
get bytes(): number {
|
||||
return this.memoryCache.bytes;
|
||||
}
|
||||
|
||||
forceWrite(): void {
|
||||
if (this.writeTimeout) {
|
||||
clearTimeout(this.writeTimeout);
|
||||
this.writeTimeout = null;
|
||||
}
|
||||
this.writeToSessionStorage();
|
||||
}
|
||||
}
|
||||
|
|
@ -7,3 +7,6 @@ export const load: LayoutServerLoad = async ({ locals }) => {
|
|||
session,
|
||||
};
|
||||
};
|
||||
|
||||
// Makes caching easier, and tbf, we don't need SSR anyways here
|
||||
export const ssr = false;
|
||||
|
|
|
|||
|
|
@ -1,8 +1,5 @@
|
|||
<script lang="ts">
|
||||
import { Provider, type ProviderMeta } from '$lib/types';
|
||||
import { useQuery } from 'convex-svelte';
|
||||
import { api } from '$lib/backend/convex/_generated/api';
|
||||
import { session } from '$lib/state/session.svelte.js';
|
||||
import ProviderCard from './provider-card.svelte';
|
||||
|
||||
const allProviders = Object.values(Provider);
|
||||
|
|
|
|||
|
|
@ -1,14 +1,15 @@
|
|||
<script lang="ts">
|
||||
import * as Card from '$lib/components/ui/card';
|
||||
import { KeyIcon } from '@lucide/svelte';
|
||||
import { Input } from '$lib/components/ui/input';
|
||||
import { Button } from '$lib/components/ui/button';
|
||||
import { Link } from '$lib/components/ui/link';
|
||||
import { useConvexClient, useQuery } from 'convex-svelte';
|
||||
import { api } from '$lib/backend/convex/_generated/api';
|
||||
import { LocalToasts } from '$lib/builders/local-toasts.svelte';
|
||||
import { useCachedQuery } from '$lib/cache/cached-query.svelte';
|
||||
import { Button } from '$lib/components/ui/button';
|
||||
import * as Card from '$lib/components/ui/card';
|
||||
import { Input } from '$lib/components/ui/input';
|
||||
import { Link } from '$lib/components/ui/link';
|
||||
import { session } from '$lib/state/session.svelte.js';
|
||||
import type { Provider, ProviderMeta } from '$lib/types';
|
||||
import { LocalToasts } from '$lib/builders/local-toasts.svelte';
|
||||
import { KeyIcon } from '@lucide/svelte';
|
||||
import { useConvexClient } from 'convex-svelte';
|
||||
import { ResultAsync } from 'neverthrow';
|
||||
|
||||
type Props = {
|
||||
|
|
@ -19,7 +20,7 @@
|
|||
let { provider, meta }: Props = $props();
|
||||
const id = $props.id();
|
||||
|
||||
const keyQuery = useQuery(api.user_keys.get, {
|
||||
const keyQuery = useCachedQuery(api.user_keys.get, {
|
||||
user_id: session.current?.user.id ?? '',
|
||||
provider,
|
||||
});
|
||||
|
|
|
|||
|
|
@ -1,20 +1,15 @@
|
|||
<script lang="ts">
|
||||
import { Provider } from '$lib/types.js';
|
||||
import { useQuery } from 'convex-svelte';
|
||||
import ModelCard from './model-card.svelte';
|
||||
import { session } from '$lib/state/session.svelte';
|
||||
import { api } from '$lib/backend/convex/_generated/api';
|
||||
import { useCachedQuery } from '$lib/cache/cached-query.svelte';
|
||||
import { session } from '$lib/state/session.svelte';
|
||||
import { Provider } from '$lib/types.js';
|
||||
import ModelCard from './model-card.svelte';
|
||||
|
||||
let { data } = $props();
|
||||
|
||||
const enabledModels = useQuery(api.user_enabled_models.get_enabled, {
|
||||
const enabledModels = useCachedQuery(api.user_enabled_models.get_enabled, {
|
||||
user_id: session.current?.user.id ?? '',
|
||||
});
|
||||
|
||||
$inspect(
|
||||
enabledModels.data,
|
||||
!!enabledModels.data?.[`${Provider.OpenRouter}:${data.openRouterModels[0].id}`]
|
||||
);
|
||||
</script>
|
||||
|
||||
<svelte:head>
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue