feat(FontStore): add fetchAllPagesTo for parallel batch page loading
This commit is contained in:
@@ -561,4 +561,67 @@ describe('FontStore', () => {
|
||||
store.destroy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('fetchAllPagesTo', () => {
|
||||
beforeEach(() => {
|
||||
fetch.mockReset();
|
||||
queryClient.clear();
|
||||
});
|
||||
|
||||
it('fetches all missing pages in parallel up to targetIndex', async () => {
|
||||
// First page already loaded (offset 0, limit 10, total 50)
|
||||
const firstFonts = generateMockFonts(10);
|
||||
fetch.mockResolvedValueOnce(makeResponse(firstFonts, { total: 50, limit: 10, offset: 0 }));
|
||||
const store = makeStore();
|
||||
await store.refetch();
|
||||
flushSync();
|
||||
|
||||
expect(store.fonts).toHaveLength(10);
|
||||
|
||||
// Mock remaining pages
|
||||
for (let offset = 10; offset < 50; offset += 10) {
|
||||
fetch.mockResolvedValueOnce(
|
||||
makeResponse(generateMockFonts(10), { total: 50, limit: 10, offset }),
|
||||
);
|
||||
}
|
||||
|
||||
await store.fetchAllPagesTo(40);
|
||||
flushSync();
|
||||
|
||||
expect(store.fonts).toHaveLength(50);
|
||||
});
|
||||
|
||||
it('skips pages that fail and still merges successful ones', async () => {
|
||||
const firstFonts = generateMockFonts(10);
|
||||
fetch.mockResolvedValueOnce(makeResponse(firstFonts, { total: 30, limit: 10, offset: 0 }));
|
||||
const store = makeStore();
|
||||
await store.refetch();
|
||||
flushSync();
|
||||
|
||||
// offset=10 fails, offset=20 succeeds
|
||||
fetch.mockRejectedValueOnce(new Error('network error'));
|
||||
fetch.mockResolvedValueOnce(
|
||||
makeResponse(generateMockFonts(10), { total: 30, limit: 10, offset: 20 }),
|
||||
);
|
||||
|
||||
await store.fetchAllPagesTo(25);
|
||||
flushSync();
|
||||
|
||||
// Page at offset=20 merged, page at offset=10 missing — 20 total
|
||||
expect(store.fonts).toHaveLength(20);
|
||||
});
|
||||
|
||||
it('is a no-op when target is within already-loaded data', async () => {
|
||||
const firstFonts = generateMockFonts(10);
|
||||
fetch.mockResolvedValueOnce(makeResponse(firstFonts, { total: 50, limit: 10, offset: 0 }));
|
||||
const store = makeStore();
|
||||
await store.refetch();
|
||||
flushSync();
|
||||
|
||||
const callsBefore = fetch.mock.calls.length;
|
||||
await store.fetchAllPagesTo(5);
|
||||
|
||||
expect(fetch.mock.calls.length).toBe(callsBefore);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -242,6 +242,80 @@ export class FontStore {
|
||||
/**
 * Fetch the next sequential page through the underlying infinite-query
 * observer. Thin wrapper so callers never touch the observer directly.
 */
async nextPage(): Promise<void> {
  await this.#observer.fetchNextPage();
}
|
||||
|
||||
// True while fetchAllPagesTo is running; re-entrant calls return immediately.
#isCatchingUp = false;
// Offsets with a request currently in flight, so a page is never fetched twice.
#inFlightOffsets = new Set<number>();
|
||||
|
||||
/**
|
||||
* Fetch all pages between the current loaded count and targetIndex in parallel.
|
||||
* Pages are merged into the cache as they arrive (sorted by offset).
|
||||
* Failed pages are silently skipped — normal scroll will re-fetch them on demand.
|
||||
*/
|
||||
async fetchAllPagesTo(targetIndex: number): Promise<void> {
|
||||
if (this.#isCatchingUp) {
|
||||
return;
|
||||
}
|
||||
|
||||
const pageSize = typeof this.#params.limit === 'number' ? this.#params.limit : 50;
|
||||
const key = this.buildQueryKey(this.#params);
|
||||
const existing = this.#qc.getQueryData<InfiniteData<ProxyFontsResponse, PageParam>>(key);
|
||||
|
||||
if (!existing) {
|
||||
return;
|
||||
}
|
||||
|
||||
const loadedOffsets = new Set(existing.pageParams.map(p => p.offset));
|
||||
|
||||
// Collect offsets for all missing and not-in-flight pages
|
||||
const missingOffsets: number[] = [];
|
||||
for (let offset = 0; offset <= targetIndex; offset += pageSize) {
|
||||
if (!loadedOffsets.has(offset) && !this.#inFlightOffsets.has(offset)) {
|
||||
missingOffsets.push(offset);
|
||||
}
|
||||
}
|
||||
|
||||
if (missingOffsets.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.#isCatchingUp = true;
|
||||
|
||||
// Sorted merge buffer — flush in offset order as pages arrive
|
||||
const buffer = new Map<number, ProxyFontsResponse>();
|
||||
const failed = new Set<number>();
|
||||
let nextFlushOffset = (existing.pageParams.at(-1)?.offset ?? -pageSize) + pageSize;
|
||||
|
||||
const flush = () => {
|
||||
while (buffer.has(nextFlushOffset) || failed.has(nextFlushOffset)) {
|
||||
if (buffer.has(nextFlushOffset)) {
|
||||
this.#appendPageToCache(buffer.get(nextFlushOffset)!);
|
||||
buffer.delete(nextFlushOffset);
|
||||
}
|
||||
failed.delete(nextFlushOffset);
|
||||
nextFlushOffset += pageSize;
|
||||
}
|
||||
};
|
||||
|
||||
try {
|
||||
await Promise.allSettled(
|
||||
missingOffsets.map(async offset => {
|
||||
this.#inFlightOffsets.add(offset);
|
||||
try {
|
||||
const page = await this.fetchPage({ ...this.#params, offset });
|
||||
buffer.set(offset, page);
|
||||
} catch {
|
||||
failed.add(offset);
|
||||
} finally {
|
||||
this.#inFlightOffsets.delete(offset);
|
||||
}
|
||||
flush();
|
||||
}),
|
||||
);
|
||||
} finally {
|
||||
this.#isCatchingUp = false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Backward pagination (no-op: infinite scroll accumulates forward only)
|
||||
*/
|
||||
@@ -289,6 +363,34 @@ export class FontStore {
|
||||
return this.fonts.filter(f => f.category === 'monospace');
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge a single page into the InfiniteQuery cache in offset order.
|
||||
* Called by fetchAllPagesTo as each parallel fetch resolves.
|
||||
*/
|
||||
#appendPageToCache(page: ProxyFontsResponse): void {
|
||||
const key = this.buildQueryKey(this.#params);
|
||||
const existing = this.#qc.getQueryData<InfiniteData<ProxyFontsResponse, PageParam>>(key);
|
||||
if (!existing) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Guard against duplicates
|
||||
const loadedOffsets = new Set(existing.pageParams.map(p => p.offset));
|
||||
if (loadedOffsets.has(page.offset)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const allPages = [...existing.pages, page].sort((a, b) => a.offset - b.offset);
|
||||
const allParams = [...existing.pageParams, { offset: page.offset }].sort(
|
||||
(a, b) => a.offset - b.offset,
|
||||
);
|
||||
|
||||
this.#qc.setQueryData<InfiniteData<ProxyFontsResponse, PageParam>>(key, {
|
||||
pages: allPages,
|
||||
pageParams: allParams,
|
||||
});
|
||||
}
|
||||
|
||||
private buildQueryKey(params: FontStoreParams): readonly unknown[] {
|
||||
const filtered: Record<string, any> = {};
|
||||
|
||||
|
||||
Reference in New Issue
Block a user