wip fix idb cache

Rizky 2024-01-27 06:02:41 +07:00
parent 6538c67789
commit 07b5ae6155
7 changed files with 108 additions and 32 deletions

View File

@@ -43,10 +43,14 @@ export const page_cache: SAction["page"]["cache"] = async function (
if (found && !exclude_page_id.includes(found.id)) {
const row = await db.page.findFirst({
where: { id: found.id },
select: { content_tree: true },
select: { content_tree: true, url: true },
});
if (row) {
result[found.id] = row.content_tree;
result[found.id] = {
root: row.content_tree,
url: row.url,
org_url: url,
};
}
}
}
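Note (not part of this commit): the hunk above changes the per-page cache value from a bare content_tree into a small record. A sketch of that shape, inferred from the added lines; the type names themselves are invented here.

// Hypothetical aliases; only root/url/org_url come from the diff.
type PageCacheEntry = {
  root: unknown; // the page's content_tree as stored in the database
  url: string; // the matched page's own URL pattern
  org_url: string; // the original URL that was requested for this lookup
};

// result now maps page id -> entry instead of page id -> content_tree.
type PageCacheResult = Record<string, PageCacheEntry>;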

View File

@@ -1,16 +1,15 @@
import { NodeModel } from "@minoru/react-dnd-treeview";
import { createId } from "@paralleldrive/cuid2";
import { compress, decompress } from "wasm-gzip";
import { IItem } from "../../../../utils/types/item";
import { DComp } from "../../../../utils/types/root";
import { initLoadComp } from "../../../vi/meta/comp/init-comp-load";
import { genMeta } from "../../../vi/meta/meta";
import { isTextEditing } from "../active/is-editing";
import { IMeta, PG } from "../ed-global";
import { assignMitem } from "../tree/assign-mitem";
import { treeRebuild } from "../tree/build";
import { pushTreeNode } from "../tree/build/push-tree";
import { isTextEditing } from "../active/is-editing";
import { assignMitem } from "../tree/assign-mitem";
import { createId } from "@paralleldrive/cuid2";
import { waitUntil } from "web-utils";
export const loadcomp = {
timeout: 0 as any,

View File

@@ -7,7 +7,7 @@ import { clientStartSync } from "../../../utils/sync/ws-client";
import { IItem } from "../../../utils/types/item";
import { DCode, DComp, DPage, IRoot } from "../../../utils/types/root";
import { GenMetaP, IMeta as LogicMeta } from "../../vi/utils/types";
import { createRouter } from "radix3";
export type IMeta = LogicMeta;
export const EmptySite = {
@@ -148,10 +148,11 @@ export const EDGlobal = {
| "ready",
preview: {
url_cache: new Set<string>(),
page_cache: {} as Record<string, IRoot>,
route_cache: createRouter<{ url: string; id: string }>(),
page_cache: {} as Record<string, { root: IRoot; url: string }>,
meta_cache: {} as Record<
string,
{ entry: string[]; meta: Record<string, IMeta> }
{ entry: string[]; meta: Record<string, IMeta>; url: string }
>,
show_loading: false,
},
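Note (not part of this commit): the new route_cache is a radix3 router. A minimal sketch of how such a router is typically filled and queried, assuming the public radix3 API (createRouter/insert/lookup); the route pattern and ids are made up, since this hunk only declares the field.

import { createRouter } from "radix3";

// Hypothetical routes for illustration only.
const route_cache = createRouter<{ url: string; id: string }>();
route_cache.insert("/blog/:slug", { url: "/blog/:slug", id: "page-1" });

const hit = route_cache.lookup("/blog/hello-world");
// hit -> { url: "/blog/:slug", id: "page-1", params: { slug: "hello-world" } }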

View File

@@ -1,30 +1,74 @@
import { get, set } from "idb-keyval";
import { IContent } from "../../../../utils/types/general";
import { IItem, MItem } from "../../../../utils/types/item";
import { initLoadComp } from "../../../vi/meta/comp/init-comp-load";
import { genMeta } from "../../../vi/meta/meta";
import { nav } from "../../../vi/render/script/extract-nav";
import { loadCompSnapshot, loadComponent } from "../comp/load";
import { IMeta, PG, active } from "../ed-global";
import { assignMitem } from "./assign-mitem";
import { pushTreeNode } from "./build/push-tree";
export const treeCacheBuild = async (p: PG, page_id: string) => {
const root = p.preview.page_cache[page_id];
const page_cache = p.preview.page_cache[page_id];
if (page_cache && !p.preview.meta_cache[page_id]) {
const meta_cache = {
meta: {} as Record<string, IMeta>,
entry: [] as string[],
url: page_cache.url,
};
const meta_cache = {
meta: {} as Record<string, IMeta>,
entry: [] as string[],
};
for (const item of root.childs) {
meta_cache.entry.push(item.id);
genMeta(
await initLoadComp(
{
note: "tree-rebuild",
comps: p.comp.loaded,
meta: meta_cache.meta,
mode: "page",
},
{ item: item as IContent }
page_cache.root as unknown as IItem,
{
async load(comp_ids) {
const ids = comp_ids.filter((id) => !p.comp.loaded[id]);
const comps = await p.sync.comp.load(ids, true);
let result = Object.entries(comps);
for (const [id_comp, comp] of result) {
const cached = await get(`comp-${id_comp}`, nav.store);
if (cached) {
p.comp.loaded[id_comp] = cached;
}
if (comp && comp.snapshot && !p.comp.list[id_comp]) {
if (p.comp.loaded[id_comp]) {
loadCompSnapshot(p, id_comp, comp.snapshot).then(() => {
if (p.comp.loaded[id_comp]) {
set(`comp-${id_comp}`, p.comp.loaded[id_comp], nav.store);
}
});
} else {
await loadCompSnapshot(p, id_comp, comp.snapshot);
if (p.comp.loaded[id_comp]) {
set(`comp-${id_comp}`, p.comp.loaded[id_comp], nav.store);
}
}
}
}
},
}
);
for (const item of page_cache.root.childs) {
meta_cache.entry.push(item.id);
genMeta(
{
note: "cache-rebuild",
comps: p.comp.loaded,
meta: meta_cache.meta,
mode: "page",
},
{ item: item as IContent }
);
}
p.preview.meta_cache[page_id] = meta_cache;
set(`page-${page_id}`, meta_cache, nav.store);
}
p.preview.meta_cache[page_id] = meta_cache;
};
export const treeRebuild = async (p: PG, arg?: { note?: string }) => {
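Note (not part of this commit): treeCacheBuild now persists loaded components (comp-${id}) and built page metas (page-${page_id}) through idb-keyval against a shared custom store. A condensed sketch of that read-then-persist pattern; the store names match the extract-nav change below, while the readOrBuild helper is hypothetical.

import { createStore, get, set } from "idb-keyval";

const store = createStore("prasi-cache", "prasi-cache-store");

async function readOrBuild<T>(key: string, build: () => Promise<T>): Promise<T> {
  const cached = await get<T>(key, store);
  if (cached !== undefined) return cached; // hit: serve straight from IndexedDB
  const fresh = await build(); // miss: build once, then persist for next load
  await set(key, fresh, store);
  return fresh;
}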

View File

@@ -8,6 +8,9 @@ import init, { decompress } from "wasm-gzip";
import { w } from "../../utils/types/general";
import { IRoot } from "../../utils/types/root";
import { treeCacheBuild } from "../ed/logic/tree/build";
import { loadComponent } from "../ed/logic/comp/load";
import { get } from "idb-keyval";
import { nav } from "./render/script/extract-nav";
const decoder = new TextDecoder();
export const ViPreview = (arg: { pathname: string }) => {
@@ -52,10 +55,11 @@ export const ViPreview = (arg: { pathname: string }) => {
const mode = p.mode;
if (!w.isEditor) {
if (!w.isEditor && !p.preview.meta_cache[params.page_id]) {
p.preview.meta_cache[params.page_id] = {
meta: p.page.meta,
entry: p.page.entry,
url: p.page.cur.url,
};
}
@@ -113,20 +117,23 @@ export const ViPreview = (arg: { pathname: string }) => {
}
if (load_urls.length > 0) {
console.log(load_urls);
const res = await p.sync.page.cache(
p.site.id,
load_urls,
Object.keys(p.preview.page_cache)
);
if (res) {
const pages = JSON.parse(
decoder.decode(decompress(res.gzip)) || "{}"
);
) as Record<
string,
{ root: IRoot; url: string; org_url: string }
>;
for (const [id, page] of Object.entries(pages)) {
p.preview.page_cache[id] = page as IRoot;
treeCacheBuild(p, params.page_id);
p.preview.page_cache[id] = page;
await treeCacheBuild(p, id);
}
}
}
@@ -172,7 +179,17 @@ const viRoute = async (p: PG) => {
p.script.init_local_effect = {};
if (!w.isEditor) {
const page_cache = p.preview.meta_cache[params.page_id];
let page_cache = p.preview.meta_cache[params.page_id];
let should_render = false;
if (!page_cache) {
const idb_cache = await get(`page-${params.page_id}`, nav.store);
if (idb_cache) {
page_cache = idb_cache;
p.preview.meta_cache[params.page_id] = idb_cache;
}
should_render = true;
}
if (page_cache) {
p.page.meta = page_cache.meta;
@@ -182,7 +199,7 @@ const viRoute = async (p: PG) => {
p.page.cur = { id: params.page_id } as any;
}
p.status = "ready";
p.render();
if (should_render) p.render();
return;
}
}
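Note (not part of this commit): the gzip payload returned by p.sync.page.cache is inflated with wasm-gzip and parsed into the new per-page record shape. A condensed sketch of that decode step; it assumes wasm-gzip's init() has already been awaited elsewhere, and the function name is invented.

import { decompress } from "wasm-gzip";
import { IRoot } from "../../utils/types/root"; // path as in this diff

const decoder = new TextDecoder();

function decodePageCache(gzip: Uint8Array) {
  // empty payloads fall back to an empty object, as in the hunk above
  return JSON.parse(decoder.decode(decompress(gzip)) || "{}") as Record<
    string,
    { root: IRoot; url: string; org_url: string }
  >;
}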

View File

@@ -1,6 +1,8 @@
import { createStore } from "idb-keyval";
import { VG } from "../global";
const nav = { timeout: null as any };
const store = createStore(`prasi-cache`, `prasi-cache-store`);
export const nav = { timeout: null as any, store };
export const extractNavigate = (
vi: { page: VG["page"]; on_nav_loaded?: VG["on_nav_loaded"] },

View File

@@ -1,12 +1,12 @@
import { FC, Suspense, useEffect, useState } from "react";
import { FC, Suspense, useEffect } from "react";
import { useGlobal } from "web-utils";
import { IMeta } from "../ed/logic/ed-global";
import { viLoad } from "./load/load";
import { VG, ViGlobal } from "./render/global";
import { render_stat } from "./render/render";
import { nav } from "./render/script/extract-nav";
import { ViRoot } from "./root";
import { ErrorBox } from "./utils/error-box";
import { render_stat } from "./render/render";
import { IRoot } from "../../utils/types/root";
const w = window as any;
export const Vi: FC<{
@@ -46,11 +46,20 @@ export const Vi: FC<{
w.isMobile = mode === "mobile";
w.isDesktop = mode === "desktop";
w.preload = (urls: string[]) => {
if (!vi.page.navs[page_id]) vi.page.navs[page_id] = new Set();
for (const url of urls) {
vi.page.navs[page_id].add(url);
}
clearTimeout(nav.timeout);
nav.timeout = setTimeout(() => {
if (vi.on_nav_loaded) {
vi.on_nav_loaded({
urls: Array.from(vi.page.navs[page_id]),
});
}
}, 100);
};
vi.page.cur.id = page_id;
vi.on_status_changes = on_status_changed;
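Note (not part of this commit): a hypothetical call site for the new w.preload hook. The only contract taken from this diff is the (urls: string[]) => void signature and the 100ms debounce before on_nav_loaded fires; the URLs below are made up.

declare const w: { preload?: (urls: string[]) => void };

// Repeated calls inside the 100ms window are coalesced into a single
// on_nav_loaded notification for the current page.
w.preload?.(["/blog", "/about"]);
w.preload?.(["/blog/hello-world"]);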