wip fix

parent ff6efc0518
commit aa7465a741

@@ -1,9 +1,9 @@
 import { dir } from "dir";
 import { apiContext } from "service-srv";
 import { validate } from "uuid";
-import { prodIndex } from "../util/prod-index";
 import { code } from "../ws/sync/editor/code/util-code";
 import { gzipAsync } from "../ws/sync/entity/zlib";
+import { prodIndex } from "../util/prod-index";
 
 export const _ = {
   url: "/prod/:site_id/**",

@@ -78,7 +78,9 @@ export const _ = {
       },
       select: { url: true, id: true },
     });
-    return gzipAsync(
+
+    return await responseCompressed(
+      req,
       JSON.stringify({
         site: { ...site, api_url },
         urls,
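
The handlers in this file stop calling gzipAsync on every response and instead go through a responseCompressed helper (added at the bottom of this file, in the last hunk of this file below), which compresses only when the client advertises gzip support. The gzipAsync implementation in ../ws/sync/entity/zlib is not part of this diff; a minimal sketch of what such a helper typically looks like on Node/Bun, as an assumption rather than the project's actual code:

```ts
// Hypothetical sketch of a gzipAsync helper; the real implementation lives in
// ../ws/sync/entity/zlib and is not shown in this commit.
import { gzip } from "node:zlib";
import { promisify } from "node:util";

const gzipPromise = promisify(gzip);

export const gzipAsync = (body: string | Buffer): Promise<Buffer> =>
  // zlib.gzip accepts strings or Buffers and resolves to a gzip-compressed Buffer.
  gzipPromise(body);
```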

@@ -96,7 +98,8 @@ export const _ = {
       select: { content_tree: true, url: true },
     });
     if (page) {
-      return gzipAsync(
+      return await responseCompressed(
+        req,
         JSON.stringify({
           id: page_id,
           root: page.content_tree,

@@ -116,7 +119,8 @@ export const _ = {
       select: { id: true, content_tree: true, url: true },
     });
     if (pages) {
-      return gzipAsync(
+      return await responseCompressed(
+        req,
         JSON.stringify(
           pages.map((e) => ({
             id: e.id,

@@ -141,7 +145,7 @@ export const _ = {
           result[comp.id] = comp.content_tree;
         }
       }
-      return gzipAsync(JSON.stringify(result) as any);
+      return await responseCompressed(req, JSON.stringify(result) as any);
     }
   }
   return new Response("action " + action + ": not found");

@@ -157,3 +161,13 @@ export const _ = {
     }
   },
 };
+
+const responseCompressed = async (req: Request, body: string) => {
+  if (req.headers.get("accept-encoding")?.includes("gz")) {
+    return new Response(await gzipAsync(body), {
+      headers: { "content-encoding": "gzip" },
+    });
+  }
+
+  return new Response(body);
+};
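
A quick illustration of how the new helper behaves with and without an Accept-Encoding header. The request URL and payload here are invented, and the helper is file-local in the commit, so this is illustration only:

```ts
// Illustration of responseCompressed (hypothetical URL and payload).
const payload = JSON.stringify({ site: { id: "demo" }, urls: [] });

// A client that advertises gzip gets a compressed body plus the header that
// lets browsers / fetch() decode it transparently on the receiving side.
const gzReq = new Request("https://example.test/prod/site/_prasi/route", {
  headers: { "accept-encoding": "gzip, deflate, br" },
});
const gzRes = await responseCompressed(gzReq, payload);
console.log(gzRes.headers.get("content-encoding")); // "gzip"

// A client without the header gets the plain JSON string back, uncompressed.
const plainReq = new Request("https://example.test/prod/site/_prasi/route");
const plainRes = await responseCompressed(plainReq, payload);
console.log(await plainRes.text()); // the original JSON string
```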

File diff suppressed because one or more lines are too long

@@ -3,7 +3,6 @@ import { IContent } from "../../../utils/types/general";
 import { IItem } from "../../../utils/types/item";
 import { ISection } from "../../../utils/types/section";
 import { base } from "./base";
-import { decompressBlob } from "./util";
 import { prodCache } from "./cache";
 
 export const scanComponent = async (items: IContent[]) => {

@@ -38,15 +37,12 @@ export const scanComponent = async (items: IContent[]) => {
 
   if (comp.pending.size > 0) {
     try {
-      const raw = await (
+      const res = (await (
         await fetch(base.url`_prasi/comp`, {
           method: "POST",
           body: JSON.stringify({ ids: [...comp.pending] }),
         })
-      ).blob();
-      const res = JSON.parse(
-        await (await decompressBlob(raw)).text()
-      ) as Record<string, IItem>;
+      ).json()) as Record<string, IItem>;
       for (const [id, item] of Object.entries(res)) {
         comp.pending.delete(id);
         comp.list[id] = item;
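
On the client side, the manual blob decompression becomes unnecessary: because the server now sets Content-Encoding: gzip, fetch strips the compression layer before .json() runs. Roughly, with a hypothetical endpoint and ids:

```ts
// Sketch of the client-side change; the endpoint and ids are hypothetical.
const endpoint = "https://example.test/_prasi/comp";
const body = JSON.stringify({ ids: ["comp-a", "comp-b"] });

// Before: download a gzip Blob and decode it by hand via decompressBlob.
// const raw = await (await fetch(endpoint, { method: "POST", body })).blob();
// const data = JSON.parse(await (await decompressBlob(raw)).text());

// After: the runtime has already removed the gzip layer, so .json() is enough.
const data = (await (
  await fetch(endpoint, { method: "POST", body })
).json()) as Record<string, unknown>;
```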

@@ -1,6 +1,5 @@
 import { base } from "./base";
 import { IRoot } from "../../../utils/types/root";
-import { decompressBlob } from "./util";
 import { prodCache } from "./cache";
 import { get, set } from "idb-keyval";
 

@@ -17,8 +16,9 @@ export const loadPage = (page_id: string) => {
       returned = true;
     }
 
-    const raw = await (await fetch(base.url`_prasi/page/${page_id}`)).blob();
-    const res = JSON.parse(await (await decompressBlob(raw)).text()) as {
+    const res = (await (
+      await fetch(base.url`_prasi/page/${page_id}`)
+    ).json()) as {
       id: string;
       url: string;
       root: IRoot;

@@ -57,13 +57,12 @@ export const loadPages = (page_ids: string[]) => {
       done(result);
     }
 
-    const raw = await (
+    const res = (await (
       await fetch(base.url`_prasi/pages`, {
         method: "POST",
         body: JSON.stringify({ ids }),
       })
-    ).blob();
-    const res = JSON.parse(await (await decompressBlob(raw)).text()) as {
+    ).json()) as {
       id: string;
       url: string;
       root: IRoot;
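
loadPage and loadPages resolve from the local cache first and then refresh from the network, which is what the returned/done bookkeeping above is for. A condensed sketch of that cache-then-network pattern with idb-keyval; the cachedJson helper and its cache key are invented for illustration:

```ts
// Hypothetical condensation of the cache-then-network pattern used by
// loadPage/loadPages; cachedJson and its key are invented.
import { get, set } from "idb-keyval";

const cachedJson = <T>(key: string, url: string) =>
  new Promise<T>(async (done) => {
    let returned = false;

    // Resolve immediately from IndexedDB when a cached copy exists.
    const cached = await get<T>(key);
    if (cached !== undefined) {
      done(cached);
      returned = true;
    }

    // Refresh from the network either way; fetch strips any gzip encoding,
    // so .json() works directly, mirroring the change in this commit.
    const fresh = (await (await fetch(url)).json()) as T;
    await set(key, fresh);
    if (!returned) {
      done(fresh);
    }
  });
```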

@@ -1,3 +1,4 @@
+import { get, set } from "idb-keyval";
 import { createRouter } from "radix3";
 import { apiProxy } from "../../../base/load/api/api-proxy";
 import { dbProxy } from "../../../base/load/db/db-proxy";

@@ -5,9 +6,7 @@ import { IRoot } from "../../../utils/types/root";
 import { genMeta } from "../../vi/meta/meta";
 import { IMeta } from "../../vi/utils/types";
 import { base } from "./base";
-import { decompressBlob } from "./util";
 import { prodCache } from "./cache";
-import { get, set } from "idb-keyval";
 
 const getRoute = () => {
   return new Promise<{

@@ -25,11 +24,7 @@
       is_done = true;
     }
 
-    let raw = await (await fetch(base.url`_prasi/route`)).blob();
-    const dc = decompressBlob(raw);
-    const unziped = await dc;
-    const res = JSON.parse(await unziped.text());
-
+    let res = await (await fetch(base.url`_prasi/route`)).json();
     await set("route", res, prodCache);
     if (!is_done) {
       done(res);
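
The route list fetched and cached here presumably feeds the radix3 router imported at the top of this file; for context, radix3 matching works roughly like this (a generic sketch of the library's API, with invented route data):

```ts
// Generic radix3 usage sketch; the route shapes and page ids are invented.
import { createRouter } from "radix3";

const router = createRouter<{ page_id: string }>();
router.insert("/", { page_id: "page-home" });
router.insert("/blog/:slug", { page_id: "page-blog" });

const match = router.lookup("/blog/hello-world");
// match => { page_id: "page-blog", params: { slug: "hello-world" } }
```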

@@ -1,5 +0,0 @@
-export async function decompressBlob(blob: Blob) {
-  let ds = new DecompressionStream("gzip");
-  let decompressedStream = blob.stream().pipeThrough(ds);
-  return await new Response(decompressedStream).blob();
-}
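
The deleted helper is no longer needed once responses carry Content-Encoding: gzip, since fetch undoes that encoding automatically. If a raw gzip Blob ever has to be decoded outside of fetch, the same Streams API path is still available; a sketch mirroring the deleted code, for reference only:

```ts
// Sketch: decode a gzip-compressed Blob by hand, mirroring the deleted helper.
const decodeGzipBlob = async <T>(blob: Blob): Promise<T> => {
  const ds = new DecompressionStream("gzip");
  const decompressed = blob.stream().pipeThrough(ds);
  // Wrapping the stream in a Response gives convenient .text()/.json() readers.
  return (await new Response(decompressed).json()) as T;
};
```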