fix brotli cache

parent 2b9c84270f
commit d781756ab9
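
This commit moves brotli compression off the request path: compression work is
queued in utils/br-load, processed one entry at a time, and persisted under
g.datadir/br-cache/<hash> so a restart can reuse earlier results. While a brotli
entry is still pending, createResponse falls back to a gzip cache built with
Bun.gzipSync. createResponse now takes the body first plus an options object
({ cache_accept, headers, res }), serveWeb forwards the client's accept-encoding
value, and g.deploy.gz is renamed to g.deploy.content.

A minimal sketch (not part of this commit) of how a caller is expected to use the
new createResponse signature; "req" is assumed to be the incoming fetch Request:

    // Objects are JSON.stringify-ed; cache_accept decides whether a cached
    // br/gzip body and the matching content-encoding header can be reused.
    const response = createResponse(
      { hello: "world" },
      { cache_accept: req.headers.get("accept-encoding") || "" }
    );
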
@@ -13,7 +13,7 @@ export const _ = {
     const { req, res } = apiContext(this);
     res.setHeader("Access-Control-Allow-Origin", "*");
     res.setHeader("Access-Control-Allow-Headers", "content-type");
-    const gz = g.deploy.gz;
+    const gz = g.deploy.content;
     const parts = req.params._.split("/");

     const action = {

@@ -1,7 +1,6 @@
-import brotliPromise from "brotli-wasm"; // Import the default export
 import { simpleHash } from "utils/cache";
 import { g } from "utils/global";
-const brotli = await brotliPromise;
+import { loadCachedBr } from "utils/br-load";

 const parseQueryParams = (ctx: any) => {
   const pageHref = ctx.req.url;

@@ -29,7 +28,7 @@ export const apiContext = (ctx: any) => {
     res: {
       ...ctx.res,
       send: (body, cache_accept?: string) => {
-        ctx.res = createResponse(ctx.res, body, cache_accept);
+        ctx.res = createResponse(body, { cache_accept, res: ctx.res });
       },
       sendStatus: (code: number) => {
         ctx.res._status = code;

@@ -50,30 +49,45 @@ export const apiContext = (ctx: any) => {
 };

 export const createResponse = (
-  existingRes: any,
   body: any,
-  cache_accept?: string
+  opt?: {
+    cache_accept?: string;
+    headers?: any;
+    res?: any;
+  }
 ) => {
   const status =
-    typeof existingRes._status === "number" ? existingRes._status : undefined;
+    typeof opt?.res?._status === "number" ? opt?.res?._status : undefined;

   let content: any = typeof body === "string" ? body : JSON.stringify(body);
-  const headers = {} as Record<string, string>;
-  if (cache_accept) {
-    if (g.mode === "prod" && cache_accept.toLowerCase().includes("br")) {
+  const headers = { ...(opt?.headers || {}) } as Record<string, string>;
+  if (opt?.cache_accept) {
+    let cached = false;
+    if (opt.cache_accept.toLowerCase().includes("br")) {
       const content_hash = simpleHash(content);

+      if (!g.cache.br[content_hash]) {
+        loadCachedBr(content_hash, content);
+      }
+
       if (g.cache.br[content_hash]) {
+        cached = true;
         content = g.cache.br[content_hash];
         headers["content-encoding"] = "br";
-      } else {
-        if (!g.cache.br_timeout.has(content_hash)) {
-          g.cache.br_timeout.add(content_hash);
-          setTimeout(() => {
-            g.cache.br[content_hash] = brotli.compress(Buffer.from(content));
-            g.cache.br_timeout.delete(content_hash);
-          });
-        }
       }
     }
+
+    if (!cached && opt.cache_accept.toLowerCase().includes("gz")) {
+      const content_hash = simpleHash(content);
+
+      if (!g.cache.gz[content_hash]) {
+        g.cache.gz[content_hash] = Bun.gzipSync(content);
+      }
+
+      if (g.cache.gz[content_hash]) {
+        cached = true;
+        content = g.cache.gz[content_hash];
+        headers["content-encoding"] = "gzip";
+      }
+    }
   }

@@ -90,10 +104,12 @@ export const createResponse = (
   for (const [k, v] of Object.entries(headers)) {
     res.headers.append(k, v);
   }
-  const cur = existingRes as Response;
-  cur.headers.forEach((value, key) => {
-    res.headers.append(key, value);
-  });
+  const cur = opt?.res as Response;
+  if (cur) {
+    cur.headers.forEach((value, key) => {
+      res.headers.append(key, value);
+    });
+  }

   if (typeof body === "object" && !res.headers.has("content-type")) {
     res.headers.append("content-type", "application/json");

@@ -14,7 +14,12 @@ export const createServer = async () => {
   g.api = {};
   g.cache = {
     br: {},
-    br_timeout: new Set(),
+    gz: {},
+    br_progress: {
+      pending: {},
+      running: false,
+      timeout: null,
+    },
   };
   const scan = async (path: string, root?: string) => {
     const apis = await listAsync(path);

@@ -71,7 +76,7 @@ export const createServer = async () => {
     maxRequestBodySize: 1024 * 1024 * 128,
     async fetch(req) {
       const url = new URL(req.url) as URL;
-      url.pathname = url.pathname.replace(/\/+/g, '/')
+      url.pathname = url.pathname.replace(/\/+/g, "/");

       const prasi = {};
       const index = prodIndex(g.deploy.config.site_id, prasi);

@@ -89,12 +94,13 @@ export const createServer = async () => {
         return await serveWeb({
           content: index.render(),
           pathname: "index.html",
+          cache_accept: req.headers.get("accept-encoding") || "",
         });
       }

-      if (g.deploy.gz) {
-        const core = g.deploy.gz.code.core;
-        const site = g.deploy.gz.code.site;
+      if (g.deploy.content) {
+        const core = g.deploy.content.code.core;
+        const site = g.deploy.content.code.site;

         let pathname = url.pathname;
         if (url.pathname[0] === "/") pathname = pathname.substring(1);

@@ -107,6 +113,7 @@ export const createServer = async () => {
           return await serveWeb({
             content: index.render(),
             pathname: "index.html",
+            cache_accept: req.headers.get("accept-encoding") || "",
           });
         }

@@ -116,7 +123,11 @@ export const createServer = async () => {
           else if (site[pathname]) content = site[pathname];

           if (content) {
-            return await serveWeb({ content, pathname });
+            return await serveWeb({
+              content,
+              pathname,
+              cache_accept: req.headers.get("accept-encoding") || "",
+            });
           }
         }
       }

@@ -34,7 +34,7 @@ export const serveAPI = async (url: URL, req: Request) => {
       }
     }
   } else {
-    for (const [k, v] of Object.entries(json)) {
+    for (const [k, v] of Object.entries(json as object)) {
       params[k] = v;
     }
     for (const [k, v] of Object.entries(params)) {

@@ -1,9 +1,14 @@
 import mime from "mime";
+import { createResponse } from "service-srv";

-export const serveWeb = async (arg: { pathname: string; content: string }) => {
+export const serveWeb = async (arg: {
+  pathname: string;
+  content: string;
+  cache_accept: string;
+}) => {
   const type = mime.getType(arg.pathname);

-  return new Response(arg.content, {
+  return createResponse(arg.content, {
+    cache_accept: arg.cache_accept,
     headers: !type ? undefined : { "content-type": type },
   });
 };

@@ -0,0 +1,43 @@
+import brotliPromise from "brotli-wasm"; // Import the default export
+import { g } from "./global";
+import { dir } from "./dir";
+
+const encoder = new TextEncoder();
+const brotli = await brotliPromise;
+export const loadCachedBr = (hash: string, content: string) => {
+  if (!g.cache.br[hash]) {
+    if (!g.cache.br_progress.pending[hash]) {
+      g.cache.br_progress.pending[hash] = content;
+      recurseCompressBr();
+    }
+  }
+};
+
+const recurseCompressBr = () => {
+  clearTimeout(g.cache.br_progress.timeout);
+  g.cache.br_progress.timeout = setTimeout(async () => {
+    if (g.cache.br_progress.running) {
+      return;
+    }
+
+    g.cache.br_progress.running = true;
+    const entries = Object.entries(g.cache.br_progress.pending);
+    if (entries.length > 0) {
+      const [hash, content] = entries.shift() as [string, string | Uint8Array];
+
+      const file = Bun.file(dir(`${g.datadir}/br-cache/${hash}`));
+      if (await file.exists()) {
+        g.cache.br[hash] = new Uint8Array(await file.arrayBuffer());
+      } else {
+        g.cache.br[hash] = brotli.compress(
+          typeof content === "string" ? encoder.encode(content) : content,
+          { quality: 11 }
+        );
+        await Bun.write(file, g.cache.br[hash]);
+      }
+      delete g.cache.br_progress.pending[hash];
+      g.cache.br_progress.running = false;
+      recurseCompressBr();
+    }
+  }, 50);
+};

@@ -27,7 +27,7 @@ export const deploy = {
     console.log(`Loading site: ${this.config.site_id} ${ts}`);

     try {
-      g.deploy.gz = JSON.parse(
+      g.deploy.content = JSON.parse(
        decoder.decode(
          await gunzipAsync(
            new Uint8Array(

@@ -37,45 +37,45 @@ export const deploy = {
         )
       );

-      if (g.deploy.gz) {
+      if (g.deploy.content) {
         if (exists(dir("public"))) {
           await removeAsync(dir("public"));
-          if (g.deploy.gz.public) {
+          if (g.deploy.content.public) {
             await dirAsync(dir("public"));
-            for (const [k, v] of Object.entries(g.deploy.gz.public)) {
+            for (const [k, v] of Object.entries(g.deploy.content.public)) {
               await writeAsync(dir(`public/${k}`), v);
             }
           }
         }
-        for (const page of g.deploy.gz.layouts) {
+        for (const page of g.deploy.content.layouts) {
           if (page.is_default_layout) {
             g.deploy.layout = page.content_tree;
             break;
           }
         }
-        if (!g.deploy.layout && g.deploy.gz.layouts.length > 0) {
-          g.deploy.layout = g.deploy.gz.layouts[0].content_tree;
+        if (!g.deploy.layout && g.deploy.content.layouts.length > 0) {
+          g.deploy.layout = g.deploy.content.layouts[0].content_tree;
         }

         g.deploy.router = createRouter();
         g.deploy.pages = {};
-        for (const page of g.deploy.gz.pages) {
+        for (const page of g.deploy.content.pages) {
           g.deploy.pages[page.id] = page;
           g.deploy.router.insert(page.url, page);
         }

         g.deploy.comps = {};
-        for (const comp of g.deploy.gz.comps) {
+        for (const comp of g.deploy.content.comps) {
           g.deploy.comps[comp.id] = comp.content_tree;
         }

-        if (g.deploy.gz.code.server) {
+        if (g.deploy.content.code.server) {
           setTimeout(async () => {
-            if (g.deploy.gz) {
+            if (g.deploy.content) {
               delete require.cache[dir(`app/web/server/index.js`)];
               await removeAsync(dir(`app/web/server`));
               await dirAsync(dir(`app/web/server`));
-              for (const [k, v] of Object.entries(g.deploy.gz.code.server)) {
+              for (const [k, v] of Object.entries(g.deploy.content.code.server)) {
                 await writeAsync(dir(`app/web/server/${k}`), v);
               }

@@ -139,7 +139,7 @@ export const deploy = {
       config: { deploy: { ts: "" }, site_id: "" },
       init: false,
       raw: null,
-      gz: null,
+      content: null,
       server: null,
     };
   }

@@ -70,7 +70,8 @@ export const g = global as unknown as {
   };
   cache: {
     br: Record<string, Uint8Array>;
-    br_timeout: Set<string>;
+    br_progress: { pending: Record<string, any>; running: boolean; timeout: any };
+    gz: Record<string, Uint8Array>;
   };
   createServer: (
     arg: PrasiServer & { api: any; db: any }

@@ -85,7 +86,7 @@ export const g = global as unknown as {
       string,
       { id: string; url: string; name: true; content_tree: any }
     >;
-    gz: null | {
+    content: null | {
       layouts: SinglePage[];
       pages: SinglePage[];
       site: any;