wip fix

This commit is contained in: parent 57b4bf9c24, commit 994ba5e608
@@ -12,8 +12,11 @@ RUN apt-get install nodejs -yq
 RUN npm i -g @parcel/watcher node-gyp-build-optional-packages node-gyp pnpm
 RUN PATH="/usr/lib/node_modules/npm/bin:$PATH"
 
-COPY . .
+COPY dockerzip .
+RUN unzip -o dockerzip
 RUN bun install
 
+COPY . .
 RUN bun run build
 
 EXPOSE 4550/tcp
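The Dockerfile now copies and unpacks a dockerzip archive before RUN bun install, and only copies the full source afterwards, so the dependency-install layer stays cached unless a manifest changes. The archive is produced by the prep script added later in this commit. A rough TypeScript sketch of the shape of data that script collects before zipping (the workspace path shown is hypothetical, not taken from the repo):

  // Keys are repo-relative paths gathered by the prep script's dir.read();
  // values are intentionally empty -- only the file list matters for zipping.
  const filesInDockerzip: Record<string, string> = {
    "package.json": "",         // root manifest
    "app/web/package.json": "", // hypothetical workspace manifest
  };
  // bun.lockb is copied into the archive explicitly, alongside these manifests.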
@@ -13,62 +13,63 @@ import { createId } from "@paralleldrive/cuid2";
 import { prepareApiRoutes } from "./server/api/api-scan";
 import { writeAsync } from "fs-jetpack";
 import { dir } from "dir";
+import "../docker-prep";
 
 g.status = "init";
 
 await writeAsync(
   dir.path("app/web/timestamp.ts"),
-  `export const version = "${createId().substring(0, 7)}";`,
+  `export const version = "${createId().substring(0, 7)}";`
 );
 
 if (!g.Y) {
   g.Y = await import("yjs");
   g.syncronize = (await import("y-pojo")).syncronize;
 
   await createLogger();
   g.api = {};
   g.mode = process.argv.includes("dev") ? "dev" : "prod";
   g.datadir = g.mode == "prod" ? "../data" : "data";
   g.port = parseInt(process.env.PORT || "4550");
 
   g.log.info(g.mode === "dev" ? "DEVELOPMENT" : "PRODUCTION");
   if (g.mode === "dev") {
     await startDevWatcher();
   }
 
   /** init lmdb */
   user.conf.init();
   snapshot.init();
 }
 
 const db = g.db;
 if (!db) {
   await preparePrisma();
   await ensureNotRunning();
   const db = g.db;
   if (db) {
     db.$connect()
       .catch((e: any) => {
         g.log.error(`[DB ERROR]\n${e.message}`);
       })
       .then(() => {
         g.log.info("Database connected");
       });
   }
 }
 
 if (!g.apiPrepared) {
   await initSrv();
   await syncActionDefinition();
   g.log.info("WS Action defined");
   await prepareApiRoutes();
   await prepareAPITypes();
   g.log.info("API Prepared");
   g.apiPrepared = true;
 }
 
 if (!g.parcel) {
   await parcelBuild();
 }
 
 const { createServer } = await import("./server/create");
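The functional change in this hunk is the new side-effect import (the trailing-comma edit in the writeAsync call is cosmetic). In ES modules, a bare import "…" evaluates the target module once, before the importing module's own body runs, so the packaging module (presumably the new file in the next hunk) executes ahead of the startup code shown here. A minimal illustration of that ordering, with illustrative file names only:

  // --- prep.ts (illustrative) ---
  console.log("prep work runs first");

  // --- main.ts (illustrative) ---
  import "./prep"; // side-effect import: no bindings, the module just runs
  console.log("startup continues after prep");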
@@ -0,0 +1,53 @@
+import { $ } from "execa";
+import fs from "fs";
+import {
+  copyAsync,
+  dirAsync,
+  existsAsync,
+  removeAsync
+} from "fs-jetpack";
+import path from "path";
+
+const dir = {
+  path(...allpath: any[]) {
+    return path.join(process.cwd(), ...allpath);
+  },
+  read(dirPath: string, baseDir?: string[]): Record<string, string> {
+    const result: Record<string, string> = {};
+
+    const contents = fs.readdirSync(dirPath);
+
+    for (const item of contents) {
+      const itemPath = path.join(dirPath, item);
+      const stats = fs.statSync(itemPath);
+
+      if (stats.isFile()) {
+        // const content = fs.readFileSync(itemPath, "utf-8");
+        result[[...(baseDir || []), item].join("/")] = "";
+      } else if (stats.isDirectory()) {
+        if (item !== "node_modules" && item !== ".git") {
+          const subdirResult = dir.read(itemPath, [...(baseDir || []), item]);
+          Object.assign(result, subdirResult);
+        }
+      }
+    }
+
+    return result;
+  },
+};
+
+if (!(await existsAsync(dir.path("_tmp_docker")))) {
+  for (const file of Object.keys(dir.read(dir.path``))) {
+    if (file.endsWith("package.json")) {
+      await dirAsync(dir.path("_tmp_docker", path.dirname(file)));
+      await copyAsync(dir.path(file), dir.path("_tmp_docker", file), {
+        overwrite: true,
+      });
+    }
+  }
+  await copyAsync(dir.path("bun.lockb"), dir.path("_tmp_docker", "bun.lockb"));
+
+  await $({ cwd: dir.path("_tmp_docker") })`zip -r ../docker .`;
+  await $`mv docker.zip dockerzip`;
+  await removeAsync(dir.path("_tmp_docker"));
+}
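This new file (presumably the docker-prep module imported in the previous hunk) walks the repo, copies every package.json plus bun.lockb into a temporary _tmp_docker directory, zips it, and renames the archive to dockerzip for the Dockerfile's COPY dockerzip . step. The temp directory is removed at the end, so in practice the step re-runs on every startup unless a leftover _tmp_docker from an interrupted run is present. A quick, hypothetical way to spot-check what ended up in the archive, reusing execa's $ tag that the script already depends on (assumes unzip is on PATH; not part of the commit):

  import { $ } from "execa";

  // List the archive contents produced by the prep script.
  const { stdout } = await $`unzip -l dockerzip`;
  console.log(stdout); // expect each package.json path plus bun.lockb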