Canvas: Implement basic service discovery logic
Change-Id: I71b25076dba94d6491ad4db748b259870991c526
diff --git a/apps/canvas/back/src/github.ts b/apps/canvas/back/src/github.ts
index b758aca..7657043 100644
--- a/apps/canvas/back/src/github.ts
+++ b/apps/canvas/back/src/github.ts
@@ -55,6 +55,27 @@
async addDeployKey(repoPath: string, key: string) {
const sshUrl = repoPath;
const repoOwnerAndName = sshUrl.replace("git@github.com:", "").replace(".git", "");
+ let existingKeys: z.infer<typeof DeployKeysSchema> = [];
+ const response = await axios.get(`https://api.github.com/repos/${repoOwnerAndName}/keys`, {
+ headers: this.getHeaders(),
+ });
+ const parsedResult = DeployKeysSchema.safeParse(response.data);
+ if (parsedResult.success) {
+ existingKeys = parsedResult.data;
+ } else {
+ console.error("Failed to parse existing deploy keys:", parsedResult.error);
+ }
+ const keyToAddParts = key.trim().split(" ");
+ const mainKeyPartToAdd = keyToAddParts.length > 1 ? keyToAddParts.slice(0, 2).join(" ") : key.trim();
+ const keyAlreadyExists = existingKeys.some((existingKeyObj) => {
+ const existingKeyParts = existingKeyObj.key.trim().split(" ");
+ const mainExistingKeyPart =
+ existingKeyParts.length > 1 ? existingKeyParts.slice(0, 2).join(" ") : existingKeyObj.key.trim();
+ return mainExistingKeyPart === mainKeyPartToAdd;
+ });
+ if (keyAlreadyExists) {
+ return;
+ }
await axios.post(
`https://api.github.com/repos/${repoOwnerAndName}/keys`,
{
@@ -76,15 +97,42 @@
});
const result = DeployKeysSchema.safeParse(response.data);
if (!result.success) {
- throw new Error("Failed to parse deploy keys response");
+ console.error("Failed to parse deploy keys response for removal:", result.error);
+            // If we cannot parse the key list, we cannot safely identify which keys
+            // to remove, so abort the removal process by throwing below.
+ throw new Error("Failed to parse deploy keys response during removal process");
}
- const deployKeys = result.data.filter((k) => k.key === key);
+
+ // Extract the main part of the key we intend to remove
+ const keyToRemoveParts = key.trim().split(" ");
+ const mainKeyPartToRemove = keyToRemoveParts.length > 1 ? keyToRemoveParts.slice(0, 2).join(" ") : key.trim();
+
+ const deployKeysToDelete = result.data.filter((existingKeyObj) => {
+ const existingKeyParts = existingKeyObj.key.trim().split(" ");
+ const mainExistingKeyPart =
+ existingKeyParts.length > 1 ? existingKeyParts.slice(0, 2).join(" ") : existingKeyObj.key.trim();
+ return mainExistingKeyPart === mainKeyPartToRemove;
+ });
+
+ if (deployKeysToDelete.length === 0) {
+ console.log(
+ `No deploy key matching '${mainKeyPartToRemove.substring(0, 50)}...' found in repo ${repoOwnerAndName} for removal.`,
+ );
+ return;
+ }
+
await Promise.all(
- deployKeys.map((deployKey) =>
- axios.delete(`https://api.github.com/repos/${repoOwnerAndName}/keys/${deployKey.id}`, {
+ deployKeysToDelete.map((deployKey) => {
+ console.log(
+ `Removing deploy key ID ${deployKey.id} ('${deployKey.key.substring(0, 50)}...') from repo ${repoOwnerAndName}`,
+ );
+ return axios.delete(`https://api.github.com/repos/${repoOwnerAndName}/keys/${deployKey.id}`, {
headers: this.getHeaders(),
- }),
- ),
+ });
+ }),
+ );
+ console.log(
+ `Successfully initiated removal of ${deployKeysToDelete.length} matching deploy key(s) from ${repoOwnerAndName}.`,
);
}
diff --git a/apps/canvas/back/src/index.ts b/apps/canvas/back/src/index.ts
index cfd8955..daeded2 100644
--- a/apps/canvas/back/src/index.ts
+++ b/apps/canvas/back/src/index.ts
@@ -1,11 +1,28 @@
import { PrismaClient } from "@prisma/client";
import express from "express";
+import fs from "node:fs";
import { env } from "node:process";
import axios from "axios";
import { GithubClient } from "./github";
import { AppManager } from "./app_manager";
import { z } from "zod";
import { ProjectMonitor, WorkerSchema } from "./project_monitor";
+import tmp from "tmp";
+import { NodeJSAnalyzer } from "./lib/nodejs";
+import shell from "shelljs";
+import { RealFileSystem } from "./lib/fs";
+import path from "node:path";
+
+async function generateKey(root: string): Promise<[string, string]> {
+ const privKeyPath = path.join(root, "key");
+ const pubKeyPath = path.join(root, "key.pub");
+ if (shell.exec(`ssh-keygen -t ed25519 -f ${privKeyPath} -N ""`).code !== 0) {
+ throw new Error("Failed to generate SSH key pair");
+ }
+ const publicKey = await fs.promises.readFile(pubKeyPath, "utf8");
+ const privateKey = await fs.promises.readFile(privKeyPath, "utf8");
+ return [publicKey, privateKey];
+}
const db = new PrismaClient();
const appManager = new AppManager();
@@ -14,10 +31,14 @@
const handleProjectCreate: express.Handler = async (req, resp) => {
try {
+ const tmpDir = tmp.dirSync().name;
+ const [publicKey, privateKey] = await generateKey(tmpDir);
const { id } = await db.project.create({
data: {
userId: resp.locals.userId,
name: req.body.name,
+ deployKey: privateKey,
+ deployKeyPublic: publicKey,
},
});
resp.status(200);
@@ -133,7 +154,11 @@
};
}
-const handleDelete: express.Handler = async (req, resp) => {
+const projectDeleteReqSchema = z.object({
+ state: z.optional(z.nullable(z.string())),
+});
+
+const handleProjectDelete: express.Handler = async (req, resp) => {
try {
const projectId = Number(req.params["projectId"]);
const p = await db.project.findUnique({
@@ -143,25 +168,51 @@
},
select: {
instanceId: true,
+ githubToken: true,
+ deployKeyPublic: true,
+ state: true,
+ draft: true,
},
});
if (p === null) {
resp.status(404);
return;
}
- let ok = false;
- if (p.instanceId === null) {
- ok = true;
- } else {
- ok = await appManager.removeInstance(p.instanceId);
+ const parseResult = projectDeleteReqSchema.safeParse(req.body);
+ if (!parseResult.success) {
+ resp.status(400);
+ resp.write(JSON.stringify({ error: "Invalid request body", issues: parseResult.error.format() }));
+ return;
}
- if (ok) {
- await db.project.delete({
- where: {
- id: projectId,
- },
- });
+ if (p.githubToken && p.deployKeyPublic) {
+ const allRepos = [
+ ...new Set([
+ ...extractGithubRepos(p.state),
+ ...extractGithubRepos(p.draft),
+ ...extractGithubRepos(parseResult.data.state),
+ ]),
+ ];
+ if (allRepos.length > 0) {
+ const diff: RepoDiff = { toDelete: allRepos, toAdd: [] };
+ const github = new GithubClient(p.githubToken);
+ await manageGithubRepos(github, diff, p.deployKeyPublic, env.PUBLIC_ADDR);
+ console.log(
+ `Attempted to remove deploy keys for project ${projectId} from associated GitHub repositories.`,
+ );
+ }
}
+ if (p.instanceId !== null) {
+ if (!(await appManager.removeInstance(p.instanceId))) {
+ resp.status(500);
+ resp.write(JSON.stringify({ error: "Failed to remove deployment from cluster" }));
+ return;
+ }
+ }
+ await db.project.delete({
+ where: {
+ id: projectId,
+ },
+ });
resp.status(200);
} catch (e) {
console.log(e);
@@ -171,7 +222,7 @@
}
};
-function extractGithubRepos(serializedState: string | null): string[] {
+function extractGithubRepos(serializedState: string | null | undefined): string[] {
if (!serializedState) {
return [];
}
@@ -248,6 +299,7 @@
instanceId: true,
githubToken: true,
deployKey: true,
+ deployKeyPublic: true,
state: true,
},
});
@@ -263,11 +315,24 @@
draft: state,
},
});
+ let deployKey: string | null = p.deployKey;
+ let deployKeyPublic: string | null = p.deployKeyPublic;
+ if (deployKeyPublic == null) {
+ [deployKeyPublic, deployKey] = await generateKey(tmp.dirSync().name);
+ await db.project.update({
+ where: { id: projectId },
+ data: { deployKeyPublic, deployKey },
+ });
+ }
let diff: RepoDiff | null = null;
- let deployKey: string | null = null;
+ const config = req.body.config;
+ config.input.key = {
+ public: deployKeyPublic,
+ private: deployKey,
+ };
try {
if (p.instanceId == null) {
- const deployResponse = await appManager.deploy(req.body.config);
+ const deployResponse = await appManager.deploy(config);
await db.project.update({
where: {
id: projectId,
@@ -276,16 +341,13 @@
state,
draft: null,
instanceId: deployResponse.id,
- deployKey: deployResponse.deployKey,
access: JSON.stringify(deployResponse.access),
},
});
diff = { toAdd: extractGithubRepos(state) };
- deployKey = deployResponse.deployKey;
} else {
- const deployResponse = await appManager.update(p.instanceId, req.body.config);
+ const deployResponse = await appManager.update(p.instanceId, config);
diff = calculateRepoDiff(extractGithubRepos(p.state), extractGithubRepos(state));
- deployKey = p.deployKey;
await db.project.update({
where: {
id: projectId,
@@ -299,7 +361,7 @@
}
if (diff && p.githubToken && deployKey) {
const github = new GithubClient(p.githubToken);
- await manageGithubRepos(github, diff, deployKey, env.PUBLIC_ADDR);
+ await manageGithubRepos(github, diff, deployKeyPublic!, env.PUBLIC_ADDR);
}
resp.status(200);
} catch (error) {
@@ -362,7 +424,7 @@
select: {
instanceId: true,
githubToken: true,
- deployKey: true,
+ deployKeyPublic: true,
state: true,
draft: true,
},
@@ -383,12 +445,12 @@
resp.write(JSON.stringify({ error: "Failed to remove deployment from cluster" }));
return;
}
- if (p.githubToken && p.deployKey && p.state) {
+ if (p.githubToken && p.deployKeyPublic && p.state) {
try {
const github = new GithubClient(p.githubToken);
const repos = extractGithubRepos(p.state);
const diff = { toDelete: repos, toAdd: [] };
- await manageGithubRepos(github, diff, p.deployKey, env.PUBLIC_ADDR);
+ await manageGithubRepos(github, diff, p.deployKeyPublic, env.PUBLIC_ADDR);
} catch (error) {
console.error("Error removing GitHub deploy keys:", error);
}
@@ -399,7 +461,7 @@
},
data: {
instanceId: null,
- deployKey: null,
+ deployKeyPublic: null,
access: null,
state: null,
draft: p.draft ?? p.state,
@@ -476,9 +538,10 @@
userId: resp.locals.userId,
},
select: {
- deployKey: true,
+ deployKeyPublic: true,
githubToken: true,
access: true,
+ instanceId: true,
},
});
if (!project) {
@@ -502,7 +565,8 @@
resp.write(
JSON.stringify({
managerAddr: env.INTERNAL_API_ADDR,
- deployKey: project.deployKey,
+ deployKeyPublic: project.deployKeyPublic == null ? undefined : project.deployKeyPublic,
+ instanceId: project.instanceId == null ? undefined : project.instanceId,
access: JSON.parse(project.access ?? "[]"),
integrations: {
github: !!project.githubToken,
@@ -683,6 +747,65 @@
}
};
+const analyzeRepoReqSchema = z.object({
+ address: z.string(),
+});
+
+const handleAnalyzeRepo: express.Handler = async (req, resp) => {
+ const projectId = Number(req.params["projectId"]);
+ const project = await db.project.findUnique({
+ where: {
+ id: projectId,
+ userId: resp.locals.userId,
+ },
+ select: {
+ githubToken: true,
+ deployKey: true,
+ deployKeyPublic: true,
+ },
+ });
+ if (!project) {
+ resp.status(404).send({ error: "Project not found" });
+ return;
+ }
+ if (!project.githubToken) {
+ resp.status(400).send({ error: "GitHub token not configured" });
+ return;
+ }
+ let deployKey: string | null = project.deployKey;
+ let deployKeyPublic: string | null = project.deployKeyPublic;
+ if (!deployKeyPublic) {
+ [deployKeyPublic, deployKey] = await generateKey(tmp.dirSync().name);
+ await db.project.update({
+ where: { id: projectId },
+ data: {
+ deployKeyPublic: deployKeyPublic,
+ deployKey: deployKey,
+ },
+ });
+ }
+ const github = new GithubClient(project.githubToken);
+ const result = analyzeRepoReqSchema.safeParse(req.body);
+ if (!result.success) {
+ resp.status(400).send({ error: "Invalid request data" });
+ return;
+ }
+ const { address } = result.data;
+ const tmpDir = tmp.dirSync();
+ await github.addDeployKey(address, deployKeyPublic);
+ await fs.promises.writeFile(path.join(tmpDir.name, "key"), deployKey!, {
+ mode: 0o600,
+ });
+ shell.exec(
+ `GIT_SSH_COMMAND='ssh -i ${tmpDir.name}/key -o IdentitiesOnly=yes' git clone ${address} ${tmpDir.name}/code`,
+ );
+ const fsc = new RealFileSystem(`${tmpDir.name}/code`);
+ const analyzer = new NodeJSAnalyzer();
+ const info = await analyzer.analyze(fsc, "/");
+ console.log(info);
+ resp.status(200).send([info]);
+};
+
const auth = (req: express.Request, resp: express.Response, next: express.NextFunction) => {
const userId = req.get("x-forwarded-userid");
const username = req.get("x-forwarded-user");
@@ -761,13 +884,14 @@
// Authenticated project routes
const projectRouter = express.Router();
- projectRouter.use(auth); // Apply auth middleware to this router
+ projectRouter.use(auth);
+ projectRouter.post("/:projectId/analyze", handleAnalyzeRepo);
projectRouter.post("/:projectId/saved", handleSave);
projectRouter.get("/:projectId/saved/deploy", handleSavedGet("deploy"));
projectRouter.get("/:projectId/saved/draft", handleSavedGet("draft"));
projectRouter.post("/:projectId/deploy", handleDeploy);
projectRouter.get("/:projectId/status", handleStatus);
- projectRouter.delete("/:projectId", handleDelete);
+ projectRouter.delete("/:projectId", handleProjectDelete);
projectRouter.get("/:projectId/repos/github", handleGithubRepos);
projectRouter.post("/:projectId/github-token", handleUpdateGithubToken);
projectRouter.get("/:projectId/env", handleEnv);
diff --git a/apps/canvas/back/src/lib/analyze.ts b/apps/canvas/back/src/lib/analyze.ts
new file mode 100644
index 0000000..e1fa699
--- /dev/null
+++ b/apps/canvas/back/src/lib/analyze.ts
@@ -0,0 +1,58 @@
+import { FileSystem } from "./fs";
+
+export interface ServiceAnalyzer {
+ detect: (fs: FileSystem, root: string) => boolean;
+ analyze: (fs: FileSystem, root: string) => Promise<ServiceInfo>;
+}
+
+export interface ConfigVar {
+ name: string;
+ category: ConfigVarCategory;
+ type?: ConfigVarType;
+ semanticType?: ConfigVarSemanticType;
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ defaultValue?: any;
+ description?: string;
+ required?: boolean;
+ sensitive?: boolean;
+}
+
+export enum ConfigVarCategory {
+ CommandLineFlag = "CommandLineFlag",
+ EnvironmentVariable = "EnvironmentVariable",
+}
+
+export enum ConfigVarType {
+ String = "String",
+ Number = "Number",
+ Boolean = "Boolean",
+}
+
+export enum ConfigVarSemanticType {
+ EXPANDED_ENV_VAR = "EXPANDED_ENV_VAR",
+ PORT = "PORT",
+ FILESYSTEM_PATH = "FILESYSTEM_PATH",
+ DATABASE_URL = "DATABASE_URL",
+ SQLITE_PATH = "SQLITE_PATH",
+ POSTGRES_URL = "POSTGRES_URL",
+ POSTGRES_PASSWORD = "POSTGRES_PASSWORD",
+ POSTGRES_USER = "POSTGRES_USER",
+ POSTGRES_DB = "POSTGRES_DB",
+ POSTGRES_PORT = "POSTGRES_PORT",
+ POSTGRES_HOST = "POSTGRES_HOST",
+ POSTGRES_SSL = "POSTGRES_SSL",
+ MONGO_URL = "MONGO_URL",
+ MONGO_PASSWORD = "MONGO_PASSWORD",
+ MONGO_USER = "MONGO_USER",
+ MONGO_DB = "MONGO_DB",
+ MONGO_PORT = "MONGO_PORT",
+ MONGO_HOST = "MONGO_HOST",
+ MONGO_SSL = "MONGO_SSL",
+}
+
+export interface ServiceInfo {
+ name: string;
+ location: string;
+ configVars: Array<ConfigVar>;
+ commands: Array<string>;
+}
diff --git a/apps/canvas/back/src/lib/env.ts b/apps/canvas/back/src/lib/env.ts
new file mode 100644
index 0000000..9b58ae1
--- /dev/null
+++ b/apps/canvas/back/src/lib/env.ts
@@ -0,0 +1,28 @@
+const regex = /(?<!\\)\${([^{}]+)}|(?<!\\)\$([A-Za-z_][A-Za-z0-9_]*)/g;
+
+export function expandValue(value: string): string[] {
+ const vars = new Set<string>();
+ const matches = [...value.matchAll(regex)];
+ while (matches.length > 0) {
+ const match = matches.shift();
+ if (match == null) {
+ break;
+ }
+ const [_, bracedExpression, unbracedExpression] = match;
+ const expression = bracedExpression || unbracedExpression;
+ const opRegex = /(:\+|\+|:-|-)/;
+ const opMatch = expression.match(opRegex);
+ const splitter = opMatch ? opMatch[0] : null;
+ let r: string[];
+ if (splitter != null) {
+ r = expression.split(splitter);
+ } else {
+ r = [expression];
+ }
+ const key = r.shift();
+ if (key != null) {
+ vars.add(key);
+ }
+ }
+ return [...vars];
+}
diff --git a/apps/canvas/back/src/lib/fs.ts b/apps/canvas/back/src/lib/fs.ts
new file mode 100644
index 0000000..75d9453
--- /dev/null
+++ b/apps/canvas/back/src/lib/fs.ts
@@ -0,0 +1,20 @@
+import fs from "fs";
+import path from "path";
+
+export interface FileSystem {
+ exists(path: string): boolean;
+ readFile(path: string, encoding?: BufferEncoding): Promise<string>;
+}
+
+export class RealFileSystem implements FileSystem {
+ constructor(private readonly root: string) {}
+
+ exists(p: string): boolean {
+ return fs.existsSync(path.join(this.root, p));
+ }
+
+ async readFile(p: string, encoding?: BufferEncoding): Promise<string> {
+ const contents = await fs.promises.readFile(path.join(this.root, p));
+ return contents.toString(encoding);
+ }
+}
diff --git a/apps/canvas/back/src/lib/nodejs.test.ts b/apps/canvas/back/src/lib/nodejs.test.ts
new file mode 100644
index 0000000..7d406b1
--- /dev/null
+++ b/apps/canvas/back/src/lib/nodejs.test.ts
@@ -0,0 +1,83 @@
+import { NodeJSAnalyzer } from "./nodejs";
+import { FileSystem, RealFileSystem } from "./fs";
+import { Volume, IFs, createFsFromVolume } from "memfs";
+import { test, expect } from "@jest/globals";
+import { expandValue } from "./env";
+import shell from "shelljs";
+
+class InMemoryFileSystem implements FileSystem {
+ constructor(private readonly fs: IFs) {}
+
+ exists(path: string): boolean {
+ return this.fs.existsSync(path);
+ }
+
+ // TODO(gio): add encoding
+ async readFile(path: string, encoding?: BufferEncoding): Promise<string> {
+ const contents = await this.fs.promises.readFile(path);
+ return contents.toString(encoding);
+ }
+}
+
+test("canvas", async () => {
+ const fs: FileSystem = new RealFileSystem("/home/gio/code/apps/canvas/back");
+ const analyzer = new NodeJSAnalyzer();
+ expect(analyzer.detect(fs, "/")).toBe(true);
+ const info = await analyzer.analyze(fs, "/");
+ console.log(info);
+});
+
+test("nodejs", async () => {
+ return;
+ const root = "/";
+ const vol = Volume.fromNestedJSON(
+ {
+ "package.json": JSON.stringify({
+ name: "test",
+ version: "1.0.0",
+ dependencies: {
+ prisma: "6.6.0",
+ },
+ }),
+ "package-lock.json": "fake",
+ "prisma/schema.prisma": `
+ datasource db {
+ provider = "sqlite"
+ url = env("DB_URL")
+ }
+ `,
+ },
+ root,
+ );
+ const fs: FileSystem = new InMemoryFileSystem(createFsFromVolume(vol));
+ const analyzer = new NodeJSAnalyzer();
+ expect(analyzer.detect(fs, root)).toBe(true);
+ const info = await analyzer.analyze(fs, root);
+ console.log(info);
+});
+
+test("env", () => {
+ console.log(expandValue("${PORT} ${DODO_VOLUME_DB}"));
+ console.log(expandValue("$PORT $DODO_VOLUME_DB"));
+ console.log(expandValue("${UNDEFINED:-${MACHINE}${UNDEFINED:-default}}"));
+});
+
+test("clone", async () => {
+ expect(shell.which("ssh-agent")).toBeTruthy();
+ expect(shell.which("ssh-add")).toBeTruthy();
+ expect(shell.which("git")).toBeTruthy();
+ expect(
+ shell.exec(
+ "GIT_SSH_COMMAND='ssh -i /home/gio/.ssh/key -o IdentitiesOnly=yes' git clone git@github.com:giolekva/dodo-blog.git /tmp/dodo-blog",
+ ).code,
+ ).toBe(0);
+ const fs: FileSystem = new RealFileSystem("/tmp/dodo-blog");
+ const analyzer = new NodeJSAnalyzer();
+ expect(analyzer.detect(fs, "/")).toBe(true);
+ const info = await analyzer.analyze(fs, "/");
+ console.log(info);
+});
+
+test("keygen", () => {
+ expect(shell.exec(`ssh-keygen -y -t ed25519 -f /tmp/key`).code).toBe(0);
+});
diff --git a/apps/canvas/back/src/lib/nodejs.ts b/apps/canvas/back/src/lib/nodejs.ts
new file mode 100644
index 0000000..07e6c1f
--- /dev/null
+++ b/apps/canvas/back/src/lib/nodejs.ts
@@ -0,0 +1,280 @@
+import path from "path";
+import { FileSystem } from "./fs";
+import { ServiceAnalyzer, ConfigVar, ConfigVarCategory, ConfigVarSemanticType } from "./analyze";
+import { parse as parseDotenv } from "dotenv";
+import { parsePrismaSchema } from "@loancrate/prisma-schema-parser";
+import { augmentConfigVar } from "./semantics";
+import { expandValue } from "./env";
+import { z } from "zod";
+
+const packageJsonFileName = "package.json";
+
+// eslint-disable-next-line @typescript-eslint/no-unused-vars
+const packageJsonSchema = z.object({
+ name: z.optional(z.string()),
+ version: z.optional(z.string()),
+ engines: z.optional(
+ z.object({
+ node: z.optional(z.string()),
+ deno: z.optional(z.string()),
+ }),
+ ),
+ dependencies: z.optional(z.record(z.string(), z.string())),
+ devDependencies: z.optional(z.record(z.string(), z.string())),
+});
+
+type PackageJson = z.infer<typeof packageJsonSchema>;
+
+interface ConfigVarDetector {
+ (fs: FileSystem, root: string, packageJson: PackageJson): Promise<ConfigVar | ConfigVar[] | null>;
+}
+
+// TODO(gio): add bun, deno, ...
+type NodeJSPackageManager =
+ | {
+ name: "npm";
+ version?: string;
+ }
+ | {
+ name: "pnpm";
+ version?: string;
+ }
+ | {
+ name: "yarn";
+ version?: string;
+ };
+
+type Runtime =
+ | {
+ name: "node";
+ version?: string;
+ }
+ | {
+ name: "deno";
+ version?: string;
+ };
+
+const defaultRuntime: Runtime = {
+ name: "node",
+};
+
+const defaultPackageManager: NodeJSPackageManager = {
+ name: "npm",
+};
+
+export class NodeJSAnalyzer implements ServiceAnalyzer {
+ detect(fs: FileSystem, root: string) {
+ const packageJsonPath = path.join(root, packageJsonFileName);
+ if (!fs.exists(packageJsonPath)) {
+ return false;
+ }
+ // TODO(gio): maybe it's deno
+ return true;
+ }
+
+ async analyze(fs: FileSystem, root: string) {
+ const packageJsonPath = path.join(root, packageJsonFileName);
+ const packageJson = JSON.parse(await fs.readFile(packageJsonPath));
+ const runtime = this.detectRuntime(packageJson);
+ const packageManager = this.detectPackageManager(fs, root);
+ console.log(runtime, packageManager);
+ let envVars = await this.detectEnvVars(fs, root);
+ const detectors: ConfigVarDetector[] = [this.detectPrismaSchema, this.detectNextjs, this.detectExpressjs];
+ const all = await Promise.all(
+ detectors.map(async (detector) => {
+ return await detector(fs, root, packageJson);
+ }),
+ );
+ all.map((cv) => {
+ if (Array.isArray(cv)) {
+ cv.forEach((v) => this.mergeConfigVars(envVars, v));
+ } else {
+ this.mergeConfigVars(envVars, cv);
+ }
+ });
+ envVars = envVars.filter((v) => v.semanticType != ConfigVarSemanticType.EXPANDED_ENV_VAR);
+ envVars.forEach((v) => augmentConfigVar(v));
+ return {
+ name: "name" in packageJson ? packageJson.name : "NodeJS",
+ location: root,
+ configVars: envVars,
+ commands: [],
+ };
+ }
+
+ private mergeConfigVars(configVars: ConfigVar[], v: ConfigVar | null) {
+ if (v == null) {
+ return;
+ }
+ const existing = configVars.find((c) => c.name === v.name);
+ if (existing != null) {
+ existing.category = existing.category ?? v.category;
+ existing.semanticType = existing.semanticType ?? v.semanticType;
+ existing.defaultValue = existing.defaultValue ?? v.defaultValue;
+ existing.description = existing.description ?? v.description;
+ existing.required = existing.required ?? v.required;
+ existing.sensitive = v.sensitive;
+ } else {
+ configVars.push(v);
+ }
+ }
+
+ private detectRuntime(packageJson: PackageJson): Runtime {
+ if (packageJson.engines && packageJson.engines.node) {
+ return {
+ name: "node",
+ version: packageJson.engines.node,
+ };
+ } else if (packageJson.engines && packageJson.engines.deno) {
+ return {
+ name: "deno",
+ version: packageJson.engines.deno,
+ };
+ }
+ return defaultRuntime;
+ }
+
+ private detectPackageManager(fs: FileSystem, root: string): NodeJSPackageManager | null {
+ if (fs.exists(path.join(root, "package-lock.yaml"))) {
+ return {
+ name: "npm",
+ };
+ } else if (fs.exists(path.join(root, "pnpm-lock.yaml"))) {
+ return {
+ name: "pnpm",
+ };
+ } else if (fs.exists(path.join(root, "yarn.lock"))) {
+ return {
+ name: "yarn",
+ };
+ }
+ return defaultPackageManager;
+ }
+
+ private async detectEnvVars(fs: FileSystem, root: string): Promise<ConfigVar[]> {
+ const envFilePath = path.join(root, ".env");
+ if (!fs.exists(envFilePath)) {
+ return [];
+ }
+ const envVars: ConfigVar[] = [];
+ const fileContent = await fs.readFile(envFilePath);
+ const parsedEnv = parseDotenv(fileContent);
+ for (const key in parsedEnv) {
+ if (Object.prototype.hasOwnProperty.call(parsedEnv, key)) {
+ const defaultValue = parsedEnv[key];
+ const vars = expandValue(defaultValue);
+ envVars.push({
+ name: key,
+ defaultValue,
+ category: ConfigVarCategory.EnvironmentVariable,
+ semanticType: ConfigVarSemanticType.EXPANDED_ENV_VAR,
+ });
+ vars.forEach((v) => {
+ envVars.push({
+ name: v,
+ defaultValue: "", // TODO(gio): add default value
+ category: ConfigVarCategory.EnvironmentVariable,
+ });
+ });
+ }
+ }
+ return envVars;
+ }
+
+ private async detectPrismaSchema(
+ fs: FileSystem,
+ root: string,
+ packageJson: PackageJson,
+ ): Promise<ConfigVar | ConfigVar[] | null> {
+ if (packageJson?.dependencies?.prisma == null && packageJson?.devDependencies?.prisma == null) {
+ return null;
+ }
+ let schemaPath = path.join(root, "prisma", "schema.prisma");
+ if (!fs.exists(schemaPath)) {
+ schemaPath = path.join(root, "schema.prisma");
+ if (!fs.exists(schemaPath)) {
+ return null;
+ }
+ }
+ const schemaContent = await fs.readFile(schemaPath);
+ const ast = parsePrismaSchema(schemaContent);
+ let urlVar: string | null = null;
+ let dbType: ConfigVarSemanticType | null = null;
+ for (const element of ast.declarations) {
+ if (element.kind === "datasource") {
+ for (const prop of element.members) {
+ if (prop.kind === "config") {
+ switch (prop.name.value) {
+ case "url": {
+ if (
+ prop.value.kind === "functionCall" &&
+ prop.value.path.value[0] === "env" &&
+ prop.value.args != null
+ ) {
+ const arg = prop.value.args[0];
+ if (arg.kind === "literal" && typeof arg.value === "string") {
+ urlVar = arg.value;
+ }
+ }
+ break;
+ }
+ case "provider": {
+ if (prop.value.kind === "literal" && typeof prop.value.value === "string") {
+ switch (prop.value.value) {
+ case "postgresql": {
+ dbType = ConfigVarSemanticType.POSTGRES_URL;
+ break;
+ }
+ case "sqlite": {
+ dbType = ConfigVarSemanticType.SQLITE_PATH;
+ break;
+ }
+ default: {
+ throw new Error(`Unsupported database type: ${prop.value.value}`);
+ }
+ }
+ }
+ break;
+ }
+ }
+ }
+ }
+ }
+ }
+ if (urlVar == null || dbType == null) {
+ return null;
+ }
+ return {
+ name: urlVar,
+ category: ConfigVarCategory.EnvironmentVariable,
+ semanticType: dbType,
+ };
+ }
+
+ private async detectNextjs(fs: FileSystem, root: string): Promise<ConfigVar | ConfigVar[] | null> {
+ const nextConfigPath = path.join(root, "next.config.mjs");
+ if (!fs.exists(nextConfigPath)) {
+ return null;
+ }
+ return {
+ name: "PORT",
+ category: ConfigVarCategory.EnvironmentVariable,
+ semanticType: ConfigVarSemanticType.PORT,
+ };
+ }
+
+ private async detectExpressjs(
+ fs: FileSystem,
+ root: string,
+ packageJson: PackageJson,
+ ): Promise<ConfigVar | ConfigVar[] | null> {
+ if (packageJson?.dependencies?.express == null && packageJson?.devDependencies?.express == null) {
+ return null;
+ }
+ return {
+ name: "PORT",
+ category: ConfigVarCategory.EnvironmentVariable,
+ semanticType: ConfigVarSemanticType.PORT,
+ };
+ }
+}
diff --git a/apps/canvas/back/src/lib/semantics.ts b/apps/canvas/back/src/lib/semantics.ts
new file mode 100644
index 0000000..2cbd1ab
--- /dev/null
+++ b/apps/canvas/back/src/lib/semantics.ts
@@ -0,0 +1,64 @@
+import { ConfigVar, ConfigVarSemanticType } from "./analyze";
+
+export function augmentConfigVar(cv: ConfigVar) {
+ if (cv.semanticType != null) {
+ return;
+ }
+ const name = cv.name.toLowerCase();
+ const value = cv.defaultValue ? String(cv.defaultValue).toLowerCase() : "";
+ if (name.includes("postgres") || name.includes("pg")) {
+ if (name.includes("url")) cv.semanticType = ConfigVarSemanticType.POSTGRES_URL;
+ if (name.includes("password") || name.includes("pass"))
+ cv.semanticType = ConfigVarSemanticType.POSTGRES_PASSWORD;
+ if (name.includes("user")) cv.semanticType = ConfigVarSemanticType.POSTGRES_USER;
+ if (
+ name.includes("db_name") ||
+ name.includes("database_name") ||
+ (name.includes("db") && !name.includes("url"))
+ )
+ cv.semanticType = ConfigVarSemanticType.POSTGRES_DB;
+ if (name.includes("port")) cv.semanticType = ConfigVarSemanticType.POSTGRES_PORT;
+ if (name.includes("host")) cv.semanticType = ConfigVarSemanticType.POSTGRES_HOST;
+ if (name.includes("sslmode") || name.includes("ssl")) cv.semanticType = ConfigVarSemanticType.POSTGRES_SSL;
+ }
+ if (name.includes("mongo")) {
+ if (name.includes("url") || name.includes("uri")) cv.semanticType = ConfigVarSemanticType.MONGO_URL;
+ if (name.includes("password") || name.includes("pass")) cv.semanticType = ConfigVarSemanticType.MONGO_PASSWORD;
+ if (name.includes("user")) cv.semanticType = ConfigVarSemanticType.MONGO_USER;
+ if (
+ name.includes("db_name") ||
+ name.includes("database_name") ||
+ (name.includes("db") && !name.includes("url"))
+ )
+ cv.semanticType = ConfigVarSemanticType.MONGO_DB;
+ if (name.includes("port")) cv.semanticType = ConfigVarSemanticType.MONGO_PORT;
+ if (name.includes("host")) cv.semanticType = ConfigVarSemanticType.MONGO_HOST;
+ if (name.includes("ssl")) cv.semanticType = ConfigVarSemanticType.MONGO_SSL;
+ }
+ if (name.includes("sqlite_path") || value.endsWith(".sqlite") || value.endsWith(".db3") || value.endsWith(".db")) {
+ cv.semanticType = ConfigVarSemanticType.SQLITE_PATH;
+ }
+ if (name.includes("sqlite") && (name.includes("url") || name.includes("uri") || name.includes("path"))) {
+ cv.semanticType = ConfigVarSemanticType.SQLITE_PATH;
+ }
+ if (name.includes("database_url") || name.includes("db_url")) {
+ cv.semanticType = ConfigVarSemanticType.DATABASE_URL;
+ }
+ if (name.includes("path") || name.includes("file_path") || name.includes("dir") || name.includes("directory")) {
+ if (
+ !name.includes("url") &&
+ (value.startsWith("./") || value.startsWith("../") || value.startsWith("/") || /^[a-z]:[\\/]/.test(value))
+ ) {
+ cv.semanticType = ConfigVarSemanticType.FILESYSTEM_PATH;
+ }
+ }
+ if (
+ !name.includes("url") &&
+ (value.startsWith("./") || value.startsWith("../") || value.startsWith("/") || /^[a-z]:[\\/]/.test(value))
+ ) {
+ cv.semanticType = ConfigVarSemanticType.FILESYSTEM_PATH;
+ }
+ if (name.includes("port")) {
+ cv.semanticType = ConfigVarSemanticType.PORT;
+ }
+}