Canvas: Implement basic service discovery logic

Change-Id: I71b25076dba94d6491ad4db748b259870991c526
diff --git a/apps/canvas/back/src/lib/analyze.ts b/apps/canvas/back/src/lib/analyze.ts
new file mode 100644
index 0000000..e1fa699
--- /dev/null
+++ b/apps/canvas/back/src/lib/analyze.ts
@@ -0,0 +1,58 @@
+import { FileSystem } from "./fs";
+
/**
 * Pluggable per-stack analyzer (e.g. NodeJS) used by service discovery.
 * `detect` is a cheap check for whether `root` looks like this stack;
 * `analyze` performs the full scan and returns the discovered metadata.
 */
export interface ServiceAnalyzer {
	detect: (fs: FileSystem, root: string) => boolean;
	analyze: (fs: FileSystem, root: string) => Promise<ServiceInfo>;
}
+
/** A single configuration knob discovered for a service. */
export interface ConfigVar {
	// Name as the service reads it (e.g. "PORT", "DATABASE_URL").
	name: string;
	// How the value is delivered to the process (env var vs CLI flag).
	category: ConfigVarCategory;
	// Primitive type, when it can be inferred.
	type?: ConfigVarType;
	// Higher-level meaning (port, database URL, path, ...), when detected.
	semanticType?: ConfigVarSemanticType;
	// eslint-disable-next-line @typescript-eslint/no-explicit-any
	defaultValue?: any;
	description?: string;
	required?: boolean;
	// NOTE(review): exact semantics of `sensitive` (secret redaction?) are not
	// visible in this file — confirm with consumers before documenting further.
	sensitive?: boolean;
}
+
/** How a config value is delivered to the service process. */
export enum ConfigVarCategory {
	CommandLineFlag = "CommandLineFlag",
	EnvironmentVariable = "EnvironmentVariable",
}

/** Primitive type of a config value. */
export enum ConfigVarType {
	String = "String",
	Number = "Number",
	Boolean = "Boolean",
}

/**
 * Higher-level meaning attached to a config var so callers can wire the
 * right resource (ports, databases, filesystem paths).
 */
export enum ConfigVarSemanticType {
	// Internal marker: a raw .env entry whose value contains ${...} interpolations.
	// Entries with this type are filtered out once the referenced vars are extracted.
	EXPANDED_ENV_VAR = "EXPANDED_ENV_VAR",
	PORT = "PORT",
	FILESYSTEM_PATH = "FILESYSTEM_PATH",
	// Generic database connection string (engine unknown).
	DATABASE_URL = "DATABASE_URL",
	SQLITE_PATH = "SQLITE_PATH",
	// PostgreSQL connection components.
	POSTGRES_URL = "POSTGRES_URL",
	POSTGRES_PASSWORD = "POSTGRES_PASSWORD",
	POSTGRES_USER = "POSTGRES_USER",
	POSTGRES_DB = "POSTGRES_DB",
	POSTGRES_PORT = "POSTGRES_PORT",
	POSTGRES_HOST = "POSTGRES_HOST",
	POSTGRES_SSL = "POSTGRES_SSL",
	// MongoDB connection components.
	MONGO_URL = "MONGO_URL",
	MONGO_PASSWORD = "MONGO_PASSWORD",
	MONGO_USER = "MONGO_USER",
	MONGO_DB = "MONGO_DB",
	MONGO_PORT = "MONGO_PORT",
	MONGO_HOST = "MONGO_HOST",
	MONGO_SSL = "MONGO_SSL",
}
+
/** Result of analyzing a single service directory. */
export interface ServiceInfo {
	name: string;
	// Root of the service, as passed to the analyzer.
	location: string;
	configVars: Array<ConfigVar>;
	// Build/run commands; not yet populated by the NodeJS analyzer at time of writing.
	commands: Array<string>;
}
diff --git a/apps/canvas/back/src/lib/env.ts b/apps/canvas/back/src/lib/env.ts
new file mode 100644
index 0000000..9b58ae1
--- /dev/null
+++ b/apps/canvas/back/src/lib/env.ts
@@ -0,0 +1,28 @@
+const regex = /(?<!\\)\${([^{}]+)}|(?<!\\)\$([A-Za-z_][A-Za-z0-9_]*)/g;
+
+export function expandValue(value: string): string[] {
+	const vars = new Set<string>();
+	const matches = [...value.matchAll(regex)];
+	while (matches.length > 0) {
+		const match = matches.shift();
+		if (match == null) {
+			break;
+		}
+		const [_, bracedExpression, unbracedExpression] = match;
+		const expression = bracedExpression || unbracedExpression;
+		const opRegex = /(:\+|\+|:-|-)/;
+		const opMatch = expression.match(opRegex);
+		const splitter = opMatch ? opMatch[0] : null;
+		let r: string[];
+		if (splitter != null) {
+			r = expression.split(splitter);
+		} else {
+			r = [expression];
+		}
+		const key = r.shift();
+		if (key != null) {
+			vars.add(key);
+		}
+	}
+	return [...vars];
+}
diff --git a/apps/canvas/back/src/lib/fs.ts b/apps/canvas/back/src/lib/fs.ts
new file mode 100644
index 0000000..75d9453
--- /dev/null
+++ b/apps/canvas/back/src/lib/fs.ts
@@ -0,0 +1,20 @@
+import fs from "fs";
+import path from "path";
+
+export interface FileSystem {
+	exists(path: string): boolean;
+	readFile(path: string, encoding?: BufferEncoding): Promise<string>;
+}
+
+export class RealFileSystem implements FileSystem {
+	constructor(private readonly root: string) {}
+
+	exists(p: string): boolean {
+		return fs.existsSync(path.join(this.root, p));
+	}
+
+	async readFile(p: string, encoding?: BufferEncoding): Promise<string> {
+		const contents = await fs.promises.readFile(path.join(this.root, p));
+		return contents.toString(encoding);
+	}
+}
diff --git a/apps/canvas/back/src/lib/nodejs.test.ts b/apps/canvas/back/src/lib/nodejs.test.ts
new file mode 100644
index 0000000..7d406b1
--- /dev/null
+++ b/apps/canvas/back/src/lib/nodejs.test.ts
@@ -0,0 +1,83 @@
+import { NodeJSAnalyzer } from "./nodejs";
+import { FileSystem, RealFileSystem } from "./fs";
+import { Volume, IFs, createFsFromVolume } from "memfs";
+import { test, expect } from "@jest/globals";
+import { expandValue } from "./env";
+import shell from "shelljs";
+
+class InMemoryFileSystem implements FileSystem {
+	constructor(private readonly fs: IFs) {}
+
+	exists(path: string): boolean {
+		return this.fs.existsSync(path);
+	}
+
+	// TODO(gio): add encoding
+	async readFile(path: string, encoding?: BufferEncoding): Promise<string> {
+		const contents = await this.fs.promises.readFile(path);
+		return contents.toString(encoding);
+	}
+}
+
// NOTE(review): this test hard-codes a developer-machine absolute path and
// only console.logs the analysis result (no assertions on `info`), so it can
// only pass on the author's workstation. Consider gating it behind an env
// variable or replacing it with an in-memory fixture.
test("canvas", async () => {
	const fs: FileSystem = new RealFileSystem("/home/gio/code/apps/canvas/back");
	const analyzer = new NodeJSAnalyzer();
	expect(analyzer.detect(fs, "/")).toBe(true);
	const info = await analyzer.analyze(fs, "/");
	console.log(info);
});
+
+test("nodejs", async () => {
+	return;
+	const root = "/";
+	const vol = Volume.fromNestedJSON(
+		{
+			"package.json": JSON.stringify({
+				name: "test",
+				version: "1.0.0",
+				dependencies: {
+					prisma: "6.6.0",
+				},
+			}),
+			"package-lock.json": "fake",
+			"prisma/schema.prisma": `
+				datasource db {
+					provider = "sqlite"
+					url = env("DB_URL")
+				}
+			`,
+		},
+		root,
+	);
+	const fs: FileSystem = new InMemoryFileSystem(createFsFromVolume(vol));
+	const analyzer = new NodeJSAnalyzer();
+	expect(analyzer.detect(fs, root)).toBe(true);
+	const info = await analyzer.analyze(fs, root);
+	console.log(info);
+});
+
+test("env", () => {
+	console.log(expandValue("${PORT} ${DODO_VOLUME_DB}"));
+	console.log(expandValue("$PORT $DODO_VOLUME_DB"));
+	console.log(expandValue("${UNDEFINED:-${MACHINE}${UNDEFINED:-default}}"));
+});
+
// NOTE(review): environment-specific integration test — requires ssh/git on
// PATH, a personal private key at /home/gio/.ssh/key, network access to
// GitHub, and a clean /tmp/dodo-blog (a second run fails because the clone
// target already exists). Should be gated or moved out of the unit suite.
test("clone", async () => {
	expect(shell.which("ssh-agent")).toBeTruthy();
	expect(shell.which("ssh-add")).toBeTruthy();
	expect(shell.which("git")).toBeTruthy();
	expect(
		shell.exec(
			"GIT_SSH_COMMAND='ssh -i /home/gio/.ssh/key -o IdentitiesOnly=yes' git clone git@github.com:giolekva/dodo-blog.git /tmp/dodo-blog",
		).code,
	).toBe(0);
	const fs: FileSystem = new RealFileSystem("/tmp/dodo-blog");
	const analyzer = new NodeJSAnalyzer();
	expect(analyzer.detect(fs, "/")).toBe(true);
	const info = await analyzer.analyze(fs, "/");
	console.log(info);
});
+
// NOTE(review): `ssh-keygen -y` does not generate a key — it prints the
// public key derived from an EXISTING private key at /tmp/key, so this fails
// unless that file is already present on the machine. If generation was the
// intent, the command is presumably `ssh-keygen -t ed25519 -f /tmp/key -N ''`
// — confirm before changing.
test("keygen", () => {
	expect(shell.exec(`ssh-keygen -y -t ed25519 -f /tmp/key`).code).toBe(0);
});
diff --git a/apps/canvas/back/src/lib/nodejs.ts b/apps/canvas/back/src/lib/nodejs.ts
new file mode 100644
index 0000000..07e6c1f
--- /dev/null
+++ b/apps/canvas/back/src/lib/nodejs.ts
@@ -0,0 +1,280 @@
+import path from "path";
+import { FileSystem } from "./fs";
+import { ServiceAnalyzer, ConfigVar, ConfigVarCategory, ConfigVarSemanticType } from "./analyze";
+import { parse as parseDotenv } from "dotenv";
+import { parsePrismaSchema } from "@loancrate/prisma-schema-parser";
+import { augmentConfigVar } from "./semantics";
+import { expandValue } from "./env";
+import { z } from "zod";
+
// Marker file whose presence identifies a NodeJS service.
const packageJsonFileName = "package.json";

// Zod schema for the subset of package.json fields this analyzer reads
// (name, engine pins, dependency maps). Unknown keys are stripped by zod.
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const packageJsonSchema = z.object({
	name: z.optional(z.string()),
	version: z.optional(z.string()),
	engines: z.optional(
		z.object({
			node: z.optional(z.string()),
			deno: z.optional(z.string()),
		}),
	),
	dependencies: z.optional(z.record(z.string(), z.string())),
	devDependencies: z.optional(z.record(z.string(), z.string())),
});

// Static type derived from the schema; every field is optional.
type PackageJson = z.infer<typeof packageJsonSchema>;
+
// Signature shared by the framework/tool-specific config-var detectors
// (Prisma, Next.js, Express). Returning null means "nothing detected".
interface ConfigVarDetector {
	(fs: FileSystem, root: string, packageJson: PackageJson): Promise<ConfigVar | ConfigVar[] | null>;
}

// Package manager, detected from the project's lock file.
// TODO(gio): add bun, deno, ...
type NodeJSPackageManager =
	| {
			name: "npm";
			version?: string;
	  }
	| {
			name: "pnpm";
			version?: string;
	  }
	| {
			name: "yarn";
			version?: string;
	  };

// JavaScript runtime, detected from package.json "engines".
type Runtime =
	| {
			name: "node";
			version?: string;
	  }
	| {
			name: "deno";
			version?: string;
	  };

// Fallbacks used when no explicit engine pin / lock file is found.
const defaultRuntime: Runtime = {
	name: "node",
};

const defaultPackageManager: NodeJSPackageManager = {
	name: "npm",
};
+
+export class NodeJSAnalyzer implements ServiceAnalyzer {
+	detect(fs: FileSystem, root: string) {
+		const packageJsonPath = path.join(root, packageJsonFileName);
+		if (!fs.exists(packageJsonPath)) {
+			return false;
+		}
+		// TODO(gio): maybe it's deno
+		return true;
+	}
+
+	async analyze(fs: FileSystem, root: string) {
+		const packageJsonPath = path.join(root, packageJsonFileName);
+		const packageJson = JSON.parse(await fs.readFile(packageJsonPath));
+		const runtime = this.detectRuntime(packageJson);
+		const packageManager = this.detectPackageManager(fs, root);
+		console.log(runtime, packageManager);
+		let envVars = await this.detectEnvVars(fs, root);
+		const detectors: ConfigVarDetector[] = [this.detectPrismaSchema, this.detectNextjs, this.detectExpressjs];
+		const all = await Promise.all(
+			detectors.map(async (detector) => {
+				return await detector(fs, root, packageJson);
+			}),
+		);
+		all.map((cv) => {
+			if (Array.isArray(cv)) {
+				cv.forEach((v) => this.mergeConfigVars(envVars, v));
+			} else {
+				this.mergeConfigVars(envVars, cv);
+			}
+		});
+		envVars = envVars.filter((v) => v.semanticType != ConfigVarSemanticType.EXPANDED_ENV_VAR);
+		envVars.forEach((v) => augmentConfigVar(v));
+		return {
+			name: "name" in packageJson ? packageJson.name : "NodeJS",
+			location: root,
+			configVars: envVars,
+			commands: [],
+		};
+	}
+
+	private mergeConfigVars(configVars: ConfigVar[], v: ConfigVar | null) {
+		if (v == null) {
+			return;
+		}
+		const existing = configVars.find((c) => c.name === v.name);
+		if (existing != null) {
+			existing.category = existing.category ?? v.category;
+			existing.semanticType = existing.semanticType ?? v.semanticType;
+			existing.defaultValue = existing.defaultValue ?? v.defaultValue;
+			existing.description = existing.description ?? v.description;
+			existing.required = existing.required ?? v.required;
+			existing.sensitive = v.sensitive;
+		} else {
+			configVars.push(v);
+		}
+	}
+
+	private detectRuntime(packageJson: PackageJson): Runtime {
+		if (packageJson.engines && packageJson.engines.node) {
+			return {
+				name: "node",
+				version: packageJson.engines.node,
+			};
+		} else if (packageJson.engines && packageJson.engines.deno) {
+			return {
+				name: "deno",
+				version: packageJson.engines.deno,
+			};
+		}
+		return defaultRuntime;
+	}
+
+	private detectPackageManager(fs: FileSystem, root: string): NodeJSPackageManager | null {
+		if (fs.exists(path.join(root, "package-lock.yaml"))) {
+			return {
+				name: "npm",
+			};
+		} else if (fs.exists(path.join(root, "pnpm-lock.yaml"))) {
+			return {
+				name: "pnpm",
+			};
+		} else if (fs.exists(path.join(root, "yarn.lock"))) {
+			return {
+				name: "yarn",
+			};
+		}
+		return defaultPackageManager;
+	}
+
+	private async detectEnvVars(fs: FileSystem, root: string): Promise<ConfigVar[]> {
+		const envFilePath = path.join(root, ".env");
+		if (!fs.exists(envFilePath)) {
+			return [];
+		}
+		const envVars: ConfigVar[] = [];
+		const fileContent = await fs.readFile(envFilePath);
+		const parsedEnv = parseDotenv(fileContent);
+		for (const key in parsedEnv) {
+			if (Object.prototype.hasOwnProperty.call(parsedEnv, key)) {
+				const defaultValue = parsedEnv[key];
+				const vars = expandValue(defaultValue);
+				envVars.push({
+					name: key,
+					defaultValue,
+					category: ConfigVarCategory.EnvironmentVariable,
+					semanticType: ConfigVarSemanticType.EXPANDED_ENV_VAR,
+				});
+				vars.forEach((v) => {
+					envVars.push({
+						name: v,
+						defaultValue: "", // TODO(gio): add default value
+						category: ConfigVarCategory.EnvironmentVariable,
+					});
+				});
+			}
+		}
+		return envVars;
+	}
+
+	private async detectPrismaSchema(
+		fs: FileSystem,
+		root: string,
+		packageJson: PackageJson,
+	): Promise<ConfigVar | ConfigVar[] | null> {
+		if (packageJson?.dependencies?.prisma == null && packageJson?.devDependencies?.prisma == null) {
+			return null;
+		}
+		let schemaPath = path.join(root, "prisma", "schema.prisma");
+		if (!fs.exists(schemaPath)) {
+			schemaPath = path.join(root, "schema.prisma");
+			if (!fs.exists(schemaPath)) {
+				return null;
+			}
+		}
+		const schemaContent = await fs.readFile(schemaPath);
+		const ast = parsePrismaSchema(schemaContent);
+		let urlVar: string | null = null;
+		let dbType: ConfigVarSemanticType | null = null;
+		for (const element of ast.declarations) {
+			if (element.kind === "datasource") {
+				for (const prop of element.members) {
+					if (prop.kind === "config") {
+						switch (prop.name.value) {
+							case "url": {
+								if (
+									prop.value.kind === "functionCall" &&
+									prop.value.path.value[0] === "env" &&
+									prop.value.args != null
+								) {
+									const arg = prop.value.args[0];
+									if (arg.kind === "literal" && typeof arg.value === "string") {
+										urlVar = arg.value;
+									}
+								}
+								break;
+							}
+							case "provider": {
+								if (prop.value.kind === "literal" && typeof prop.value.value === "string") {
+									switch (prop.value.value) {
+										case "postgresql": {
+											dbType = ConfigVarSemanticType.POSTGRES_URL;
+											break;
+										}
+										case "sqlite": {
+											dbType = ConfigVarSemanticType.SQLITE_PATH;
+											break;
+										}
+										default: {
+											throw new Error(`Unsupported database type: ${prop.value.value}`);
+										}
+									}
+								}
+								break;
+							}
+						}
+					}
+				}
+			}
+		}
+		if (urlVar == null || dbType == null) {
+			return null;
+		}
+		return {
+			name: urlVar,
+			category: ConfigVarCategory.EnvironmentVariable,
+			semanticType: dbType,
+		};
+	}
+
+	private async detectNextjs(fs: FileSystem, root: string): Promise<ConfigVar | ConfigVar[] | null> {
+		const nextConfigPath = path.join(root, "next.config.mjs");
+		if (!fs.exists(nextConfigPath)) {
+			return null;
+		}
+		return {
+			name: "PORT",
+			category: ConfigVarCategory.EnvironmentVariable,
+			semanticType: ConfigVarSemanticType.PORT,
+		};
+	}
+
+	private async detectExpressjs(
+		fs: FileSystem,
+		root: string,
+		packageJson: PackageJson,
+	): Promise<ConfigVar | ConfigVar[] | null> {
+		if (packageJson?.dependencies?.express == null && packageJson?.devDependencies?.express == null) {
+			return null;
+		}
+		return {
+			name: "PORT",
+			category: ConfigVarCategory.EnvironmentVariable,
+			semanticType: ConfigVarSemanticType.PORT,
+		};
+	}
+}
diff --git a/apps/canvas/back/src/lib/semantics.ts b/apps/canvas/back/src/lib/semantics.ts
new file mode 100644
index 0000000..2cbd1ab
--- /dev/null
+++ b/apps/canvas/back/src/lib/semantics.ts
@@ -0,0 +1,64 @@
+import { ConfigVar, ConfigVarSemanticType } from "./analyze";
+
+export function augmentConfigVar(cv: ConfigVar) {
+	if (cv.semanticType != null) {
+		return;
+	}
+	const name = cv.name.toLowerCase();
+	const value = cv.defaultValue ? String(cv.defaultValue).toLowerCase() : "";
+	if (name.includes("postgres") || name.includes("pg")) {
+		if (name.includes("url")) cv.semanticType = ConfigVarSemanticType.POSTGRES_URL;
+		if (name.includes("password") || name.includes("pass"))
+			cv.semanticType = ConfigVarSemanticType.POSTGRES_PASSWORD;
+		if (name.includes("user")) cv.semanticType = ConfigVarSemanticType.POSTGRES_USER;
+		if (
+			name.includes("db_name") ||
+			name.includes("database_name") ||
+			(name.includes("db") && !name.includes("url"))
+		)
+			cv.semanticType = ConfigVarSemanticType.POSTGRES_DB;
+		if (name.includes("port")) cv.semanticType = ConfigVarSemanticType.POSTGRES_PORT;
+		if (name.includes("host")) cv.semanticType = ConfigVarSemanticType.POSTGRES_HOST;
+		if (name.includes("sslmode") || name.includes("ssl")) cv.semanticType = ConfigVarSemanticType.POSTGRES_SSL;
+	}
+	if (name.includes("mongo")) {
+		if (name.includes("url") || name.includes("uri")) cv.semanticType = ConfigVarSemanticType.MONGO_URL;
+		if (name.includes("password") || name.includes("pass")) cv.semanticType = ConfigVarSemanticType.MONGO_PASSWORD;
+		if (name.includes("user")) cv.semanticType = ConfigVarSemanticType.MONGO_USER;
+		if (
+			name.includes("db_name") ||
+			name.includes("database_name") ||
+			(name.includes("db") && !name.includes("url"))
+		)
+			cv.semanticType = ConfigVarSemanticType.MONGO_DB;
+		if (name.includes("port")) cv.semanticType = ConfigVarSemanticType.MONGO_PORT;
+		if (name.includes("host")) cv.semanticType = ConfigVarSemanticType.MONGO_HOST;
+		if (name.includes("ssl")) cv.semanticType = ConfigVarSemanticType.MONGO_SSL;
+	}
+	if (name.includes("sqlite_path") || value.endsWith(".sqlite") || value.endsWith(".db3") || value.endsWith(".db")) {
+		cv.semanticType = ConfigVarSemanticType.SQLITE_PATH;
+	}
+	if (name.includes("sqlite") && (name.includes("url") || name.includes("uri") || name.includes("path"))) {
+		cv.semanticType = ConfigVarSemanticType.SQLITE_PATH;
+	}
+	if (name.includes("database_url") || name.includes("db_url")) {
+		cv.semanticType = ConfigVarSemanticType.DATABASE_URL;
+	}
+	if (name.includes("path") || name.includes("file_path") || name.includes("dir") || name.includes("directory")) {
+		if (
+			!name.includes("url") &&
+			(value.startsWith("./") || value.startsWith("../") || value.startsWith("/") || /^[a-z]:[\\/]/.test(value))
+		) {
+			cv.semanticType = ConfigVarSemanticType.FILESYSTEM_PATH;
+		}
+	}
+	if (
+		!name.includes("url") &&
+		(value.startsWith("./") || value.startsWith("../") || value.startsWith("/") || /^[a-z]:[\\/]/.test(value))
+	) {
+		cv.semanticType = ConfigVarSemanticType.FILESYSTEM_PATH;
+	}
+	if (name.includes("port")) {
+		cv.semanticType = ConfigVarSemanticType.PORT;
+	}
+}