diff --git a/packages/local/package.json b/packages/local/package.json
index 4556e0b7..a6d1f4c8 100644
--- a/packages/local/package.json
+++ b/packages/local/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@tableland/local",
-  "version": "2.0.1",
+  "version": "2.0.2",
   "description": "Tooling to start a sandboxed Tableland network.",
   "repository": "https://github.com/tablelandnetwork/local-tableland",
   "license": "MIT",
diff --git a/packages/local/src/main.ts b/packages/local/src/main.ts
index 253a7c27..3dc3a9f4 100644
--- a/packages/local/src/main.ts
+++ b/packages/local/src/main.ts
@@ -5,11 +5,13 @@ import { type ChildProcess } from "node:child_process";
 import { EventEmitter } from "node:events";
 import spawn from "cross-spawn";
 import shell from "shelljs";
+import { getDefaultProvider } from "ethers";
 import { chalk } from "./chalk.js";
 import { ValidatorDev, ValidatorPkg } from "./validators.js";
 import {
   buildConfig,
   type Config,
+  checkPortInUse,
   defaultRegistryDir,
   inDebugMode,
   isValidPort,
@@ -22,12 +24,15 @@ import {
   getValidator,
   logSync,
   pipeNamedSubprocess,
-  probePortInUse,
   waitForReady,
 } from "./util.js";
 
 const spawnSync = spawn.sync;
 
+// TODO: maybe this can be parsed out of the deploy process?
+// Since it's always the same address just hardcoding for now.
+const registryAddress = "0xe7f1725e7734ce288f8367e1bb143e90bb3f0512";
+
 class LocalTableland {
   config;
   initEmitter;
@@ -62,7 +67,7 @@ class LocalTableland {
       typeof config.registryDir === "string" &&
       config.registryDir.trim() !== ""
     ) {
-      this.registryDir = config.registryDir;
+      this.registryDir = config.registryDir.trim();
     } else {
       this.registryDir = defaultRegistryDir();
     }
@@ -80,19 +85,15 @@ class LocalTableland {
   }
 
   async #_start(config: Config = {}): Promise<void> {
-    if (
-      typeof this.registryDir !== "string" ||
-      this.registryDir.trim() === ""
-    ) {
+    if (typeof this.registryDir !== "string" || this.registryDir === "") {
       throw new Error("cannot start a local network without Registry");
     }
 
     // make sure we are starting fresh
-    // TODO: I don't think this is doing anything anymore...
    this.#_cleanup();
 
     // Check if the hardhat port is in use (defaults to 5 retries, 300ms b/w each try)
-    const registryPortIsTaken = await probePortInUse(this.registryPort);
+    const registryPortIsTaken = await checkPortInUse(this.registryPort);
     // Note: this generally works, but there is a chance that the port will be
     // taken but returns `false`. E.g., try racing two instances at *exactly*
     // the same, and `EADDRINUSE` occurs. But generally, it'll work as expected.
@@ -101,14 +102,18 @@ class LocalTableland {
     // Else, notify the user only if it's a not the default and is custom.
     if (registryPortIsTaken) {
       throw new Error(`port ${this.registryPort} already in use`);
-    } else {
-      // Notify that we're using a custom port since it's not the default 8545
+    }
+
+    // Notify that we're using a custom port since it's not the default 8545
+    if (
       this.registryPort !== this.defaultRegistryPort &&
-        shell.echo(
-          `[${chalk.magenta.bold("Notice")}] Registry is using custom port ${
-            this.registryPort
-          }`
-        );
+      this.silent !== true
+    ) {
+      shell.echo(
+        `[${chalk.magenta.bold("Notice")}] Registry is using custom port ${
+          this.registryPort
+        }`
+      );
     }
 
     // You *must* store these in `process.env` to access within the hardhat subprocess
@@ -149,17 +154,16 @@ class LocalTableland {
     // wait until initialization is done
     await waitForReady(registryReadyEvent, this.initEmitter);
 
-    // Deploy the Registry to the Hardhat node
-    logSync(
-      spawnSync(
-        isWindows() ? "npx.cmd" : "npx",
-        ["hardhat", "run", "--network", "localhost", "scripts/deploy.ts"],
-        {
-          cwd: this.registryDir,
-        }
-      ),
-      !inDebugMode()
-    );
+    await new Promise((resolve) => setTimeout(resolve, 5000));
+
+    this._deployRegistry();
+
+    const deployed = await this.#_ensureRegistry();
+    if (!deployed) {
+      throw new Error(
+        "deploying registry contract failed, cannot start network"
+      );
+    }
 
     // Need to determine if we are starting the validator via docker
     // and a local repo, or if are running a binary etc...
@@ -172,7 +176,7 @@ class LocalTableland {
     // run this before starting in case the last instance of the validator didn't get cleanup after
     // this might be needed if a test runner force quits the parent local-tableland process
     this.validator.cleanup();
-    this.validator.start();
+    this.validator.start(registryAddress);
 
     // TODO: It seems like this check isn't sufficient to see if the process is gonna get to a point
     // where the on error listener can be attached.
@@ -220,6 +224,33 @@ class LocalTableland {
     console.log("\n\n*************************************\n");
   }
 
+  // note: Tests are using sinon to stub this method. Because typescript compiles ecmascript
+  // private features, i.e. hash syntax, in a way that does not work with sinon we must
+  // use the ts private modifier here in order to test the failure to deploy the registry.
+  private _deployRegistry(): void {
+    // Deploy the Registry to the Hardhat node
+    logSync(
+      spawnSync(
+        isWindows() ? "npx.cmd" : "npx",
+        ["hardhat", "run", "--network", "localhost", "scripts/deploy.ts"],
+        {
+          cwd: this.registryDir,
+        }
+      ),
+      !inDebugMode()
+    );
+  }
+
+  async #_ensureRegistry(): Promise<boolean> {
+    const provider = getDefaultProvider(
+      `http://127.0.0.1:${this.registryPort}`
+    );
+    const code = await provider.getCode(registryAddress);
+
+    // if the contract exists, and is not empty, code will not be equal to 0x
+    return code !== "0x";
+  }
+
   async #_setReady(): Promise<void> {
     this.ready = true;
     while (this.#_readyResolves.length > 0) {
diff --git a/packages/local/src/util.ts b/packages/local/src/util.ts
index 0fca6edb..f703650f 100644
--- a/packages/local/src/util.ts
+++ b/packages/local/src/util.ts
@@ -402,31 +402,6 @@ export async function checkPortInUse(port: number): Promise<boolean> {
   });
 }
 
-/**
- * Probe a port with retries to check if it is in use.
- * @param port The port number.
- * @param tries Number of retries to attempt. Defaults to 5.
- * @param delay Time to wait between retries (in milliseconds). Defaults to 300.
- * @returns true if the port is in use, false otherwise
- */
-export async function probePortInUse(
-  port: number,
-  tries: number = 5,
-  delay: number = 300
-): Promise<boolean> {
-  let numTries = 0;
-  while (numTries < tries) {
-    // Note: consider splitting the delay into before and after this check
-    // Racing two instances might cause this to incorrectly return `false`
-    const portIsTaken = await checkPortInUse(port);
-    if (!portIsTaken) return false;
-
-    await new Promise((resolve) => setTimeout(resolve, delay));
-    numTries++;
-  }
-  return true;
-}
-
 const hardhatAccounts = [
   "ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80",
   "59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d",
diff --git a/packages/local/src/validators.ts b/packages/local/src/validators.ts
index 522c8c84..c66a168e 100644
--- a/packages/local/src/validators.ts
+++ b/packages/local/src/validators.ts
@@ -34,7 +34,7 @@ class ValidatorPkg {
     }
   }
 
-  start(): void {
+  start(registryAddress?: string): void {
     const binPath = getBinPath();
     if (binPath == null) {
       throw new Error(
@@ -141,7 +141,10 @@ class ValidatorDev {
     }
   }
 
-  start(): void {
+  start(registryAddress?: string): void {
+    if (typeof registryAddress !== "string") {
+      throw new Error("must provide registry address");
+    }
     // Add the registry address to the Validator config
     // TODO: when https://github.com/tablelandnetwork/go-tableland/issues/317 is
     // resolved we may be able to refactor a lot of this
@@ -163,14 +166,11 @@ class ValidatorDev {
     if (
       validatorConfig.Chains[0].Registry.EthEndpoint !==
      `ws://localhost:${this.registryPort}`
-    )
+    ) {
       validatorConfig.Chains[0].Registry.EthEndpoint = `ws://localhost:${this.registryPort}`;
+    }
 
-    // TODO: this could be parsed out of the deploy process, but since
-    // it's always the same address just hardcoding it here
-    // TODO: maybe we can get this from evm-tableland?
-    validatorConfig.Chains[0].Registry.ContractAddress =
-      "0xe7f1725e7734ce288f8367e1bb143e90bb3f0512";
+    validatorConfig.Chains[0].Registry.ContractAddress = registryAddress;
 
     writeFileSync(configFilePath, JSON.stringify(validatorConfig, null, 2));
 
diff --git a/packages/local/test/e2e.test.ts b/packages/local/test/e2e.test.ts
index b033a147..35f831e3 100644
--- a/packages/local/test/e2e.test.ts
+++ b/packages/local/test/e2e.test.ts
@@ -1,220 +1,17 @@
-import { type Server } from "node:net";
-import { join } from "node:path";
-import { readFileSync } from "node:fs";
-import { describe, afterEach, after, before, test } from "mocha";
+import { describe, after, before, test } from "mocha";
 import chai from "chai";
 import {
-  checkPortInUse,
   getAccounts,
   getDatabase,
   getRegistry,
-  getRegistryPort,
   getValidator,
 } from "../src/util.js";
 import { LocalTableland } from "../src/main.js";
-import {
-  logMetrics,
-  measureExecutionTime,
-  startMockServer,
-  stopMockServer,
-} from "./util.js";
 
 const expect = chai.expect;
 const localTablelandChainId = 31337;
 
-const executionTimes: {
-  start: number[];
-  shutdown: number[];
-} = {
-  start: [],
-  shutdown: [],
-};
-
-describe("Validator and Chain startup and shutdown", function () {
-  let server: Server | undefined;
-  let lt: LocalTableland | undefined;
-  const defaultPort = 8545; // Used for hardhat
-
-  this.timeout(30000); // Starting up LT takes 3000-7000ms; shutting down takes <10-10000ms
-  afterEach(async function () {
-    // Ensure all processes are cleaned up after each test
-    if (server != null) {
-      await stopMockServer(server);
-      server = undefined;
-    }
-    // Ensure both validator and registry haven't already been shut down & cleaned up
-    // before attempting to shut them down
-    if (lt?.validator !== undefined && lt?.registry !== undefined) {
-      const shutdownExecutionTime = await measureExecutionTime(
-        async () => await lt?.shutdown()
-      );
-      executionTimes.shutdown.push(shutdownExecutionTime);
-      lt = undefined;
-    }
-  });
-
-  test("successfully starts and shuts down", async function () {
-    lt = new LocalTableland({ silent: true });
-    const startupExecutionTime = await measureExecutionTime(
-      async () => await lt?.start()
-    );
-    executionTimes.start.push(startupExecutionTime);
-    expect(lt.validator).to.not.equal(undefined);
-    expect(lt.registry).to.not.equal(undefined);
-
-    const shutdownExecutionTime = await measureExecutionTime(
-      async () => await lt?.shutdown()
-    );
-    executionTimes.shutdown.push(shutdownExecutionTime);
-    expect(lt.validator).to.be.equal(undefined);
-    expect(lt.registry).to.be.equal(undefined);
-  });
-
-  test("successfully starts with retry logic after port 8545 initially in use", async function () {
-    lt = new LocalTableland({ silent: true });
-    // Start a server on port 8545 to block Local Tableland from using it
-    server = await startMockServer(defaultPort);
-    // Verify that the server is running on port 8545
-    const portInUse = await checkPortInUse(defaultPort);
-    expect(portInUse).to.equal(true);
-
-    // Shut down the server after 300ms, allowing Local Tableland to use port 8545
-    // This will execute 2 of 5 retries on port 8545 before opening the port
-    setTimeout(() => {
-      // TODO: eslint typescript is complaining about a promise as an argument to setTimeout,
-      // but this is really ugly. Need to consider changing linting rules
-      (async function () {
-        await stopMockServer(server as Server);
-      })().catch((err) => console.log(err));
-    }, 300);
-
-    const startupExecutionTime = await measureExecutionTime(async () => {
-      return await lt?.start();
-    });
-    executionTimes.start.push(startupExecutionTime);
-
-    // Check that the network is running and can be queried
-    const accounts = getAccounts();
-    const signer = accounts[1];
-    const db = getDatabase(signer);
-    // Make sure LT materialized the healthbot table
-    await new Promise((resolve) => setTimeout(() => resolve(undefined), 2000));
-    const { results } = await db
-      .prepare(`SELECT * FROM healthbot_31337_1;`)
-      .all();
-    expect(results).to.not.be.deep.equal([]);
-  });
-  test("fails to start due to port 8545 in use", async function () {
-    lt = new LocalTableland({ silent: true });
-    // Start a server on port 8545 to block Local Tableland from using it
-    server = await startMockServer(defaultPort);
-    // Check if it is in use
-    const portInUse = await checkPortInUse(defaultPort);
-    expect(portInUse).to.equal(true);
-
-    // Local Tableland should not start successfully
-    // No `measureExecutionTime` wrapper needed
-    await expect(
-      (async function () {
-        await lt.start();
-      })()
-    ).to.be.rejectedWith(`port ${defaultPort} already in use`);
-  });
-
-  describe("with custom registryPort", function () {
-    test("successfully starts and works with SDK", async function () {
-      const customPort = 9999;
-      lt = new LocalTableland({
-        silent: true,
-        registryPort: customPort,
-      });
-      // Make sure it is not in use
-      const portInUse = await checkPortInUse(customPort);
-      expect(portInUse).to.equal(false);
-
-      // Local Tableland should start successfully on custom Registry port
-      const startupExecutionTime = await measureExecutionTime(
-        async () => await lt?.start()
-      );
-      executionTimes.start.push(startupExecutionTime);
-      const ltPort = getRegistryPort(lt);
-      expect(ltPort).to.equal(customPort);
-
-      // Should still be able to use SDK
-      const accounts = getAccounts(lt);
-      expect(accounts.length).to.equal(20);
-      const signer = accounts[1];
-      const db = getDatabase(signer);
-      const { meta } = await db
-        .prepare(`CREATE TABLE test_registry (id INT);`)
-        .run();
-      const tableName = meta.txn?.name ?? "";
-      expect(tableName).to.match(/^test_registry_31337_\d+$/);
-    });
-
-    test("successfully start by overwriting validator config and reset config on shutdown", async function () {
-      const customPort = 9999;
-      lt = new LocalTableland({
-        silent: true,
-        registryPort: customPort,
-      });
-      // Make sure it is not in use
-      const portInUse = await checkPortInUse(customPort);
-      expect(portInUse).to.equal(false);
-
-      // Local Tableland should start successfully on custom Registry port
-      const startupExecutionTime = await measureExecutionTime(
-        async () => await lt?.start()
-      );
-      executionTimes.start.push(startupExecutionTime);
-      const ltPort = getRegistryPort(lt);
-      expect(ltPort).to.equal(customPort);
-
-      // Config file should have been updated to use custom port 9999
-      const configFilePath = join(
-        lt.validator?.validatorDir ?? "",
-        "config.json"
-      );
-      let configFile = readFileSync(configFilePath);
-      let validatorConfig = JSON.parse(configFile.toString());
-      expect(validatorConfig.Chains[0].Registry.EthEndpoint).to.equal(
-        `ws://localhost:${ltPort}`
-      );
-
-      // Shut down Local Tableland and ensure validator config file is reset
-      const shutdownExecutionTime = await measureExecutionTime(
-        async () => await lt?.shutdown()
-      );
-      executionTimes.shutdown.push(shutdownExecutionTime);
-      configFile = readFileSync(configFilePath);
-      validatorConfig = JSON.parse(configFile.toString());
-      expect(validatorConfig.Chains[0].Registry.EthEndpoint).to.equal(
-        `ws://localhost:8545`
-      );
-    });
-
-    test("fails to start due to custom port in use", async function () {
-      const customPort = 9999;
-      lt = new LocalTableland({ silent: true, registryPort: customPort });
-      // Start a server on `customPort` to block Local Tableland from using it
-      server = await startMockServer(customPort);
-      // Check if it is in use
-      const portInUse = await checkPortInUse(customPort);
-      expect(portInUse).to.equal(true);
-      // Try to start Local Tableland, which will attempt to use `customPort` and fail
-      await expect(
-        (async function () {
-          await lt.start();
-        })()
-      ).to.be.rejectedWith(`port ${customPort} already in use`);
-      // Ensure Local Tableland subprocesses did not start and/or are not hanging
-      expect(lt.validator).to.equal(undefined);
-      expect(lt.registry).to.equal(undefined);
-    });
-  });
-});
-
-describe("Validator, Chain, and SDK work end to end", function () {
+describe("network end to end", function () {
   const lt = new LocalTableland({
     silent: true,
   });
@@ -223,22 +20,11 @@
   // These tests take a bit longer than normal since we are running them against an actual network
   this.timeout(30000);
   before(async function () {
-    const startupExecutionTime = await measureExecutionTime(
-      async () => await lt.start()
-    );
-    executionTimes.start.push(startupExecutionTime);
-    await new Promise((resolve) => setTimeout(() => resolve(undefined), 2000));
+    await lt.start();
   });
 
   after(async function () {
-    const shutdownExecutionTime = await measureExecutionTime(
-      async () => await lt.shutdown()
-    );
-    executionTimes.shutdown.push(shutdownExecutionTime);
-    // Calculate & log the min, max, median, and average start and shutdown times
-    console.log(`\nExecution metrics`);
-    logMetrics("start()", executionTimes.start);
-    logMetrics("shutdown()", executionTimes.shutdown);
+    await lt.shutdown();
   });
 
   test("creates a table that can be read from", async function () {
diff --git a/packages/local/test/start.test.ts b/packages/local/test/start.test.ts
new file mode 100644
index 00000000..c4cbd816
--- /dev/null
+++ b/packages/local/test/start.test.ts
@@ -0,0 +1,196 @@
+import { type Server } from "node:net";
+import { join } from "node:path";
+import { readFileSync } from "node:fs";
+import { describe, test } from "mocha";
+import chai from "chai";
+import { stub, restore, assert as sinonAssert } from "sinon";
+import {
+  checkPortInUse,
+  getAccounts,
+  getDatabase,
+  getRegistryPort,
+} from "../src/util.js";
+import { LocalTableland } from "../src/main.js";
+import {
+  measureExecutionTime,
+  startMockServer,
+  stopMockServer,
+} from "./util.js";
+
+const expect = chai.expect;
+const executionTimes: {
+  start: number[];
+  shutdown: number[];
+} = {
+  start: [],
+  shutdown: [],
+};
+
+describe("startup and shutdown", function () {
+  let server: Server | undefined;
+  let lt: LocalTableland | undefined;
+  const defaultPort = 8545; // Used for hardhat
+
+  this.timeout(30000); // Starting up LT takes 3000-7000ms; shutting down takes <10-10000ms
+  afterEach(async function () {
+    restore();
+    // Ensure all processes are cleaned up after each test
+    if (server != null) {
+      await stopMockServer(server);
+      server = undefined;
+    }
+    // Ensure both validator and registry haven't already been shut down & cleaned up
+    // before attempting to shut them down
+    if (lt?.validator !== undefined && lt?.registry !== undefined) {
+      const shutdownExecutionTime = await measureExecutionTime(
+        async () => await lt?.shutdown()
+      );
+      executionTimes.shutdown.push(shutdownExecutionTime);
+      lt = undefined;
+    }
+  });
+
+  test("successfully starts and shuts down", async function () {
+    lt = new LocalTableland({ silent: true });
+    const startupExecutionTime = await measureExecutionTime(
+      async () => await lt?.start()
+    );
+    executionTimes.start.push(startupExecutionTime);
+    expect(lt.validator).to.not.equal(undefined);
+    expect(lt.registry).to.not.equal(undefined);
+
+    const shutdownExecutionTime = await measureExecutionTime(
+      async () => await lt?.shutdown()
+    );
+    executionTimes.shutdown.push(shutdownExecutionTime);
+    expect(lt.validator).to.be.equal(undefined);
+    expect(lt.registry).to.be.equal(undefined);
+  });
+
+  test("fails to start due to port 8545 in use", async function () {
+    lt = new LocalTableland({ silent: true });
+    // Start a server on port 8545 to block Local Tableland from using it
+    server = await startMockServer(defaultPort);
+    // Check if it is in use
+    const portInUse = await checkPortInUse(defaultPort);
+    expect(portInUse).to.equal(true);
+
+    // Local Tableland should not start successfully
+    // No `measureExecutionTime` wrapper needed
+    await expect(
+      (async function () {
+        await lt.start();
+      })()
+    ).to.be.rejectedWith(`port ${defaultPort} already in use`);
+  });
+
+  test("fails to start due to registry deploy failure", async function () {
+    const lt = new LocalTableland({ silent: true });
+    // note: need to cast as `any` since the method is private
+    const deployStub = stub(lt, "_deployRegistry" as any);
+
+    // Try to start Local Tableland, which should fail because the registry deploy is stubbed out
+    await expect(
+      (async function () {
+        await lt.start();
+      })()
+    ).to.be.rejectedWith(
+      "deploying registry contract failed, cannot start network"
+    );
+
+    sinonAssert.calledOnce(deployStub);
+    restore();
+  });
+
+  describe("with custom registryPort", function () {
+    test("successfully starts and works with SDK", async function () {
+      const customPort = 9999;
+      lt = new LocalTableland({
+        silent: true,
+        registryPort: customPort,
+      });
+      // Make sure it is not in use
+      const portInUse = await checkPortInUse(customPort);
+      expect(portInUse).to.equal(false);
+
+      // Local Tableland should start successfully on custom Registry port
+      const startupExecutionTime = await measureExecutionTime(
+        async () => await lt?.start()
+      );
+      executionTimes.start.push(startupExecutionTime);
+      const ltPort = getRegistryPort(lt);
+      expect(ltPort).to.equal(customPort);
+
+      // Should still be able to use SDK
+      const accounts = getAccounts(lt);
+      expect(accounts.length).to.equal(20);
+      const signer = accounts[1];
+      const db = getDatabase(signer);
+      const { meta } = await db
+        .prepare(`CREATE TABLE test_registry (id INT);`)
+        .run();
+      const tableName = meta.txn?.name ?? "";
""; + expect(tableName).to.match(/^test_registry_31337_\d+$/); + }); + + test("successfully start by overwriting validator config and reset config on shutdown", async function () { + const customPort = 9999; + lt = new LocalTableland({ + silent: true, + registryPort: customPort, + }); + // Make sure it is not in use + const portInUse = await checkPortInUse(customPort); + expect(portInUse).to.equal(false); + + // Local Tableland should start successfully on custom Registry port + const startupExecutionTime = await measureExecutionTime( + async () => await lt?.start() + ); + executionTimes.start.push(startupExecutionTime); + const ltPort = getRegistryPort(lt); + expect(ltPort).to.equal(customPort); + + // Config file should have been updated to use custom port 9999 + const configFilePath = join( + lt.validator?.validatorDir ?? "", + "config.json" + ); + let configFile = readFileSync(configFilePath); + let validatorConfig = JSON.parse(configFile.toString()); + expect(validatorConfig.Chains[0].Registry.EthEndpoint).to.equal( + `ws://localhost:${ltPort}` + ); + + // Shut down Local Tableland and ensure validator config file is reset + const shutdownExecutionTime = await measureExecutionTime( + async () => await lt?.shutdown() + ); + executionTimes.shutdown.push(shutdownExecutionTime); + configFile = readFileSync(configFilePath); + validatorConfig = JSON.parse(configFile.toString()); + expect(validatorConfig.Chains[0].Registry.EthEndpoint).to.equal( + `ws://localhost:8545` + ); + }); + + test("fails to start due to custom port in use", async function () { + const customPort = 9999; + lt = new LocalTableland({ silent: true, registryPort: customPort }); + // Start a server on `customPort` to block Local Tableland from using it + server = await startMockServer(customPort); + // Check if it is in use + const portInUse = await checkPortInUse(customPort); + expect(portInUse).to.equal(true); + // Try to start Local Tableland, which will attempt to use `customPort` and fail + await expect( + (async function () { + await lt.start(); + })() + ).to.be.rejectedWith(`port ${customPort} already in use`); + // Ensure Local Tableland subprocesses did not start and/or are not hanging + expect(lt.validator).to.equal(undefined); + expect(lt.registry).to.equal(undefined); + }); + }); +}); diff --git a/packages/sdk/package.json b/packages/sdk/package.json index 281058ba..6c633979 100644 --- a/packages/sdk/package.json +++ b/packages/sdk/package.json @@ -1,6 +1,6 @@ { "name": "@tableland/sdk", - "version": "4.5.0", + "version": "4.5.1", "description": "A database client and helpers for the Tableland network", "publishConfig": { "access": "public" diff --git a/packages/sdk/src/helpers/config.ts b/packages/sdk/src/helpers/config.ts index 8e441df5..4d1934b6 100644 --- a/packages/sdk/src/helpers/config.ts +++ b/packages/sdk/src/helpers/config.ts @@ -1,4 +1,3 @@ -import fs from "node:fs"; import { type WaitableTransactionReceipt } from "../registry/utils.js"; import { type FetchConfig } from "../validator/client/index.js"; import { type ChainName, getBaseUrl } from "./chains.js"; @@ -81,7 +80,9 @@ export async function extractChainId(conn: Config = {}): Promise { return chainId; } -const findOrCreateFile = function (filepath: string): Buffer { +const findOrCreateFile = async function (filepath: string): Promise { + const fs = await getFsModule(); + if (!fs.existsSync(filepath)) { fs.writeFileSync(filepath, JSON.stringify({})); } @@ -89,13 +90,27 @@ const findOrCreateFile = function (filepath: string): Buffer { return 
fs.readFileSync(filepath); }; +// TODO: next major we should remove the jsonFileAliases helper and expose it +// in a different package since it doesn't work in the browser. +const getFsModule = (function () { + let fs: any; + return async function () { + // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions + if (fs) return fs; + + fs = await import(/* webpackIgnore: true */ "fs"); + return fs; + }; +})(); + export function jsonFileAliases(filepath: string): AliasesNameMap { return { read: async function (): Promise { - const jsonBuf = findOrCreateFile(filepath); + const jsonBuf = await findOrCreateFile(filepath); return JSON.parse(jsonBuf.toString()); }, write: async function (nameMap: NameMapping) { + const fs = await getFsModule(); fs.writeFileSync(filepath, JSON.stringify(nameMap)); }, };
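// ---------------------------------------------------------------------------
// Editor's note — illustrative usage sketch, not part of the patch. It shows the
// Node-only `jsonFileAliases` helper changed above being wired into the SDK. The
// hardhat private key is account #0 from this repo's `hardhatAccounts` list; the
// `helpers` re-export path and the `aliases` option on `Database` are assumptions
// based on the SDK's documented aliases support, not confirmed by this diff.
// ---------------------------------------------------------------------------
import { Wallet, getDefaultProvider } from "ethers";
import { Database, helpers } from "@tableland/sdk";

// Local Tableland's hardhat node and its first funded account (well-known key).
const provider = getDefaultProvider("http://127.0.0.1:8545");
const signer = new Wallet(
  "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80",
  provider
);

// `jsonFileAliases` now lazily imports `fs`, so importing the SDK in a browser
// bundle no longer pulls in a Node built-in; the helper itself stays Node-only.
const db = new Database({
  signer,
  aliases: helpers.jsonFileAliases("./tableland.aliases.json"),
});

// Hypothetical table create; the alias ("users") gets recorded in the JSON file.
await db.prepare(`CREATE TABLE users (id integer primary key, name text);`).run();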