diff --git a/services/database/migrations/20240729112258-compiled_contracts-constraints.js b/services/database/migrations/20240729112258-compiled_contracts-constraints.js index 56d1ba3e9..4201bc91c 100644 --- a/services/database/migrations/20240729112258-compiled_contracts-constraints.js +++ b/services/database/migrations/20240729112258-compiled_contracts-constraints.js @@ -107,11 +107,16 @@ exports.down = function (db, callback) { ALTER TABLE compiled_contracts ADD CONSTRAINT compiled_contracts_pseudo_pkey UNIQUE (compiler, language, creation_code_hash, runtime_code_hash); - DROP FUNCTION IF EXISTS validate_json_object_keys; - DROP FUNCTION IF EXISTS validate_json_object_keys; - DROP FUNCTION IF EXISTS validate_compilation_artifacts; - DROP FUNCTION IF EXISTS validate_creation_code_artifacts; + ALTER TABLE compiled_contracts + DROP CONSTRAINT IF EXISTS compilation_artifacts_object, + DROP CONSTRAINT IF EXISTS creation_code_artifacts_object, + DROP CONSTRAINT IF EXISTS runtime_code_artifacts_object; + DROP FUNCTION IF EXISTS validate_runtime_code_artifacts; + DROP FUNCTION IF EXISTS validate_creation_code_artifacts; + DROP FUNCTION IF EXISTS validate_compilation_artifacts; + DROP FUNCTION IF EXISTS validate_json_object_keys(jsonb, text[], text[]); + DROP FUNCTION IF EXISTS validate_json_object_keys(jsonb, text[]); `, ), ], diff --git a/services/database/migrations/20240918154734-add-sources-table.js b/services/database/migrations/20240918154734-add-sources-table.js new file mode 100644 index 000000000..2936b1f7d --- /dev/null +++ b/services/database/migrations/20240918154734-add-sources-table.js @@ -0,0 +1,90 @@ +"use strict"; + +var async = require("async"); + +var dbm; +var type; +var seed; + +/** + * We receive the dbmigrate dependency from dbmigrate initially. + * This enables us to not have to rely on NODE_PATH. 
+ */ +exports.setup = function (options, seedLink) { + dbm = options.dbmigrate; + type = dbm.dataType; + seed = seedLink; +}; + +exports.up = function (db, callback) { + async.series( + [ + db.runSql.bind( + db, + ` + CREATE TABLE sources + ( + /* the sha256 hash of the source code */ + source_hash bytea NOT NULL PRIMARY KEY, + + /* the keccak256 hash of the source code */ + source_hash_keccak bytea NOT NULL, + + /* the actual source code content */ + content varchar NOT NULL, + + /* timestamps */ + created_at timestamptz NOT NULL DEFAULT NOW(), + updated_at timestamptz NOT NULL DEFAULT NOW(), + + /* ownership */ + created_by varchar NOT NULL DEFAULT (current_user), + updated_by varchar NOT NULL DEFAULT (current_user), + + CONSTRAINT source_hash_check CHECK (source_hash = digest(content, 'sha256')) + ); + + CREATE TABLE compiled_contracts_sources + ( + id uuid NOT NULL PRIMARY KEY DEFAULT gen_random_uuid(), + + /* the specific compilation and the specific source */ + compilation_id uuid NOT NULL REFERENCES compiled_contracts(id), + source_hash bytea NOT NULL REFERENCES sources(source_hash), + + /* the file path associated with this source code in the compilation */ + path varchar NOT NULL, + + CONSTRAINT compiled_contracts_sources_pseudo_pkey UNIQUE (compilation_id, path) + ); + + CREATE INDEX compiled_contracts_sources_source_hash ON compiled_contracts_sources USING btree (source_hash); + CREATE INDEX compiled_contracts_sources_compilation_id ON compiled_contracts_sources (compilation_id); + + ALTER TABLE compiled_contracts DROP COLUMN sources; + `, + ), + ], + callback, + ); +}; + +exports.down = function (db, callback) { + async.series( + [ + db.runSql.bind( + db, + ` + DROP TABLE compiled_contracts_sources; + DROP TABLE sources; + ALTER TABLE compiled_contracts ADD COLUMN sources jsonb NOT NULL; + `, + ), + ], + callback, + ); +}; + +exports._meta = { + version: 1, +}; diff --git a/services/server/src/server/controllers/repository/repository.routes.ts b/services/server/src/server/controllers/repository/repository.routes.ts index bb17070bf..e5181d704 100644 --- a/services/server/src/server/controllers/repository/repository.routes.ts +++ b/services/server/src/server/controllers/repository/repository.routes.ts @@ -136,8 +136,8 @@ router /** * The following two routes are the replacement for the removed static file route that exposed RepositoryV1 - * The function getFileEndpoint get the sources from compiled_contracts.sources - * We need both of these routes because compiled_contracts.sources doesn't contain the metadata file + * The function getFileEndpoint get the sources from compiled_contracts_sources + * We need both of these routes because compiled_contracts_sources doesn't contain the metadata file */ // This route covers constructor-args.txt, creator-tx-hash.txt, library-map.json, immutable-references.json files diff --git a/services/server/src/server/services/storageServices/AbstractDatabaseService.ts b/services/server/src/server/services/storageServices/AbstractDatabaseService.ts index 76d43bdd8..8d599b64e 100644 --- a/services/server/src/server/services/storageServices/AbstractDatabaseService.ts +++ b/services/server/src/server/services/storageServices/AbstractDatabaseService.ts @@ -301,6 +301,18 @@ export default abstract class AbstractDatabaseService { }; } + const sourcesInformation = Object.keys(recompiledContract.solidity).map( + (path) => { + return { + path, + source_hash_keccak: bytesFromString( + keccak256(Buffer.from(recompiledContract.solidity[path])), + ), + content: 
recompiledContract.solidity[path], + }; + }, + ); + return { recompiledCreationCode, recompiledRuntimeCode: { @@ -331,13 +343,13 @@ export default abstract class AbstractDatabaseService { compiler: "solc", compiler_settings: Database.prepareCompilerSettings(recompiledContract), name: recompiledContract.name, - sources: recompiledContract.solidity, version: recompiledContract.compilerVersion, fully_qualified_name: `${compilationTargetPath}:${compilationTargetName}`, compilation_artifacts: compilationArtifacts, creation_code_artifacts: creationCodeArtifacts, runtime_code_artifacts: runtimeCodeArtifacts, }, + sourcesInformation, verifiedContract: { runtime_transformations, creation_transformations, @@ -357,7 +369,12 @@ export default abstract class AbstractDatabaseService { match: Match, databaseColumns: Database.DatabaseColumns, ): Promise { + // Get a client from the pool, so that we can execute all the insert queries within the same transaction + const client = await this.databasePool.connect(); + try { + // Start the sql transaction + await client.query("BEGIN"); let recompiledCreationCodeInsertResult: | QueryResult> | undefined; @@ -368,48 +385,45 @@ export default abstract class AbstractDatabaseService { // Add recompiled bytecodes if (databaseColumns.recompiledCreationCode) { recompiledCreationCodeInsertResult = await Database.insertCode( - this.databasePool, + client, databaseColumns.recompiledCreationCode, ); } const recompiledRuntimeCodeInsertResult = await Database.insertCode( - this.databasePool, + client, databaseColumns.recompiledRuntimeCode, ); // Add onchain bytecodes if (databaseColumns.onchainCreationCode) { onchainCreationCodeInsertResult = await Database.insertCode( - this.databasePool, + client, databaseColumns.onchainCreationCode, ); } const onchainRuntimeCodeInsertResult = await Database.insertCode( - this.databasePool, + client, databaseColumns.onchainRuntimeCode, ); // Add the onchain contract in contracts - const contractInsertResult = await Database.insertContract( - this.databasePool, - { - creation_bytecode_hash: - onchainCreationCodeInsertResult?.rows[0].bytecode_hash, - runtime_bytecode_hash: - onchainRuntimeCodeInsertResult.rows[0].bytecode_hash, - }, - ); + const contractInsertResult = await Database.insertContract(client, { + creation_bytecode_hash: + onchainCreationCodeInsertResult?.rows[0].bytecode_hash, + runtime_bytecode_hash: + onchainRuntimeCodeInsertResult.rows[0].bytecode_hash, + }); // add the onchain contract in contract_deployments const contractDeploymentInsertResult = - await Database.insertContractDeployment(this.databasePool, { + await Database.insertContractDeployment(client, { ...databaseColumns.contractDeployment, contract_id: contractInsertResult.rows[0].id, }); // insert new recompiled contract const compiledContractsInsertResult = - await Database.insertCompiledContract(this.databasePool, { + await Database.insertCompiledContract(client, { ...databaseColumns.compiledContract, creation_code_hash: recompiledCreationCodeInsertResult?.rows[0].bytecode_hash, @@ -417,18 +431,31 @@ export default abstract class AbstractDatabaseService { recompiledRuntimeCodeInsertResult.rows[0].bytecode_hash, }); + const compiledContractId = compiledContractsInsertResult.rows[0].id; + + await Database.insertCompiledContractsSources(client, { + sourcesInformation: databaseColumns.sourcesInformation, + compilation_id: compiledContractId, + }); + // insert new recompiled contract with newly added contract and compiledContract const verifiedContractInsertResult = - 
await Database.insertVerifiedContract(this.databasePool, { + await Database.insertVerifiedContract(client, { ...databaseColumns.verifiedContract, - compilation_id: compiledContractsInsertResult.rows[0].id, + compilation_id: compiledContractId, deployment_id: contractDeploymentInsertResult.rows[0].id, }); + // Commit the transaction + await client.query("COMMIT"); return verifiedContractInsertResult.rows[0].id; } catch (e) { + // Rollback the transaction in case of error + await client.query("ROLLBACK"); throw new Error( `cannot insert verified_contract address=${match.address} chainId=${match.chainId}\n${e}`, ); + } finally { + client.release(); } } @@ -446,7 +473,12 @@ export default abstract class AbstractDatabaseService { throw new Error("Missing onchain runtime bytecode"); } + // Get a client from the pool, so that we can execute all the insert queries within the same transaction + const client = await this.databasePool.connect(); try { + // Start the sql transaction + await client.query("BEGIN"); + let recompiledCreationCodeInsertResult: | QueryResult> | undefined; @@ -460,28 +492,25 @@ export default abstract class AbstractDatabaseService { databaseColumns.onchainCreationCode ) { onchainCreationCodeInsertResult = await Database.insertCode( - this.databasePool, + client, databaseColumns.onchainCreationCode, ); const onchainRuntimeCodeInsertResult = await Database.insertCode( - this.databasePool, + client, databaseColumns.onchainRuntimeCode, ); // Add the onchain contract in contracts - const contractInsertResult = await Database.insertContract( - this.databasePool, - { - creation_bytecode_hash: - onchainCreationCodeInsertResult.rows[0].bytecode_hash, - runtime_bytecode_hash: - onchainRuntimeCodeInsertResult.rows[0].bytecode_hash, - }, - ); + const contractInsertResult = await Database.insertContract(client, { + creation_bytecode_hash: + onchainCreationCodeInsertResult.rows[0].bytecode_hash, + runtime_bytecode_hash: + onchainRuntimeCodeInsertResult.rows[0].bytecode_hash, + }); // add the onchain contract in contract_deployments - await Database.updateContractDeployment(this.databasePool, { + await Database.updateContractDeployment(client, { ...databaseColumns.contractDeployment, contract_id: contractInsertResult.rows[0].id, id: existingVerifiedContractResult[0].deployment_id, @@ -494,18 +523,18 @@ export default abstract class AbstractDatabaseService { databaseColumns.recompiledCreationCode ) { recompiledCreationCodeInsertResult = await Database.insertCode( - this.databasePool, + client, databaseColumns.recompiledCreationCode, ); } const recompiledRuntimeCodeInsertResult = await Database.insertCode( - this.databasePool, + client, databaseColumns.recompiledRuntimeCode, ); // insert new recompiled contract const compiledContractsInsertResult = - await Database.insertCompiledContract(this.databasePool, { + await Database.insertCompiledContract(client, { ...databaseColumns.compiledContract, creation_code_hash: recompiledCreationCodeInsertResult?.rows[0].bytecode_hash, @@ -515,17 +544,23 @@ export default abstract class AbstractDatabaseService { // update verified contract with the newly added recompiled contract const verifiedContractInsertResult = - await Database.insertVerifiedContract(this.databasePool, { + await Database.insertVerifiedContract(client, { ...databaseColumns.verifiedContract, compilation_id: compiledContractsInsertResult.rows[0].id, deployment_id: existingVerifiedContractResult[0].deployment_id, }); + // Commit the transaction + await client.query("COMMIT"); return 
verifiedContractInsertResult.rows[0].id; } catch (e) { + // Rollback the transaction in case of error + await client.query("ROLLBACK"); throw new Error( `cannot update verified_contract address=${match.address} chainId=${match.chainId}\n${e}`, ); + } finally { + client.release(); } } diff --git a/services/server/src/server/services/storageServices/SourcifyDatabaseService.ts b/services/server/src/server/services/storageServices/SourcifyDatabaseService.ts index d9c4a8c2c..77e066f5c 100644 --- a/services/server/src/server/services/storageServices/SourcifyDatabaseService.ts +++ b/services/server/src/server/services/storageServices/SourcifyDatabaseService.ts @@ -254,7 +254,7 @@ export class SourcifyDatabaseService }; /** - * getFiles extracts the files from the database `compiled_contracts.sources` + * getFiles extracts the files from the database `compiled_contracts_sources` * and store them into FilesInfo.files, this object is then going to be formatted * by getTree, getContent and getFile. */ @@ -281,12 +281,18 @@ export class SourcifyDatabaseService ? "full" : "partial"; - const sources: { [index: string]: string } = {}; - - // Add 'sources/' prefix for API compatibility with the repoV1 responses. RepoV1 filesystem has all source files in 'sources/' - for (const path of Object.keys(sourcifyMatch.sources)) { - sources[`sources/${path}`] = sourcifyMatch.sources[path]; - } + const sourcesResult = await Database.getCompiledContractSources( + this.databasePool, + sourcifyMatch.compilation_id, + ); + const sources = sourcesResult.rows.reduce( + (sources, source) => { + // Add 'sources/' prefix for API compatibility with the repoV1 responses. RepoV1 filesystem has all source files in 'sources/' + sources[`sources/${source.path}`] = source.content; + return sources; + }, + {} as Record, + ); const files: FilesRawValue = {}; if (sourcifyMatch.metadata) { diff --git a/services/server/src/server/services/utils/database-util.ts b/services/server/src/server/services/utils/database-util.ts index c356bbefe..a73d2604a 100644 --- a/services/server/src/server/services/utils/database-util.ts +++ b/services/server/src/server/services/utils/database-util.ts @@ -10,7 +10,7 @@ import { TransformationValues, CompiledContractCborAuxdata, } from "@ethereum-sourcify/lib-sourcify"; -import { Pool, QueryResult } from "pg"; +import { Pool, PoolClient, QueryResult } from "pg"; import { Abi } from "abitype"; import { Bytes, @@ -19,6 +19,7 @@ import { BytesTypes, Nullable, } from "../../types"; +import logger from "../../../common/logger"; // eslint-disable-next-line @typescript-eslint/no-namespace export namespace Tables { @@ -57,7 +58,6 @@ export namespace Tables { storageLayout: Nullable; sources: Nullable; }; - sources: Record; compiler_settings: Object; creation_code_hash?: BytesSha; runtime_code_hash: BytesSha; @@ -88,6 +88,19 @@ export namespace Tables { creation_metadata_match: Nullable; } + export interface Sources { + source_hash: BytesSha; + source_hash_keccak: BytesKeccak; + content: string; + } + + export interface CompiledContractsSources { + id: string; + compilation_id: string; + source_hash: BytesSha; + path: string; + } + export interface SourcifyMatch { verified_contract_id: number; runtime_match: Status | null; @@ -109,6 +122,12 @@ export interface CompilationArtifactsSources { }; } +export interface SourceInformation { + source_hash_keccak: BytesKeccak; + content: string; + path: string; +} + // This object contains all Tables fields except foreign keys generated during INSERTs export interface 
DatabaseColumns { recompiledCreationCode?: Omit; @@ -124,6 +143,7 @@ export interface DatabaseColumns { Tables.VerifiedContract, "id" | "compilation_id" | "deployment_id" >; + sourcesInformation: SourceInformation[]; } export type GetVerifiedContractByChainAndAddressResult = @@ -154,8 +174,11 @@ export async function getVerifiedContractByChainAndAddress( } export type GetSourcifyMatchByChainAddressResult = Tables.SourcifyMatch & - Pick & - Pick & + Pick< + Tables.VerifiedContract, + "creation_values" | "runtime_values" | "compilation_id" + > & + Pick & Pick; export async function getSourcifyMatchByChainAddress( @@ -171,9 +194,9 @@ export async function getSourcifyMatchByChainAddress( sourcify_matches.creation_match, sourcify_matches.runtime_match, sourcify_matches.metadata, - compiled_contracts.sources, verified_contracts.creation_values, verified_contracts.runtime_values, + verified_contracts.compilation_id, compiled_contracts.runtime_code_artifacts, contract_deployments.transaction_hash FROM sourcify_matches @@ -193,8 +216,27 @@ ${ ); } -export async function insertCode( +export async function getCompiledContractSources( pool: Pool, + compilation_id: string, +): Promise< + QueryResult> +> { + return await pool.query( + ` + SELECT + compiled_contracts_sources.*, + sources.content + FROM compiled_contracts_sources + LEFT JOIN sources ON sources.source_hash = compiled_contracts_sources.source_hash + WHERE compilation_id = $1 + `, + [compilation_id], + ); +} + +export async function insertCode( + pool: PoolClient, { bytecode_hash_keccak, bytecode }: Omit, ): Promise>> { let codeInsertResult = await pool.query( @@ -216,7 +258,7 @@ export async function insertCode( } export async function insertContract( - pool: Pool, + pool: PoolClient, { creation_bytecode_hash, runtime_bytecode_hash, @@ -242,7 +284,7 @@ export async function insertContract( } export async function insertContractDeployment( - pool: Pool, + pool: PoolClient, { chain_id, address, @@ -293,7 +335,7 @@ export async function insertContractDeployment( } export async function insertCompiledContract( - pool: Pool, + pool: PoolClient, { compiler, version, @@ -301,7 +343,6 @@ export async function insertCompiledContract( name, fully_qualified_name, compilation_artifacts, - sources, compiler_settings, creation_code_hash, runtime_code_hash, @@ -318,13 +359,12 @@ export async function insertCompiledContract( name, fully_qualified_name, compilation_artifacts, - sources, compiler_settings, creation_code_hash, runtime_code_hash, creation_code_artifacts, runtime_code_artifacts - ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) ON CONFLICT (compiler, language, creation_code_hash, runtime_code_hash) DO NOTHING RETURNING * + ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11) ON CONFLICT (compiler, language, creation_code_hash, runtime_code_hash) DO NOTHING RETURNING * `, [ compiler, @@ -333,7 +373,6 @@ export async function insertCompiledContract( name, fully_qualified_name, compilation_artifacts, - sources, compiler_settings, creation_code_hash, runtime_code_hash, @@ -360,8 +399,99 @@ export async function insertCompiledContract( return compiledContractsInsertResult; } +export async function insertCompiledContractsSources( + pool: PoolClient, + { + sourcesInformation, + compilation_id, + }: { + sourcesInformation: SourceInformation[]; + compilation_id: string; + }, +) { + const sourceCodesQueryIndexes: string[] = []; + const sourceCodesQueryValues: any[] = []; + + // Loop through each `sourceInformation` to generate the `INSERT 
INTO sources` query placeholders and values + sourcesInformation.forEach((sourceCode, sourceCodesQueryIndex) => { + sourceCodesQueryIndexes.push( + // `sourceCodesQueryIndex * 2` comes from the number of unique values in the insert query, `sourceCode.content` is used for the first two columns + `(digest($${sourceCodesQueryIndex * 2 + 1}, 'sha256'), $${sourceCodesQueryIndex * 2 + 1}, $${sourceCodesQueryIndex * 2 + 2}::bytea)`, + ); + sourceCodesQueryValues.push(sourceCode.content); + sourceCodesQueryValues.push(sourceCode.source_hash_keccak); + }); + const sourceCodesQuery = `INSERT INTO sources ( + source_hash, + content, + source_hash_keccak + ) VALUES ${sourceCodesQueryIndexes.join(",")} ON CONFLICT (source_hash) DO NOTHING RETURNING *`; + const sourceCodesQueryResult = await pool.query( + sourceCodesQuery, + sourceCodesQueryValues, + ); + + // If some source codes already exist, fetch their hashes from the database + if (sourceCodesQueryResult.rows.length < sourcesInformation.length) { + const existingSourcesQuery = ` + SELECT * + FROM sources + WHERE source_hash_keccak = ANY($1::bytea[]) + `; + const existingSourcesResult = await pool.query(existingSourcesQuery, [ + sourcesInformation.map((source) => source.source_hash_keccak), + ]); + sourceCodesQueryResult.rows = existingSourcesResult.rows; + } + + const compiledContractsSourcesQueryIndexes: string[] = []; + const compiledContractsSourcesQueryValues: any[] = []; + + // Loop through each `sourceInformation` to generate the query placeholders and values for the `INSERT INTO compiled_contracts_sources` query. + // We separate these into two steps because we first need to batch insert into `sources`. + // After that, we use the newly inserted `sources.source_hash` to perform the batch insert into `compiled_contracts_sources`. 
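+  // For example, with two sources the fragment generated below is
+  //   ($1, $2, $3),($4, $5, $6)
+  // where each triple maps to (compilation_id, source_hash, path).
+  // Note that because the `sources` insert above uses ON CONFLICT DO NOTHING, its
+  // RETURNING * only yields newly inserted rows; pre-existing sources are therefore
+  // re-fetched by their keccak hash before the lookup below.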
+ sourcesInformation.forEach( + (compiledContractsSource, compiledContractsSourcesQueryIndex) => { + compiledContractsSourcesQueryIndexes.push( + // `sourceCodesQueryIndex * 3` comes from the number of unique values in the insert query + `($${compiledContractsSourcesQueryIndex * 3 + 1}, $${compiledContractsSourcesQueryIndex * 3 + 2}, $${compiledContractsSourcesQueryIndex * 3 + 3})`, + ); + compiledContractsSourcesQueryValues.push(compilation_id); + const source = sourceCodesQueryResult.rows.find( + (sc) => + sc.source_hash_keccak.toString("hex") === + compiledContractsSource.source_hash_keccak.toString("hex"), + ); + if (!source) { + logger.error( + "Source not found while inserting compiled contracts sources", + { + compilation_id, + compiledContractsSource, + }, + ); + throw new Error( + "Source not found while inserting compiled contracts sources", + ); + } + compiledContractsSourcesQueryValues.push(source?.source_hash); + compiledContractsSourcesQueryValues.push(compiledContractsSource.path); + }, + ); + + const compiledContractsSourcesQuery = `INSERT INTO compiled_contracts_sources ( + compilation_id, + source_hash, + path + ) VALUES ${compiledContractsSourcesQueryIndexes.join(",")} ON CONFLICT (compilation_id, path) DO NOTHING`; + await pool.query( + compiledContractsSourcesQuery, + compiledContractsSourcesQueryValues, + ); +} + export async function insertVerifiedContract( - pool: Pool, + pool: PoolClient, { compilation_id, deployment_id, @@ -675,7 +805,7 @@ export async function getSourcifyMatchAddressesByChainAndMatch( } export async function updateContractDeployment( - pool: Pool, + pool: PoolClient, { id, transaction_hash, diff --git a/services/server/test/helpers/helpers.ts b/services/server/test/helpers/helpers.ts index c924eb55f..055e3aba0 100644 --- a/services/server/test/helpers/helpers.ts +++ b/services/server/test/helpers/helpers.ts @@ -245,6 +245,8 @@ export async function resetDatabase(sourcifyDatabase: Pool) { await sourcifyDatabase.query("DELETE FROM sourcify_matches"); await sourcifyDatabase.query("DELETE FROM verified_contracts"); await sourcifyDatabase.query("DELETE FROM contract_deployments"); + await sourcifyDatabase.query("DELETE FROM compiled_contracts_sources"); + await sourcifyDatabase.query("DELETE FROM sources"); await sourcifyDatabase.query("DELETE FROM compiled_contracts"); await sourcifyDatabase.query("DELETE FROM contracts"); await sourcifyDatabase.query("DELETE FROM code"); diff --git a/services/server/test/integration/database.spec.ts b/services/server/test/integration/database.spec.ts index 46f1a5edf..61800e4d9 100644 --- a/services/server/test/integration/database.spec.ts +++ b/services/server/test/integration/database.spec.ts @@ -5,9 +5,14 @@ import { id as keccak256str, keccak256 } from "ethers"; import { LocalChainFixture } from "../helpers/LocalChainFixture"; import { ServerFixture } from "../helpers/ServerFixture"; import type { MetadataSourceMap } from "@ethereum-sourcify/lib-sourcify"; +import * as databaseUtil from "../../src/server/services/utils/database-util"; import { bytesFromString } from "../../src/server/services/utils/database-util"; import crypto from "crypto"; import { Bytes } from "../../src/server/types"; +import sinon from "sinon"; +import { assertVerification } from "../helpers/assertions"; +import path from "path"; +import fs from "fs"; chai.use(chaiHttp); @@ -130,6 +135,7 @@ describe("Verifier Alliance database", function () { if (!serverFixture.sourcifyDatabase) { chai.assert.fail("No database on StorageService"); } + const 
addressBuffer = Buffer.from(address.substring(2), "hex"); const res = await serverFixture.sourcifyDatabase.query( `SELECT compilation_artifacts, @@ -160,7 +166,6 @@ describe("Verifier Alliance database", function () { cc.language, cc.name, cc.fully_qualified_name, - cc.sources, cc.compiler_settings, cd.chain_id, cd.address, @@ -177,7 +182,19 @@ describe("Verifier Alliance database", function () { LEFT JOIN code onchain_runtime_code ON onchain_runtime_code.code_hash = c.runtime_code_hash LEFT JOIN code onchain_creation_code ON onchain_creation_code.code_hash = c.creation_code_hash where cd.address = $1`, - [Buffer.from(address.substring(2), "hex")], + [addressBuffer], + ); + const resSources = await serverFixture.sourcifyDatabase.query( + `SELECT + ccs.*, + s.* + FROM verified_contracts vc + LEFT JOIN contract_deployments cd ON cd.id = vc.deployment_id + LEFT JOIN compiled_contracts cc ON cc.id = vc.compilation_id + LEFT JOIN compiled_contracts_sources ccs on ccs.compilation_id = cc.id + LEFT JOIN sources s ON s.source_hash = ccs.source_hash + where cd.address = $1`, + [addressBuffer], ); chai.expect(res.rowCount).to.equal(1); @@ -190,7 +207,14 @@ describe("Verifier Alliance database", function () { chai .expect(row.fully_qualified_name) .to.equal(testCase.fully_qualified_name); - chai.expect(row.sources).to.deep.equal(testCase.sources); + chai + .expect( + resSources.rows.reduce((sources, source) => { + sources[source.path] = source.content; + return sources; + }, {}), + ) + .to.deep.equal(testCase.sources); chai .expect(row.compiler_settings) .to.deep.equal(testCase.compiler_settings); @@ -346,3 +370,135 @@ describe("Verifier Alliance database", function () { // - genesis: right now not supported, // - partial_match_2: I don't know why we have this test }); + +describe("Sourcify database", function () { + const chainFixture = new LocalChainFixture(); + const serverFixture = new ServerFixture(); + const sandbox = sinon.createSandbox(); + + this.afterEach(() => { + sandbox.restore(); + }); + + it("When inserting a new match, nothing should be stored if an error occurs in the middle of the sql transaction", async () => { + // Sinon will throw an error if the function is called + sandbox + .stub(databaseUtil, "insertVerifiedContract") + .throws(new Error("Simulated database error")); + + const res = await chai + .request(serverFixture.server.app) + .post("/") + .field("address", chainFixture.defaultContractAddress) + .field("chain", chainFixture.chainId) + .attach("files", chainFixture.defaultContractMetadata, "metadata.json") + .attach("files", chainFixture.defaultContractSource, "Storage.sol"); + + // Request should fail + chai.expect(res).to.have.status(500); + + // query the database to check that nothing was stored, in any of the tables + const verifiedContracts = await serverFixture.sourcifyDatabase.query( + "SELECT * FROM verified_contracts", + ); + chai.expect(verifiedContracts.rows).to.have.length(0); + const contractDeployments = await serverFixture.sourcifyDatabase.query( + "SELECT * FROM contract_deployments", + ); + chai.expect(contractDeployments.rows).to.have.length(0); + const compiledContracts = await serverFixture.sourcifyDatabase.query( + "SELECT * FROM compiled_contracts", + ); + chai.expect(compiledContracts.rows).to.have.length(0); + const sources = await serverFixture.sourcifyDatabase.query( + "SELECT * FROM sources", + ); + chai.expect(sources.rows).to.have.length(0); + const code = + await serverFixture.sourcifyDatabase.query("SELECT * FROM code"); + 
chai.expect(code.rows).to.have.length(0); + const sourcifyMatches = await serverFixture.sourcifyDatabase.query( + "SELECT * FROM sourcify_matches", + ); + chai.expect(sourcifyMatches.rows).to.have.length(0); + }); + + it("When updating an existing match, nothing should be updated if an error occurs in the middle of the sql transaction", async () => { + const partialMetadata = ( + await import("../testcontracts/Storage/metadataModified.json") + ).default; + const partialMetadataBuffer = Buffer.from(JSON.stringify(partialMetadata)); + + const partialSourcePath = path.join( + __dirname, + "..", + "testcontracts", + "Storage", + "StorageModified.sol", + ); + const partialSourceBuffer = fs.readFileSync(partialSourcePath); + + let res = await chai + .request(serverFixture.server.app) + .post("/") + .field("address", chainFixture.defaultContractAddress) + .field("chain", chainFixture.chainId) + .attach("files", partialMetadataBuffer, "metadata.json") + .attach("files", partialSourceBuffer); + await assertVerification( + serverFixture.sourcifyDatabase, + null, + res, + null, + chainFixture.defaultContractAddress, + chainFixture.chainId, + "partial", + ); + + const beforeTables = [ + "verified_contracts", + "contract_deployments", + "contracts", + "compiled_contracts", + "sources", + "code", + "sourcify_matches", + ]; + const beforeData: Record = {}; + + for (const table of beforeTables) { + const result = await serverFixture.sourcifyDatabase.query( + `SELECT * FROM ${table}`, + ); + beforeData[table] = result.rows; + } + + // Sinon will throw an error if the function is called + sandbox + .stub(databaseUtil, "insertVerifiedContract") + .throws(new Error("Simulated database error")); + + res = await chai + .request(serverFixture.server.app) + .post("/") + .field("address", chainFixture.defaultContractAddress) + .field("chain", chainFixture.chainId) + .field("creatorTxHash", chainFixture.defaultContractCreatorTx) + .attach("files", chainFixture.defaultContractMetadata, "metadata.json") + .attach("files", chainFixture.defaultContractSource); + + // Request should fail + chai.expect(res).to.have.status(500); + + const afterData: Record = {}; + + for (const table of beforeTables) { + const result = await serverFixture.sourcifyDatabase.query( + `SELECT * FROM ${table}`, + ); + afterData[table] = result.rows; + } + + chai.expect(afterData).to.deep.equal(beforeData); + }); +});
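
The storage service changes above execute every insert for a single verification on one client checked out from the pool, wrapped in BEGIN/COMMIT, with ROLLBACK on error and release() in a finally block. A minimal sketch of that pattern, assuming only the pg package (the withTransaction helper is illustrative; the service inlines this logic rather than using such a helper):

import { Pool, PoolClient } from "pg";

// Run a unit of work atomically on a single client checked out from the pool.
async function withTransaction<T>(
  pool: Pool,
  work: (client: PoolClient) => Promise<T>,
): Promise<T> {
  const client = await pool.connect();
  try {
    await client.query("BEGIN");
    const result = await work(client);
    await client.query("COMMIT");
    return result;
  } catch (e) {
    // Undo any statements issued on this client since BEGIN
    await client.query("ROLLBACK");
    throw e;
  } finally {
    // Always return the client to the pool, even on failure
    client.release();
  }
}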
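
For rows in the new sources table, source_hash is computed by Postgres itself via digest(content, 'sha256') and enforced by the source_hash_check constraint, while source_hash_keccak is computed application-side from the raw file content. A sketch of how one sourcesInformation entry could be assembled, assuming ethers for keccak256 and Node's crypto module for a mirroring sha256 (buildSourceInformation and its return shape are illustrative, not the service's actual helper):

import { createHash } from "crypto";
import { keccak256 } from "ethers";

// Illustrative only: builds the values sent for one source file.
function buildSourceInformation(path: string, content: string) {
  // keccak256 over the UTF-8 bytes of the source, stored as bytea in source_hash_keccak
  const source_hash_keccak = Buffer.from(
    keccak256(Buffer.from(content, "utf8")).slice(2),
    "hex",
  );
  // Postgres recomputes sha256 with digest(content, 'sha256'); computing it here only
  // mirrors what the source_hash_check constraint enforces on insert.
  const sha256 = createHash("sha256").update(content, "utf8").digest();
  return { path, content, source_hash_keccak, sha256 };
}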
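
On the read side, getFiles no longer reads compiled_contracts.sources: it joins compiled_contracts_sources with sources on source_hash and rebuilds the path-to-content map, keeping the 'sources/' prefix for compatibility with the repoV1 responses. A condensed sketch of that read path, assuming a pg Pool (fetchSources is an illustrative name; the server uses getCompiledContractSources plus a reduce in SourcifyDatabaseService):

import { Pool } from "pg";

// Reconstruct { "sources/<path>": content } for one compilation from the normalized tables.
async function fetchSources(pool: Pool, compilationId: string) {
  const { rows } = await pool.query(
    `SELECT ccs.path, s.content
       FROM compiled_contracts_sources ccs
       JOIN sources s ON s.source_hash = ccs.source_hash
      WHERE ccs.compilation_id = $1`,
    [compilationId],
  );
  return rows.reduce<Record<string, string>>((acc, row) => {
    acc[`sources/${row.path}`] = row.content;
    return acc;
  }, {});
}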