diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f2814442..6983fbc2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -5,6 +5,8 @@ on: push: branches: - master + tags: + - "v*" pull_request: branches: - master @@ -13,20 +15,45 @@ concurrency: group: ci-${{ github.ref }} cancel-in-progress: true +defaults: + run: + shell: bash + jobs: build: needs: [lint, test] strategy: matrix: - environment: [ubuntu-latest, macos-latest, windows-latest] + environment: [ubuntu-latest, macos-13, macos-14, windows-latest] + permissions: + contents: read + id-token: write runs-on: ${{ matrix.environment }} + timeout-minutes: 10 + steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - - uses: actions/setup-go@v4 + - name: Speed up Go (Windows) + if: runner.os == 'Windows' + run: | + DIR='D:\a\local' + mkdir -p "$DIR" && cd "$DIR" + mkdir go go-cache go-tmp tmpdir + go env -w GOPATH="$DIR\\go" + go env -w GOCACHE="$DIR\\go-cache" + go env -w GOTMPDIR="$DIR\\go-tmp" + printf '%s\\go\\bin\n' "$DIR" | tee -a "$GITHUB_PATH" + printf 'TMP=%s\\tmpdir\n' "$DIR" | tee -a "$GITHUB_ENV" + printf 'TEMP=%s\\tmpdir\n' "$DIR" | tee -a "$GITHUB_ENV" + go env + + - uses: actions/setup-go@v5 with: - go-version: "^1.18.1" + go-version: "^1.22" + # disable caching during release (tag) builds + cache: ${{ !startsWith(github.ref, 'refs/tags/') }} - name: Build (Linux and macOS) if: runner.os == 'Linux' || runner.os == 'macOS' @@ -34,7 +61,7 @@ jobs: - name: Compress (Linux and macOS) if: runner.os == 'Linux' || runner.os == 'macOS' - run: tar -czvf medusa.tar.gz medusa + run: tar -czvf medusa-${{ runner.os }}-${{ runner.arch }}.tar.gz medusa - name: Build (Windows) if: runner.os == 'Windows' @@ -42,24 +69,66 @@ jobs: - name: Compress (Windows) if: runner.os == 'Windows' - run: tar -czvf medusa.tar.gz medusa.exe + run: tar -czvf medusa-${{ runner.os }}-${{ runner.arch }}.tar.gz medusa.exe - - name: Upload artifact on merge to master - if: github.ref 
== 'refs/heads/master' - uses: actions/upload-artifact@v3 + - name: Rename for release + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') + shell: bash + run: | + [ ! -f medusa-Linux-X64.tar.gz ] || mv medusa-Linux-X64.tar.gz medusa-linux-x64.tar.gz + [ ! -f medusa-macOS-X64.tar.gz ] || mv medusa-macOS-X64.tar.gz medusa-mac-x64.tar.gz + [ ! -f medusa-macOS-ARM64.tar.gz ] || mv medusa-macOS-ARM64.tar.gz medusa-mac-arm64.tar.gz + [ ! -f medusa-Windows-X64.tar.gz ] || mv medusa-Windows-X64.tar.gz medusa-win-x64.tar.gz + + - name: Sign artifact + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') + uses: sigstore/gh-action-sigstore-python@v3.0.0 with: - name: medusa-${{ runner.os }} - path: medusa.tar.gz + inputs: ./medusa-*.tar.gz + + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + name: medusa-${{ runner.os }}-${{ runner.arch }} + path: | + ./medusa-*.tar.gz + ./medusa-*.tar.gz.sigstore.json + + release: + needs: [build] + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') + permissions: + contents: write + + runs-on: ubuntu-latest + timeout-minutes: 10 + + steps: + - name: Download binaries + uses: actions/download-artifact@v4 + with: + pattern: medusa-* + merge-multiple: true + + - name: Create GitHub release and upload binaries + uses: softprops/action-gh-release@9d7c94cfd0a1f3ed45544c887983e9fa900f0564 # v2.0.4 + with: + draft: true + name: "${{ github.ref_name }}" + files: | + ./medusa-*.tar.gz + ./medusa-*.tar.gz.sigstore.json lint: runs-on: ubuntu-latest + timeout-minutes: 10 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - - uses: actions/setup-go@v3 + - uses: actions/setup-go@v5 with: - go-version: "^1.18.1" + go-version: "^1.22" - name: Actionlint run: | @@ -86,44 +155,83 @@ jobs: go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest golangci-lint run --timeout 5m0s + - name: Gencodec + run: | + go get github.com/fjl/gencodec + pushd 
fuzzing/config + go run github.com/fjl/gencodec -type FuzzingConfig -field-override fuzzingConfigMarshaling -out gen_fuzzing_config.go + git diff --exit-code -- . + popd test: strategy: matrix: - environment: [ubuntu-latest, macos-latest, windows-latest] + environment: [ubuntu-latest, macos-13, macos-14, windows-latest] runs-on: ${{ matrix.environment }} + timeout-minutes: 20 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - - uses: actions/setup-go@v3 + - uses: actions/setup-python@v5 with: - go-version: "^1.18.1" + python-version: "3.10" - - uses: actions/setup-node@v3 + - name: Speed up Go, Python, Node (Windows) + if: runner.os == 'Windows' + run: | + DIR='D:\a\local' + mkdir -p "$DIR" && cd "$DIR" + echo "::group::Go" + mkdir -p go go-cache go-tmp tmpdir + go env -w GOPATH="$DIR\\go" + go env -w GOCACHE="$DIR\\go-cache" + go env -w GOTMPDIR="$DIR\\go-tmp" + printf '%s\\go\\bin\n' "$DIR" | tee -a "$GITHUB_PATH" + printf 'TMP=%s\\tmpdir\n' "$DIR" | tee -a "$GITHUB_ENV" + printf 'TEMP=%s\\tmpdir\n' "$DIR" | tee -a "$GITHUB_ENV" + go env + echo "::endgroup::" + echo "::group::Python" + python3 -m venv venv + printf '%s\\venv\\Scripts\n' "$DIR" | tee -a "$GITHUB_PATH" + printf 'VIRTUAL_ENV=%s\\venv\n' "$DIR" | tee -a "$GITHUB_ENV" + echo "::endgroup::" + echo "::group::Node" + npm config set cache "$DIR\\npm-cache" --global + echo "::endgroup::" + + - uses: actions/setup-go@v5 + with: + go-version: "^1.22" + + - uses: actions/setup-node@v4 with: node-version: 18.15 - name: Install Node dependencies - run: npm install -g hardhat truffle + run: npm install hardhat - name: Install Python dependencies run: | - pip3 install solc-select - pip3 install slither-analyzer + pip3 install --no-cache-dir solc-select crytic-compile - name: Install solc run: | - solc-select install 0.8.17 - solc-select use 0.8.17 + solc-select use 0.8.17 --always-install - name: Test run: go test ./... 
all-checks: - needs: [lint, test, build] + if: always() + needs: [lint, test, build, release] runs-on: ubuntu-latest steps: - - run: true + - name: Decide whether the needed jobs succeeded or failed + uses: re-actors/alls-green@05ac9388f0aebcb5727afa17fcccfecd6f8ec5fe # v1.2.2 + with: + allowed-skips: release + jobs: ${{ toJSON(needs) }} diff --git a/.gitignore b/.gitignore index 4ec8c7ba..75874601 100644 --- a/.gitignore +++ b/.gitignore @@ -13,10 +13,16 @@ # Dependency directories (remove the comment below to include it) # vendor/ -*node_modules/ -# PyCharm project dir +# Goland project dir .idea/ # Build results result +*node_modules/ + +# Medusa binary +medusa + +# Medusa docs +docs/book diff --git a/CODEOWNERS b/CODEOWNERS new file mode 100644 index 00000000..a0108738 --- /dev/null +++ b/CODEOWNERS @@ -0,0 +1 @@ +* @Xenomega @anishnaik diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7c1bdddf..dcae4472 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -18,6 +18,7 @@ To better understand how to make responsible open source contributions, consider When introducing changes to the project, note the following requirements: - All changes to the main branch should be introduced via pull requests. +- All branches created for pull requests should follow the `dev/*` naming convention, e.g. `dev/coverage-reports`. - Every pull request **must** be reviewed by at least one other peer prior to being merged into the main branch. - Code **must** be supported on Linux, macOS, and Windows. - Code **must** be sufficiently commented: @@ -33,6 +34,47 @@ If any of these requirements are violated, you should expect your pull request t Pull request reviewers have a responsibility to uphold these standards. Even if a pull request is compliant with these requirements, a reviewer which identifies an opportunity to document some caveat (such as a `// TODO: ` comment) should request it be added prior to pull request approval. 
+### Linters + +Several linters and security checkers are run on the PRs. + +#### Go + +To install + +- `go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest` + +To run + +- `go fmt ./...` +- `golangci-lint run --timeout 5m0s` + +#### Markdown/Json/Yaml + +To install + +- `npm install -g prettier` +- `npm install -g markdown-link-check@3.10.3` + +To run + +- `prettier '**.json' '**/*.md' '**/*.yml' '!(pkg)'` +- `find . -name '*.md' -print0 | xargs -0 -n1 markdown-link-check --config .github/workflows/resources/markdown_link_check.json` + +To format (overwrite files) + +- `prettier '**.json' '**/*.md' '**/*.yml' '!(pkg)' -w` + +#### Github action + +To install + +- `go install github.com/rhysd/actionlint/cmd/actionlint@latest` + +To run + +- `actionlint` + ### Cross-platform considerations - Ensure file/directory names do not exceed 32 characters in length to minimize filepath length issues on Windows. File/directory names should be shorter than this where possible. diff --git a/DEV.md b/DEV.md new file mode 100644 index 00000000..7543fe56 --- /dev/null +++ b/DEV.md @@ -0,0 +1,104 @@ +# Debugging and Development + +## Debugging + +The following scripts are available for Medusa developers for debugging changes to the fuzzer. + +### Corpus diff + +The corpus diff script is used to compare two corpora and identify the methods that are present in one but not the other. This is useful for identifying methods that are missing from a corpus that should be present. + +```shell +python3 scripts/corpus_diff.py corpus1 corpus2 +``` + +```shell +Methods only in ~/corpus1: +- clampSplitWeight(uint32,uint32) + +Methods only in ~/corpus2: + +``` + +### Corpus stats + +The corpus stats script is used to generate statistics about a corpus. This includes the number of sequences, the average length of sequences, and the frequency of methods called. 
+ +```shell +python3 scripts/corpus_stats.py corpus +``` + +```shell +Number of Sequences in ~/corpus: 130 + +Average Length of Transactions List: 43 + +Frequency of Methods Called: +- testReceiversReceivedSplit(uint8): 280 +- setMaxEndHints(uint32,uint32): 174 +- setStreamBalanceWithdrawAll(uint8): 139 +- giveClampedAmount(uint8,uint8,uint128): 136 +- receiveStreamsSplitAndCollectToSelf(uint8): 133 +- testSqueezeViewVsActual(uint8,uint8): 128 +- testSqueeze(uint8,uint8): 128 +- testSetStreamBalance(uint8,int128): 128 +- addStreamWithClamping(uint8,uint8,uint160,uint32,uint32,int128): 125 +- removeAllSplits(uint8): 118 +- testSplittableAfterSplit(uint8): 113 +- testSqueezableVsReceived(uint8): 111 +- testBalanceAtInFuture(uint8,uint8,uint160): 108 +- testRemoveStreamShouldNotRevert(uint8,uint256): 103 +- invariantWithdrawAllTokensShouldNotRevert(): 103 +- collect(uint8,uint8): 101 +- invariantAmtPerSecVsMinAmtPerSec(uint8,uint256): 98 +- testSqueezableAmountCantBeWithdrawn(uint8,uint8): 97 +- split(uint8): 97 +- invariantWithdrawAllTokens(): 95 +- testReceiveStreams(uint8,uint32): 93 +- invariantAccountingVsTokenBalance(): 92 +- testSqueezeWithFuzzedHistoryShouldNotRevert(uint8,uint8,uint256,bytes32): 91 +- testSqueezableAmountCantBeUndone(uint8,uint8,uint160,uint32,uint32,int128): 87 +- testCollect(uint8,uint8): 86 +- testSetStreamBalanceWithdrawAllShouldNotRevert(uint8): 86 +- testAddStreamShouldNotRevert(uint8,uint8,uint160,uint32,uint32,int128): 85 +- testReceiveStreamsShouldNotRevert(uint8): 84 +- addSplitsReceiver(uint8,uint8,uint32): 84 +- setStreamBalanceWithClamping(uint8,int128): 82 +- addSplitsReceiverWithClamping(uint8,uint8,uint32): 80 +- testSetStreamBalanceShouldNotRevert(uint8,int128): 80 +- testSplitShouldNotRevert(uint8): 80 +- squeezeAllAndReceiveAndSplitAndCollectToSelf(uint8): 79 +- addStreamImmediatelySqueezable(uint8,uint8,uint160): 79 +- testSetSplitsShouldNotRevert(uint8,uint8,uint32): 78 +- invariantSumAmtDeltaIsZero(uint8): 78 +- 
testReceiveStreamsViewConsistency(uint8,uint32): 76 +- squeezeToSelf(uint8): 74 +- collectToSelf(uint8): 72 +- setStreams(uint8,uint8,uint160,uint32,uint32,int128): 70 +- receiveStreamsAllCycles(uint8): 69 +- invariantWithdrawShouldAlwaysFail(uint256): 68 +- addStream(uint8,uint8,uint160,uint32,uint32,int128): 68 +- squeezeWithFuzzedHistory(uint8,uint8,uint256,bytes32): 67 +- setStreamsWithClamping(uint8,uint8,uint160,uint32,uint32,int128): 67 +- splitAndCollectToSelf(uint8): 67 +- testSqueezeWithFullyHashedHistory(uint8,uint8): 65 +- give(uint8,uint8,uint128): 65 +- setSplits(uint8,uint8,uint32): 65 +- testSqueezeTwice(uint8,uint8,uint256,bytes32): 65 +- testSetStreamsShouldNotRevert(uint8,uint8,uint160,uint32,uint32,int128): 64 +- squeezeAllSenders(uint8): 63 +- removeStream(uint8,uint256): 62 +- testCollectableAfterSplit(uint8): 58 +- testCollectShouldNotRevert(uint8,uint8): 56 +- testReceiveStreamsViewVsActual(uint8,uint32): 55 +- receiveStreams(uint8,uint32): 55 +- setSplitsWithClamping(uint8,uint8,uint32): 55 +- testGiveShouldNotRevert(uint8,uint8,uint128): 47 +- setStreamBalance(uint8,int128): 47 +- squeezeWithDefaultHistory(uint8,uint8): 45 +- testSplitViewVsActual(uint8): 45 +- testAddSplitsShouldNotRevert(uint8,uint8,uint32): 30 +- testSqueezeWithDefaultHistoryShouldNotRevert(uint8,uint8): 23 + +Number of Unique Methods: 65 +``` diff --git a/README.md b/README.md index 65809192..162145df 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ `medusa` is a cross-platform [go-ethereum](https://github.com/ethereum/go-ethereum/)-based smart contract fuzzer inspired by [Echidna](https://github.com/crytic/echidna). It provides parallelized fuzz testing of smart contracts through CLI, or its Go API that allows custom user-extended testing methodology. -**Disclaimer**: Please note that `medusa` is an **experimental** smart contract fuzzer. Currently, it should _not_ be adopted into production systems. 
We intend for `medusa` to reach the same capabilities and maturity that Echidna has. Until then, be careful using `medusa` as your primary smart contract fuzz testing solution. Additionally, please be aware that the Go-level testing API is still **under development** and is subject to breaking changes. +**Disclaimer**: The Go-level testing API is still **under development** and is subject to breaking changes. ## Features @@ -17,94 +17,34 @@ It provides parallelized fuzz testing of smart contracts through CLI, or its Go - ✔️**Extensible low-level testing API** through events and hooks provided throughout the fuzzer, workers, and test chains. - ❌ **Extensible high-level testing API** allowing for the addition of per-contract or global post call/event property tests with minimal effort. -## Installation +## Documentation -### Precompiled binaries +To learn more about how to install and use `medusa`, please refer to our [documentation](./docs/src/SUMMARY.md). -To use `medusa`, first ensure you have [crytic-compile](https://github.com/crytic/crytic-compile) and a suitable compilation framework (e.g. `solc`, `truffle`, `hardhat`) installed on your machine. +For a better viewing experience, we recommend you install [mdbook](https://rust-lang.github.io/mdBook/guide/installation.html) +and then running the following steps from medusa's source directory: -You can then fetch the latest binaries for your platform from our [GitHub Releases](https://github.com/crytic/medusa/releases) page. - -### Building from source - -#### Requirements - -- You must have at least go 1.18 installed. -- [Windows only] The `go-ethereum` dependency may require [TDM-GCC](https://jmeubank.github.io/tdm-gcc/) to build. - -#### Steps - -- Clone the repository, then execute `go build` in the repository root. -- Go will automatically fetch all dependencies and build a binary for you in the same folder when completed. 
- -## Usage - -Although we recommend users run `medusa` in a configuration file driven format for more customizability, you can also run `medusa` through the CLI directly. -We provide instructions for both below. - -We recommend you familiarize yourself with writing [assertion](https://github.com/crytic/building-secure-contracts/blob/master/program-analysis/echidna/basic/assertion-checking.md) and [property](https://github.com/crytic/building-secure-contracts/blob/master/program-analysis/echidna/introduction/how-to-test-a-property.md) tests for Echidna. `medusa` supports Echidna-like property testing with config-defined function prefixes (default: `fuzz_`) and assertion testing using Solidity `assert(...)` statements. - -### Command-line only - -You can use the following command to run `medusa` against a contract: - -```console -medusa fuzz --target contract.sol --deployment-order ContractName +```bash +cd docs +mdbook serve ``` -Where: - -- `--target` specifies the path `crytic-compile` should use to compile contracts -- `--deployment-order` specifies comma-separated names of contracts to be deployed for testing. +## Install -**Note:** Check out the [command-line interface](https://github.com/crytic/medusa/wiki/Command-Line-Interface) wiki page, or run `medusa --help` for more information. +MacOS users can install the latest release of `medusa` using Homebrew: -### Configuration file driven +```shell -The preferred method to use medusa is to enter your project directory (hardhat directory, or directory with your contracts), -then execute the following command: - -```console -medusa init +brew install medusa ``` -This will create a `medusa.json` in your current folder. There are two required fields that should be set correctly: - -- Set your `"target"` under `"compilation"` to point to the file/directory which `crytic-compile` should use to build your contracts. 
-- Put the names of any contracts you wish to deploy and run tests against in the `"deploymentOrder"` field. This must be non-empty. - -After you have a configuration in place, you can execute: +The master branch can be installed using the following command: -```console -medusa fuzz +```shell +brew install --HEAD medusa ``` -This will use the `medusa.json` configuration in the current directory and begin the fuzzing campaign. - -**Note:** Check out the [project configuration](https://github.com/crytic/medusa/wiki/Project-Configuration) wiki page, or run `medusa --help` for more information. - -## Running Unit Tests - -First, install [crytic-compile](https://github.com/crytic/crytic-compile), [solc-select](https://github.com/crytic/solc-select), and ensure you have `solc` (version >=0.8.7), `truffle`, and `hardhat` available on your system. - -- From the root of the repository, invoke `go test -v ./...` on through command-line to run tests from all packages at or below the root. - - Or enter each package directory to run `go test -v .` to test the immediate package. - - Note: the `-v` parameter provides verbose output. -- Otherwise, use an IDE like [GoLand](https://www.jetbrains.com/go/) to visualize the tests and logically separate output. - -## FAQs - -**Why create `medusa` if Echidna is already working just fine?** - -With `medusa`, we are exploring a different EVM implementation and language for our smart contract fuzzer. We believe that -experimenting with a new fuzzer provides us with the following benefits: - -- Since `medusa` is written in Go, we believe that this will **lower the barrier of entry for external contributions**. - We have taken great care in thoroughly commenting our code so that it is easy for new contributors to get up-to-speed and start contributing! -- The use of Go allows us to build an API to hook into the various parts of the fuzzer to build custom testing methodologies. 
See the [API Overview (WIP)]() section in the Wiki for more details. -- Our forked version of go-ethereum, [`medusa-geth`](https://github.com/crytic/medusa-geth), exhibits behavior that is closer to that of the EVM in production environments. -- We can take the lessons we learned while developing Echidna to create a fuzzer that is just as feature-rich but with additional capabilities to - create powerful and unique testing methodologies. +For more information on building from source or obtaining binaries for Windows and Linux, please refer to the [installation guide](./docs/src/getting_started/installation.md). ## Contributing diff --git a/chain/cheat_code_contract.go b/chain/cheat_code_contract.go index 43b8fa2f..08ceb6ea 100644 --- a/chain/cheat_code_contract.go +++ b/chain/cheat_code_contract.go @@ -2,6 +2,8 @@ package chain import ( "encoding/binary" + "fmt" + "github.com/crytic/medusa/logging" "github.com/ethereum/go-ethereum/accounts/abi" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/core/vm" @@ -52,6 +54,30 @@ type cheatCodeRawReturnData struct { Err error } +// getCheatCodeProviders obtains a cheatCodeTracer (used to power cheat code analysis) and associated CheatCodeContract +// objects linked to the tracer (providing on-chain callable methods as an entry point). These objects are attached to +// the TestChain to enable cheat code functionality. +// Returns the tracer and associated pre-compile contracts, or an error, if one occurred. +func getCheatCodeProviders() (*cheatCodeTracer, []*CheatCodeContract, error) { + // Create a cheat code tracer and attach it to the chain. 
+ tracer := newCheatCodeTracer() + + // Obtain our standard cheat code pre-compile + stdCheatCodeContract, err := getStandardCheatCodeContract(tracer) + if err != nil { + return nil, nil, err + } + + // Obtain the console.log pre-compile + consoleCheatCodeContract, err := getConsoleLogCheatCodeContract(tracer) + if err != nil { + return nil, nil, err + } + + // Return the tracer and precompiles + return tracer, []*CheatCodeContract{stdCheatCodeContract, consoleCheatCodeContract}, nil +} + // newCheatCodeContract returns a new precompiledContract which uses the attached cheatCodeTracer for execution // context. func newCheatCodeContract(tracer *cheatCodeTracer, address common.Address, name string) *CheatCodeContract { @@ -96,16 +122,16 @@ func (c *CheatCodeContract) Abi() *abi.ABI { } // addMethod adds a new method to the precompiled contract. -// Returns an error if one occurred. +// Throws a panic if either the name is the empty string or the handler is nil. func (c *CheatCodeContract) addMethod(name string, inputs abi.Arguments, outputs abi.Arguments, handler cheatCodeMethodHandler) { // Verify a method name was provided if name == "" { - panic("could not add method to precompiled cheatcode contract, empty method name provided") + logging.GlobalLogger.Panic("Failed to add method to precompile cheatcode contract", fmt.Errorf("empty method name provided")) } // Verify a method handler was provided if handler == nil { - panic("could not add method to precompiled cheatcode contract, nil method handler provided") + logging.GlobalLogger.Panic("Failed to add method to precompile cheatcode contract", fmt.Errorf("nil method handler provided")) } // Set the method information in our method lookup @@ -115,7 +141,6 @@ func (c *CheatCodeContract) addMethod(name string, inputs abi.Arguments, outputs method: method, handler: handler, } - // Add the method to the ABI. // Note: Normally the key here should be the method name, not sig. 
But cheat code contracts have duplicate // method names with different parameter types, so we use this so they don't override. diff --git a/chain/cheat_code_tracer.go b/chain/cheat_code_tracer.go index 699ddc16..208b7115 100644 --- a/chain/cheat_code_tracer.go +++ b/chain/cheat_code_tracer.go @@ -1,10 +1,14 @@ package chain import ( + "math/big" + "github.com/crytic/medusa/chain/types" "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/tracing" + coretypes "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/core/vm" - "math/big" + "github.com/ethereum/go-ethereum/eth/tracers" ) // cheatCodeTracer represents an EVM.Logger which tracks and patches EVM execution state to enable extended @@ -18,13 +22,16 @@ type cheatCodeTracer struct { callDepth uint64 // evm refers to the EVM instance last captured. - evm *vm.EVM + evmContext *tracing.VMContext // callFrames represents per-call-frame data deployment information being captured by the tracer. callFrames []*cheatCodeTracerCallFrame // results stores the tracer output after a transaction has concluded. results *cheatCodeTracerResults + + // nativeTracer is the underlying tracer interface that the cheatcode tracer follows + nativeTracer *TestChainTracer } // cheatCodeTracerCallFrame represents per-call-frame data traced by a cheatCodeTracer. @@ -57,13 +64,14 @@ type cheatCodeTracerCallFrame struct { // vmOp describes the current call frame's last instruction executed. vmOp vm.OpCode // vmScope describes the current call frame's scope context. - vmScope *vm.ScopeContext + vmScope tracing.OpContext // vmReturnData describes the current call frame's return data (set on exit). vmReturnData []byte // vmErr describes the current call frame's returned error (set on exit), nil if no error. vmErr error } +// cheatCodeTracerResults holds the hooks that need to be executed when the chain reverts. 
type cheatCodeTracerResults struct { // onChainRevertHooks describes hooks which are to be executed when the chain reverts. onChainRevertHooks types.GenericHookFuncs @@ -72,9 +80,25 @@ type cheatCodeTracerResults struct { // newCheatCodeTracer creates a cheatCodeTracer and returns it. func newCheatCodeTracer() *cheatCodeTracer { tracer := &cheatCodeTracer{} + innerTracer := &tracers.Tracer{ + Hooks: &tracing.Hooks{ + OnTxStart: tracer.OnTxStart, + OnTxEnd: tracer.OnTxEnd, + OnEnter: tracer.OnEnter, + OnExit: tracer.OnExit, + OnOpcode: tracer.OnOpcode, + }, + } + tracer.nativeTracer = &TestChainTracer{Tracer: innerTracer, CaptureTxEndSetAdditionalResults: tracer.CaptureTxEndSetAdditionalResults} + return tracer } +// NativeTracer returns the underlying TestChainTracer. +func (t *cheatCodeTracer) NativeTracer() *TestChainTracer { + return t.nativeTracer +} + // bindToChain is called by the TestChain which created the tracer to set its reference. // Note: This is done because of the cheat code system's dependency on the genesis block, as well as chain's dependency // on it, which prevents the chain being set in the tracer on initialization. @@ -98,80 +122,78 @@ func (t *cheatCodeTracer) CurrentCallFrame() *cheatCodeTracerCallFrame { return t.callFrames[t.callDepth] } -// CaptureTxStart is called upon the start of transaction execution, as defined by vm.EVMLogger. -func (t *cheatCodeTracer) CaptureTxStart(gasLimit uint64) { +// OnTxStart is called upon the start of transaction execution, as defined by tracers.Tracer. +func (t *cheatCodeTracer) OnTxStart(vm *tracing.VMContext, tx *coretypes.Transaction, from common.Address) { // Reset our capture state t.callDepth = 0 t.callFrames = make([]*cheatCodeTracerCallFrame, 0) t.results = &cheatCodeTracerResults{ onChainRevertHooks: nil, } + // Store our evm reference + t.evmContext = vm } -// CaptureTxEnd is called upon the end of transaction execution, as defined by vm.EVMLogger. 
-func (t *cheatCodeTracer) CaptureTxEnd(restGas uint64) { +// OnTxEnd is called upon the end of transaction execution, as defined by tracers.Tracer +func (t *cheatCodeTracer) OnTxEnd(*coretypes.Receipt, error) { } -// CaptureStart initializes the tracing operation for the top of a call frame, as defined by vm.EVMLogger. -func (t *cheatCodeTracer) CaptureStart(env *vm.EVM, from common.Address, to common.Address, create bool, input []byte, gas uint64, value *big.Int) { - // Store our evm reference - t.evm = env +// OnEnter initializes the tracing operation for the top of a call frame, as defined by tracers.Tracer. +func (t *cheatCodeTracer) OnEnter(depth int, typ byte, from common.Address, to common.Address, input []byte, gas uint64, value *big.Int) { + // Check to see if this is the top level call frame + isTopLevelFrame := depth == 0 + var callFrameData *cheatCodeTracerCallFrame + if isTopLevelFrame { + // Create our call frame struct to track data for this initial entry call frame. + callFrameData = &cheatCodeTracerCallFrame{} + } else { + // We haven't updated our call depth yet, so obtain the "previous" call frame (current for now) + previousCallFrame := t.CurrentCallFrame() + + // Create our call frame struct to track data for this initial entry call frame. + // We forward our "next frame hooks" to this frame, then clear them from the previous frame. + callFrameData = &cheatCodeTracerCallFrame{ + onFrameExitRestoreHooks: previousCallFrame.onNextFrameExitRestoreHooks, + } + previousCallFrame.onNextFrameExitRestoreHooks = nil + + // Increase our call depth now that we're entering a new call frame. + t.callDepth++ + } - // Create our call frame struct to track data for this initial entry call frame. - callFrameData := &cheatCodeTracerCallFrame{} + // Append our new call frame t.callFrames = append(t.callFrames, callFrameData) + + // Note: We do not execute events for "next frame enter" here, as we do not yet have scope information. 
+ // Those events are executed when the first EVM instruction is executed in the new scope. } -// CaptureEnd is called after a call to finalize tracing completes for the top of a call frame, as defined by vm.EVMLogger. -func (t *cheatCodeTracer) CaptureEnd(output []byte, gasUsed uint64, err error) { +// OnExit is called after a call to finalize tracing completes for the top of a call frame, as defined by tracers.Tracer. +func (t *cheatCodeTracer) OnExit(depth int, output []byte, gasUsed uint64, err error, reverted bool) { // Execute all current call frame exit hooks exitingCallFrame := t.callFrames[t.callDepth] exitingCallFrame.onFrameExitRestoreHooks.Execute(false, true) - exitingCallFrame.onTopFrameExitRestoreHooks.Execute(false, true) - // If we didn't encounter an error in this call frame, we push our upward propagating revert events up one frame. - if err == nil { - // Store these revert hooks in our results. - t.results.onChainRevertHooks = append(t.results.onChainRevertHooks, exitingCallFrame.onChainRevertRestoreHooks...) + var parentCallFrame *cheatCodeTracerCallFrame + if depth == 0 { + // If this is the top-level call frame, execute all of its exit hooks + exitingCallFrame.onTopFrameExitRestoreHooks.Execute(false, true) } else { - // We hit an error, so a revert occurred before this tx was committed. - exitingCallFrame.onChainRevertRestoreHooks.Execute(false, true) + // If not, retrieve the parent call frame + parentCallFrame = t.callFrames[t.callDepth-1] } // We're exiting the current frame, so remove our frame data. t.callFrames = t.callFrames[:t.callDepth] -} - -// CaptureEnter is called upon entering of the call frame, as defined by vm.EVMLogger. 
-func (t *cheatCodeTracer) CaptureEnter(typ vm.OpCode, from common.Address, to common.Address, input []byte, gas uint64, value *big.Int) { - // We haven't updated our call depth yet, so obtain the "previous" call frame (current for now) - previousCallFrame := t.CurrentCallFrame() - // Increase our call depth now that we're entering a new call frame. - t.callDepth++ - - // Create our call frame struct to track data for this initial entry call frame. - // We forward our "next frame hooks" to this frame, then clear them from the previous frame. - callFrameData := &cheatCodeTracerCallFrame{ - onFrameExitRestoreHooks: previousCallFrame.onNextFrameExitRestoreHooks, - } - previousCallFrame.onNextFrameExitRestoreHooks = nil - t.callFrames = append(t.callFrames, callFrameData) - - // Note: We do not execute events for "next frame enter" here, as we do not yet have scope information. - // Those events are executed when the first EVM instruction is executed in the new scope. -} - -// CaptureExit is called upon exiting of the call frame, as defined by vm.EVMLogger. -func (t *cheatCodeTracer) CaptureExit(output []byte, gasUsed uint64, err error) { - // Execute all current call frame exit hooks - exitingCallFrame := t.callFrames[t.callDepth] - exitingCallFrame.onFrameExitRestoreHooks.Execute(false, true) - parentCallFrame := t.callFrames[t.callDepth-1] - - // If we didn't encounter an error in this call frame, we push our upward propagating revert events up one frame. - if err == nil { + // If we didn't encounter an error in this call frame, we push our upward propagating restore events up one frame. + if err == nil && depth == 0 { + // Since this is the top call frame, we add the revert events to the results of the tracer and return early + t.results.onChainRevertHooks = append(t.results.onChainRevertHooks, exitingCallFrame.onChainRevertRestoreHooks...) 
+ return + } else if err == nil { + // Propagate hooks up to the parent call frame parentCallFrame.onTopFrameExitRestoreHooks = append(parentCallFrame.onTopFrameExitRestoreHooks, exitingCallFrame.onTopFrameExitRestoreHooks...) parentCallFrame.onChainRevertRestoreHooks = append(parentCallFrame.onChainRevertRestoreHooks, exitingCallFrame.onChainRevertRestoreHooks...) } else { @@ -179,22 +201,19 @@ func (t *cheatCodeTracer) CaptureExit(output []byte, gasUsed uint64, err error) exitingCallFrame.onChainRevertRestoreHooks.Execute(false, true) } - // We're exiting the current frame, so remove our frame data. - t.callFrames = t.callFrames[:t.callDepth] - // Decrease our call depth now that we've exited a call frame. t.callDepth-- } -// CaptureState records data from an EVM state update, as defined by vm.EVMLogger. -func (t *cheatCodeTracer) CaptureState(pc uint64, op vm.OpCode, gas, cost uint64, scope *vm.ScopeContext, rData []byte, depth int, vmErr error) { +// OnOpcode records data from an EVM state update, as defined by tracers.Tracer. +func (t *cheatCodeTracer) OnOpcode(pc uint64, op byte, gas, cost uint64, scope tracing.OpContext, rData []byte, depth int, err error) { // Set our current frame information. currentCallFrame := t.CurrentCallFrame() currentCallFrame.vmPc = pc - currentCallFrame.vmOp = op + currentCallFrame.vmOp = vm.OpCode(op) currentCallFrame.vmScope = scope currentCallFrame.vmReturnData = rData - currentCallFrame.vmErr = vmErr + currentCallFrame.vmErr = err // We execute our entered next frame hooks here (from our previous call frame), as we now have scope information. if t.callDepth > 0 { @@ -202,11 +221,6 @@ func (t *cheatCodeTracer) CaptureState(pc uint64, op vm.OpCode, gas, cost uint64 } } -// CaptureFault records an execution fault, as defined by vm.EVMLogger. 
-func (t *cheatCodeTracer) CaptureFault(pc uint64, op vm.OpCode, gas, cost uint64, scope *vm.ScopeContext, depth int, err error) { - -} - // CaptureTxEndSetAdditionalResults can be used to set additional results captured from execution tracing. If this // tracer is used during transaction execution (block creation), the results can later be queried from the block. // This method will only be called on the added tracer if it implements the extended TestChainTracer interface. diff --git a/chain/config/config.go b/chain/config/config.go index dddc38c3..6a3b0c4e 100644 --- a/chain/config/config.go +++ b/chain/config/config.go @@ -13,6 +13,12 @@ type TestChainConfig struct { // CheatCodeConfig indicates the configuration for EVM cheat codes to use. CheatCodeConfig CheatCodeConfig `json:"cheatCodes"` + + // SkipAccountChecks skips account pre-checks like nonce validation and disallowing non-EOA tx senders (this is done in eth_call, for instance). + SkipAccountChecks bool `json:"skipAccountChecks"` + + // ContractAddressOverrides describes contracts that are going to be deployed at deterministic addresses + ContractAddressOverrides map[common.Hash]common.Address `json:"contractAddressOverrides,omitempty"` } // CheatCodeConfig describes any configuration options related to the use of vm extensions (a.k.a. cheat codes) @@ -27,9 +33,16 @@ type CheatCodeConfig struct { // GetVMConfigExtensions derives a vm.ConfigExtensions from the provided TestChainConfig. func (t *TestChainConfig) GetVMConfigExtensions() *vm.ConfigExtensions { - // Obtain our cheat code precompiled contracts. 
+ // Create a copy of the contract address overrides that can be ephemerally updated by medusa-geth + contractAddressOverrides := make(map[common.Hash]common.Address) + for hash, addr := range t.ContractAddressOverrides { + contractAddressOverrides[hash] = addr + } + + // Obtain our vm config extensions data structure return &vm.ConfigExtensions{ - OverrideCodeSizeCheck: t.CodeSizeCheckDisabled, - AdditionalPrecompiles: make(map[common.Address]vm.PrecompiledContract), + OverrideCodeSizeCheck: t.CodeSizeCheckDisabled, + AdditionalPrecompiles: make(map[common.Address]vm.PrecompiledContract), + ContractAddressOverrides: contractAddressOverrides, } } diff --git a/chain/config/config_defaults.go b/chain/config/config_defaults.go index 72ed8915..5a611c1c 100644 --- a/chain/config/config_defaults.go +++ b/chain/config/config_defaults.go @@ -10,6 +10,7 @@ func DefaultTestChainConfig() (*TestChainConfig, error) { CheatCodesEnabled: true, EnableFFI: false, }, + SkipAccountChecks: true, } // Return the generated configuration. diff --git a/chain/console_log_cheat_code_contract.go b/chain/console_log_cheat_code_contract.go new file mode 100644 index 00000000..ee086c59 --- /dev/null +++ b/chain/console_log_cheat_code_contract.go @@ -0,0 +1,125 @@ +package chain + +import ( + "strconv" + + "github.com/crytic/medusa/utils" + "github.com/ethereum/go-ethereum/accounts/abi" + "github.com/ethereum/go-ethereum/common" +) + +// ConsoleLogContractAddress is the address for the console.log precompile contract +var ConsoleLogContractAddress = common.HexToAddress("0x000000000000000000636F6e736F6c652e6c6f67") + +// getConsoleLogCheatCodeContract obtains a CheatCodeContract which implements the console.log functions. +// Returns the precompiled contract, or an error if there is one. +func getConsoleLogCheatCodeContract(tracer *cheatCodeTracer) (*CheatCodeContract, error) { + // Create a new precompile to add methods to. 
+ contract := newCheatCodeContract(tracer, ConsoleLogContractAddress, "Console") + + // Define all the ABI types needed for console.log functions + typeUint256, err := abi.NewType("uint256", "", nil) + if err != nil { + return nil, err + } + typeInt256, err := abi.NewType("int256", "", nil) + if err != nil { + return nil, err + } + typeString, err := abi.NewType("string", "", nil) + if err != nil { + return nil, err + } + typeBool, err := abi.NewType("bool", "", nil) + if err != nil { + return nil, err + } + typeAddress, err := abi.NewType("address", "", nil) + if err != nil { + return nil, err + } + typeBytes, err := abi.NewType("bytes", "", nil) + if err != nil { + return nil, err + } + + // We will store all the fixed byte (e.g. byte1, byte2) in a mapping + const numFixedByteTypes = 32 + fixedByteTypes := make(map[int]abi.Type, numFixedByteTypes) + for i := 1; i <= numFixedByteTypes; i++ { + byteString := "bytes" + strconv.FormatInt(int64(i), 10) + fixedByteTypes[i], err = abi.NewType(byteString, "", nil) + if err != nil { + return nil, err + } + } + + // We have a few special log function signatures outside all the permutations of (string, uint256, bool, address). + // These include log(int256), log(bytes), log(bytesX), and log(string, uint256). So, we will manually create these + // signatures and then programmatically iterate through all the permutations. + + // Note that none of the functions actually do anything - they just have to be callable so that the execution + // traces can show the arguments that the user wants to log! 
+ + // log(int256): Log an int256 + contract.addMethod("log", abi.Arguments{{Type: typeInt256}}, abi.Arguments{}, + func(tracer *cheatCodeTracer, inputs []any) ([]any, *cheatCodeRawReturnData) { + return nil, nil + }, + ) + + // log(bytes): Log bytes + contract.addMethod("log", abi.Arguments{{Type: typeBytes}}, abi.Arguments{}, + func(tracer *cheatCodeTracer, inputs []any) ([]any, *cheatCodeRawReturnData) { + return nil, nil + }, + ) + + // Now, we will add the logBytes1, logBytes2, and so on in a loop + for i := 1; i <= numFixedByteTypes; i++ { + // Create local copy of abi argument + fixedByteType := fixedByteTypes[i] + + // Add the method + contract.addMethod("log", abi.Arguments{{Type: fixedByteType}}, abi.Arguments{}, + func(tracer *cheatCodeTracer, inputs []any) ([]any, *cheatCodeRawReturnData) { + return nil, nil + }, + ) + } + + // log(string, int256): Log string with an int where the string could be formatted + contract.addMethod("log", abi.Arguments{{Type: typeString}, {Type: typeInt256}}, abi.Arguments{}, + func(tracer *cheatCodeTracer, inputs []any) ([]any, *cheatCodeRawReturnData) { + return nil, nil + }, + ) + + // These are the four parameter types that console.log() accepts + choices := abi.Arguments{{Type: typeUint256}, {Type: typeString}, {Type: typeBool}, {Type: typeAddress}} + + // Create all possible permutations (with repetition) where the number of choices increases from 1...len(choices) + permutations := make([]abi.Arguments, 0) + for n := 1; n <= len(choices); n++ { + nextSetOfPermutations := utils.PermutationsWithRepetition(choices, n) + for _, permutation := range nextSetOfPermutations { + permutations = append(permutations, permutation) + } + } + + // Iterate across each permutation to add their associated event and function handler + for i := 0; i < len(permutations); i++ { + // Make a local copy of the current permutation + permutation := permutations[i] + + // Create the function handler + contract.addMethod("log", permutation, 
abi.Arguments{}, + func(tracer *cheatCodeTracer, inputs []any) ([]any, *cheatCodeRawReturnData) { + return nil, nil + }, + ) + } + + // Return our precompile contract information. + return contract, nil +} diff --git a/chain/cheat_codes.go b/chain/standard_cheat_code_contract.go similarity index 77% rename from chain/cheat_codes.go rename to chain/standard_cheat_code_contract.go index 8404eaa4..97eb3288 100644 --- a/chain/cheat_codes.go +++ b/chain/standard_cheat_code_contract.go @@ -4,40 +4,31 @@ import ( "crypto/ecdsa" "encoding/hex" "fmt" - "github.com/crytic/medusa/utils" - "github.com/ethereum/go-ethereum/accounts/abi" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/crypto" "math/big" "os/exec" "strconv" "strings" + + "github.com/crytic/medusa/utils" + "github.com/ethereum/go-ethereum/accounts/abi" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/tracing" + "github.com/ethereum/go-ethereum/core/vm" + "github.com/ethereum/go-ethereum/crypto" + "github.com/holiman/uint256" ) -// getCheatCodeProviders obtains a cheatCodeTracer (used to power cheat code analysis) and associated CheatCodeContract -// objects linked to the tracer (providing on-chain callable methods as an entry point). These objects are attached to -// the TestChain to enable cheat code functionality. -// Returns the tracer and associated pre-compile contracts, or an error, if one occurred. -func getCheatCodeProviders() (*cheatCodeTracer, []*CheatCodeContract, error) { - // Create a cheat code tracer and attach it to the chain. 
- tracer := newCheatCodeTracer() +// StandardCheatcodeContractAddress is the address for the standard cheatcode contract +var StandardCheatcodeContractAddress = common.HexToAddress("0x7109709ECfa91a80626fF3989D68f67F5b1DD12D") - // Obtain our cheat code pre-compiles - stdCheatCodeContract, err := getStandardCheatCodeContract(tracer) - if err != nil { - return nil, nil, err - } - - // Return the tracer and precompiles - return tracer, []*CheatCodeContract{stdCheatCodeContract}, nil -} +// MaxUint64 holds the max value an uint64 can take +var _, MaxUint64 = utils.GetIntegerConstraints(false, 64) // getStandardCheatCodeContract obtains a CheatCodeContract which implements common cheat codes. // Returns the precompiled contract, or an error if one occurs. func getStandardCheatCodeContract(tracer *cheatCodeTracer) (*CheatCodeContract, error) { - // Define our address for this precompile contract, then create a new precompile to add methods to. - contractAddress := common.HexToAddress("0x7109709ECfa91a80626fF3989D68f67F5b1DD12D") - contract := newCheatCodeContract(tracer, contractAddress, "StdCheats") + // Create a new precompile to add methods to. + contract := newCheatCodeContract(tracer, StandardCheatcodeContractAddress, "StdCheats") // Define some basic ABI argument types typeAddress, err := abi.NewType("address", "", nil) @@ -83,13 +74,22 @@ func getStandardCheatCodeContract(tracer *cheatCodeTracer) (*CheatCodeContract, // Warp: Sets VM timestamp contract.addMethod( - "warp", abi.Arguments{{Type: typeUint64}}, abi.Arguments{}, + "warp", abi.Arguments{{Type: typeUint256}}, abi.Arguments{}, func(tracer *cheatCodeTracer, inputs []any) ([]any, *cheatCodeRawReturnData) { // Maintain our changes until the transaction exits. 
- original := tracer.evm.Context.Time - tracer.evm.Context.Time = inputs[0].(uint64) + originalTime := tracer.chain.pendingBlockContext.Time + + // Retrieve new timestamp and make sure it is LEQ max value of an uint64 + newTime := inputs[0].(*big.Int) + if newTime.Cmp(MaxUint64) > 0 { + return nil, cheatCodeRevertData([]byte("warp: timestamp exceeds max value of type(uint64).max")) + } + + // Set the time + tracer.chain.pendingBlockContext.Time = newTime.Uint64() tracer.CurrentCallFrame().onTopFrameExitRestoreHooks.Push(func() { - tracer.evm.Context.Time = original + // Reset the time + tracer.chain.pendingBlockContext.Time = originalTime }) return nil, nil }, @@ -100,10 +100,10 @@ func getStandardCheatCodeContract(tracer *cheatCodeTracer) (*CheatCodeContract, "roll", abi.Arguments{{Type: typeUint256}}, abi.Arguments{}, func(tracer *cheatCodeTracer, inputs []any) ([]any, *cheatCodeRawReturnData) { // Maintain our changes until the transaction exits. - original := new(big.Int).Set(tracer.evm.Context.BlockNumber) - tracer.evm.Context.BlockNumber.Set(inputs[0].(*big.Int)) + original := new(big.Int).Set(tracer.chain.pendingBlockContext.BlockNumber) + tracer.chain.pendingBlockContext.BlockNumber.Set(inputs[0].(*big.Int)) tracer.CurrentCallFrame().onTopFrameExitRestoreHooks.Push(func() { - tracer.evm.Context.BlockNumber.Set(original) + tracer.chain.pendingBlockContext.BlockNumber.Set(original) }) return nil, nil }, @@ -114,38 +114,28 @@ func getStandardCheatCodeContract(tracer *cheatCodeTracer) (*CheatCodeContract, "fee", abi.Arguments{{Type: typeUint256}}, abi.Arguments{}, func(tracer *cheatCodeTracer, inputs []any) ([]any, *cheatCodeRawReturnData) { // Maintain our changes until the transaction exits. 
- original := new(big.Int).Set(tracer.evm.Context.BaseFee) - tracer.evm.Context.BaseFee.Set(inputs[0].(*big.Int)) + original := new(big.Int).Set(tracer.chain.pendingBlockContext.BaseFee) + tracer.chain.pendingBlockContext.BaseFee.Set(inputs[0].(*big.Int)) tracer.CurrentCallFrame().onTopFrameExitRestoreHooks.Push(func() { - tracer.evm.Context.BaseFee.Set(original) + tracer.chain.pendingBlockContext.BaseFee.Set(original) }) return nil, nil }, ) - // Difficulty: Sets VM block number + // Difficulty: Updates difficulty contract.addMethod( "difficulty", abi.Arguments{{Type: typeUint256}}, abi.Arguments{}, func(tracer *cheatCodeTracer, inputs []any) ([]any, *cheatCodeRawReturnData) { // Maintain our changes until the transaction exits. - - // Obtain our spoofed difficulty spoofedDifficulty := inputs[0].(*big.Int) spoofedDifficultyHash := common.BigToHash(spoofedDifficulty) - - // Change difficulty in block context. - originalDifficulty := new(big.Int).Set(tracer.evm.Context.Difficulty) - tracer.evm.Context.Difficulty.Set(spoofedDifficulty) - tracer.CurrentCallFrame().onTopFrameExitRestoreHooks.Push(func() { - tracer.evm.Context.Difficulty.Set(originalDifficulty) - }) + originalRandom := tracer.chain.pendingBlockContext.Random // In newer evm versions, block.difficulty uses opRandom instead of opDifficulty. - // TODO: Check chain config here to see if the EVM version is 'Paris' or the consensus upgrade occurred. 
- originalRandom := tracer.evm.Context.Random - tracer.evm.Context.Random = &spoofedDifficultyHash + tracer.chain.pendingBlockContext.Random = &spoofedDifficultyHash tracer.CurrentCallFrame().onTopFrameExitRestoreHooks.Push(func() { - tracer.evm.Context.Random = originalRandom + tracer.chain.pendingBlockContext.Random = originalRandom }) return nil, nil }, @@ -156,7 +146,7 @@ func getStandardCheatCodeContract(tracer *cheatCodeTracer) (*CheatCodeContract, "chainId", abi.Arguments{{Type: typeUint256}}, abi.Arguments{}, func(tracer *cheatCodeTracer, inputs []any) ([]any, *cheatCodeRawReturnData) { // Maintain our changes unless this code path reverts or the whole transaction is reverted in the chain. - chainConfig := tracer.evm.ChainConfig() + chainConfig := tracer.chain.pendingBlockChainConfig original := chainConfig.ChainID chainConfig.ChainID = inputs[0].(*big.Int) tracer.CurrentCallFrame().onChainRevertRestoreHooks.Push(func() { @@ -173,7 +163,7 @@ func getStandardCheatCodeContract(tracer *cheatCodeTracer) (*CheatCodeContract, account := inputs[0].(common.Address) slot := inputs[1].([32]byte) value := inputs[2].([32]byte) - tracer.evm.StateDB.SetState(account, slot, value) + tracer.chain.State().SetState(account, slot, value) return nil, nil }, ) @@ -184,7 +174,7 @@ func getStandardCheatCodeContract(tracer *cheatCodeTracer) (*CheatCodeContract, func(tracer *cheatCodeTracer, inputs []any) ([]any, *cheatCodeRawReturnData) { account := inputs[0].(common.Address) slot := inputs[1].([32]byte) - value := tracer.evm.StateDB.GetState(account, slot) + value := tracer.chain.State().GetState(account, slot) return []any{value}, nil }, ) @@ -195,7 +185,7 @@ func getStandardCheatCodeContract(tracer *cheatCodeTracer) (*CheatCodeContract, func(tracer *cheatCodeTracer, inputs []any) ([]any, *cheatCodeRawReturnData) { account := inputs[0].(common.Address) code := inputs[1].([]byte) - tracer.evm.StateDB.SetCode(account, code) + tracer.chain.State().SetCode(account, code) return nil, 
nil }, ) @@ -206,9 +196,9 @@ func getStandardCheatCodeContract(tracer *cheatCodeTracer) (*CheatCodeContract, func(tracer *cheatCodeTracer, inputs []any) ([]any, *cheatCodeRawReturnData) { account := inputs[0].(common.Address) newBalance := inputs[1].(*big.Int) - originalBalance := tracer.evm.StateDB.GetBalance(account) - diff := new(big.Int).Sub(newBalance, originalBalance) - tracer.evm.StateDB.AddBalance(account, diff) + newBalanceUint256 := new(uint256.Int) + newBalanceUint256.SetFromBig(newBalance) + tracer.chain.State().SetBalance(account, newBalanceUint256, tracing.BalanceChangeUnspecified) return nil, nil }, ) @@ -218,7 +208,7 @@ func getStandardCheatCodeContract(tracer *cheatCodeTracer) (*CheatCodeContract, "getNonce", abi.Arguments{{Type: typeAddress}}, abi.Arguments{{Type: typeUint64}}, func(tracer *cheatCodeTracer, inputs []any) ([]any, *cheatCodeRawReturnData) { account := inputs[0].(common.Address) - nonce := tracer.evm.StateDB.GetNonce(account) + nonce := tracer.chain.State().GetNonce(account) return []any{nonce}, nil }, ) @@ -229,7 +219,7 @@ func getStandardCheatCodeContract(tracer *cheatCodeTracer) (*CheatCodeContract, func(tracer *cheatCodeTracer, inputs []any) ([]any, *cheatCodeRawReturnData) { account := inputs[0].(common.Address) nonce := inputs[1].(uint64) - tracer.evm.StateDB.SetNonce(account, nonce) + tracer.chain.State().SetNonce(account, nonce) return nil, nil }, ) @@ -239,10 +229,10 @@ func getStandardCheatCodeContract(tracer *cheatCodeTracer) (*CheatCodeContract, "coinbase", abi.Arguments{{Type: typeAddress}}, abi.Arguments{}, func(tracer *cheatCodeTracer, inputs []any) ([]any, *cheatCodeRawReturnData) { // Maintain our changes until the transaction exits. 
- original := tracer.evm.Context.Coinbase - tracer.evm.Context.Coinbase = inputs[0].(common.Address) + original := tracer.chain.pendingBlockContext.Coinbase + tracer.chain.pendingBlockContext.Coinbase = inputs[0].(common.Address) tracer.CurrentCallFrame().onTopFrameExitRestoreHooks.Push(func() { - tracer.evm.Context.Coinbase = original + tracer.chain.pendingBlockContext.Coinbase = original }) return nil, nil }, @@ -259,10 +249,12 @@ func getStandardCheatCodeContract(tracer *cheatCodeTracer) (*CheatCodeContract, // We entered the scope we want to prank, store the original value, patch, and add a hook to restore it // when this frame is exited. prankCallFrame := tracer.CurrentCallFrame() - original := prankCallFrame.vmScope.Contract.CallerAddress - prankCallFrame.vmScope.Contract.CallerAddress = inputs[0].(common.Address) + // We can cast OpContext to ScopeContext because that is the type passed to OnOpcode. + scopeContext := prankCallFrame.vmScope.(*vm.ScopeContext) + original := scopeContext.Caller() + scopeContext.Contract.CallerAddress = inputs[0].(common.Address) prankCallFrame.onFrameExitRestoreHooks.Push(func() { - prankCallFrame.vmScope.Contract.CallerAddress = original + scopeContext.Contract.CallerAddress = original }) }) return nil, nil @@ -278,15 +270,38 @@ func getStandardCheatCodeContract(tracer *cheatCodeTracer) (*CheatCodeContract, cheatCodeCallerFrame := tracer.PreviousCallFrame() // Store the original value, patch, and add a hook to restore it when this frame is exited. - original := cheatCodeCallerFrame.vmScope.Contract.CallerAddress - cheatCodeCallerFrame.vmScope.Contract.CallerAddress = inputs[0].(common.Address) + // We can cast OpContext to ScopeContext because that is the type passed to OnOpcode. 
+ scopeContext := cheatCodeCallerFrame.vmScope.(*vm.ScopeContext) + original := scopeContext.Caller() + scopeContext.Contract.CallerAddress = inputs[0].(common.Address) cheatCodeCallerFrame.onFrameExitRestoreHooks.Push(func() { - cheatCodeCallerFrame.vmScope.Contract.CallerAddress = original + scopeContext.Contract.CallerAddress = original }) return nil, nil }, ) + // snapshot: Takes a snapshot of the current state of the evm and returns the id associated with the snapshot + contract.addMethod( + "snapshot", abi.Arguments{}, abi.Arguments{{Type: typeUint256}}, + func(tracer *cheatCodeTracer, inputs []any) ([]any, *cheatCodeRawReturnData) { + snapshotID := tracer.chain.State().Snapshot() + + return []any{snapshotID}, nil + }, + ) + + // revertTo(uint256): Revert the state of the evm to a previous snapshot. Takes the snapshot id to revert to. + contract.addMethod( + "revertTo", abi.Arguments{{Type: typeUint256}}, abi.Arguments{{Type: typeBool}}, + func(tracer *cheatCodeTracer, inputs []any) ([]any, *cheatCodeRawReturnData) { + snapshotID := inputs[0].(*big.Int) + tracer.chain.State().RevertToSnapshot(int(snapshotID.Int64())) + + return []any{true}, nil + }, + ) + // FFI: Run arbitrary command on base OS contract.addMethod( "ffi", abi.Arguments{{Type: typeStringSlice}}, abi.Arguments{{Type: typeBytes}}, @@ -340,8 +355,12 @@ func getStandardCheatCodeContract(tracer *cheatCodeTracer) (*CheatCodeContract, // addr: Compute the address for a given private key contract.addMethod("addr", abi.Arguments{{Type: typeUint256}}, abi.Arguments{{Type: typeAddress}}, func(tracer *cheatCodeTracer, inputs []any) ([]any, *cheatCodeRawReturnData) { - // Using TOECDSAUnsafe b/c the private key is guaranteed to be of length 256 bits, at most - privateKey := crypto.ToECDSAUnsafe(inputs[0].(*big.Int).Bytes()) + // Get the private key object + privateKey, err := utils.GetPrivateKey(inputs[0].(*big.Int).Bytes()) + if err != nil { + errorMessage := "addr: " + err.Error() + return nil, 
cheatCodeRevertData([]byte(errorMessage)) + } // Get ECDSA public key publicKey := privateKey.Public().(*ecdsa.PublicKey) @@ -357,11 +376,15 @@ func getStandardCheatCodeContract(tracer *cheatCodeTracer) (*CheatCodeContract, contract.addMethod("sign", abi.Arguments{{Type: typeUint256}, {Type: typeBytes32}}, abi.Arguments{{Type: typeUint8}, {Type: typeBytes32}, {Type: typeBytes32}}, func(tracer *cheatCodeTracer, inputs []any) ([]any, *cheatCodeRawReturnData) { - // Using TOECDSAUnsafe b/c the private key is guaranteed to be of length 256 bits, at most - privateKey := crypto.ToECDSAUnsafe(inputs[0].(*big.Int).Bytes()) - digest := inputs[1].([32]byte) + // Get the private key object + privateKey, err := utils.GetPrivateKey(inputs[0].(*big.Int).Bytes()) + if err != nil { + errorMessage := "sign: " + err.Error() + return nil, cheatCodeRevertData([]byte(errorMessage)) + } // Sign digest + digest := inputs[1].([32]byte) sig, err := crypto.Sign(digest[:], privateKey) if err != nil { return nil, cheatCodeRevertData([]byte("sign: malformed input to signature algorithm")) @@ -448,7 +471,7 @@ func getStandardCheatCodeContract(tracer *cheatCodeTracer) (*CheatCodeContract, // Use a fixed array and copy the data over var bArray [32]byte - copy(bArray[:], bSlice[:32]) + copy(bArray[:], bSlice) return []any{bArray}, nil }, diff --git a/chain/test_chain.go b/chain/test_chain.go index ec60979c..0137c6c3 100644 --- a/chain/test_chain.go +++ b/chain/test_chain.go @@ -3,11 +3,16 @@ package chain import ( "errors" "fmt" + "math/big" + "sort" + "github.com/crytic/medusa/chain/config" "github.com/ethereum/go-ethereum/core/rawdb" + "github.com/ethereum/go-ethereum/core/tracing" + "github.com/ethereum/go-ethereum/triedb" + "github.com/ethereum/go-ethereum/triedb/hashdb" + "github.com/holiman/uint256" "golang.org/x/exp/maps" - "math/big" - "sort" chainTypes "github.com/crytic/medusa/chain/types" "github.com/crytic/medusa/chain/vendored" @@ -19,9 +24,7 @@ import ( 
"github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/core/vm" "github.com/ethereum/go-ethereum/ethdb" - "github.com/ethereum/go-ethereum/ethdb/memorydb" "github.com/ethereum/go-ethereum/params" - "github.com/ethereum/go-ethereum/trie" ) // TestChain represents a simulated Ethereum chain used for testing. It maintains blocks in-memory and strips away @@ -35,6 +38,14 @@ type TestChain struct { // pendingBlock is a block currently under construction by the chain which has not yet been committed. pendingBlock *chainTypes.Block + // pendingBlockContext is the vm.BlockContext for the current pending block. This is used by cheatcodes to override the EVM + // interpreter's behavior. This should be set when a new EVM is created by the test chain e.g. using vm.NewEVM. + pendingBlockContext *vm.BlockContext + + // pendingBlockChainConfig is params.ChainConfig for the current pending block. This is used by cheatcodes to override + // the chain ID. This should be set when a new EVM is created by the test chain e.g. using vm.NewEVM. + pendingBlockChainConfig *params.ChainConfig + // BlockGasLimit defines the maximum amount of gas that can be consumed by transactions in a block. // Transactions which push the block gas usage beyond this limit will not be added to a block without error. BlockGasLimit uint64 @@ -64,14 +75,11 @@ type TestChain struct { // This is constructed over the kvstore. db ethdb.Database - // keyValueStore represents the underlying key-value store used to construct the db. - keyValueStore *memorydb.Database - - // callTracerRouter forwards vm.EVMLogger and TestChainTracer calls to any instances added to it. This + // callTracerRouter forwards tracers.Tracer and TestChainTracer calls to any instances added to it. This // router is used for non-state changing calls. callTracerRouter *TestChainTracerRouter - // transactionTracerRouter forwards vm.EVMLogger and TestChainTracer calls to any instances added to it. 
This + // transactionTracerRouter forwards tracers.Tracer and TestChainTracer calls to any instances added to it. This // router is used for transaction execution when constructing blocks. transactionTracerRouter *TestChainTracerRouter @@ -82,13 +90,22 @@ type TestChain struct { // NewTestChain creates a simulated Ethereum backend used for testing, or returns an error if one occurred. // This creates a test chain with a test chain configuration and the provided genesis allocation and config. // If a nil config is provided, a default one is used. -func NewTestChain(genesisAlloc core.GenesisAlloc, testChainConfig *config.TestChainConfig) (*TestChain, error) { +func NewTestChain(genesisAlloc types.GenesisAlloc, testChainConfig *config.TestChainConfig) (*TestChain, error) { // Copy our chain config, so it is not shared across chains. chainConfig, err := utils.CopyChainConfig(params.TestChainConfig) if err != nil { return nil, err } + // TODO: go-ethereum doesn't set cancun start time for THEIR test `ChainConfig` struct. + // Note: We have our own `TestChainConfig` definition that is different (second argument in this function). + // We should allow the user to provide a go-ethereum `ChainConfig` to do custom fork selection, inside of our + // `TestChainConfig` definition. Or we should wrap it in our own struct to simplify the options and not pollute + // our overall medusa project config. + cancunTime := uint64(0) + chainConfig.ShanghaiTime = &cancunTime + chainConfig.CancunTime = &cancunTime + // Create our genesis definition with our default chain config. 
genesisDefinition := &core.Genesis{ Config: chainConfig, @@ -119,19 +136,20 @@ func NewTestChain(genesisAlloc core.GenesisAlloc, testChainConfig *config.TestCh // Obtain our VM extensions from our config vmConfigExtensions := testChainConfig.GetVMConfigExtensions() - // Obtain our cheatcode providers - cheatTracer, cheatContracts, err := getCheatCodeProviders() - if err != nil { - return nil, err - } - // Add all cheat code contract addresses to the genesis config. This is done because cheat codes are implemented // as pre-compiles, but we still want code to exist at these addresses, because smart contracts compiled with // newer solidity versions perform code size checks prior to external calls. // Additionally, add the pre-compiled cheat code contract to our vm extensions. + var cheatTracer *cheatCodeTracer if testChainConfig.CheatCodeConfig.CheatCodesEnabled { + // Obtain our cheatcode providers + var cheatContracts []*CheatCodeContract + cheatTracer, cheatContracts, err = getCheatCodeProviders() + if err != nil { + return nil, err + } for _, cheatContract := range cheatContracts { - genesisDefinition.Alloc[cheatContract.address] = core.GenesisAccount{ + genesisDefinition.Alloc[cheatContract.address] = types.Account{ Balance: big.NewInt(0), Code: []byte{0xFF}, } @@ -140,19 +158,22 @@ func NewTestChain(genesisAlloc core.GenesisAlloc, testChainConfig *config.TestCh } // Create an in-memory database - keyValueStore := memorydb.New() - db := rawdb.NewDatabase(keyValueStore) + db := rawdb.NewMemoryDatabase() + dbConfig := &triedb.Config{ + HashDB: hashdb.Defaults, + // TODO Add cleanCacheSize of 256 depending on the resolution of this issue https://github.com/ethereum/go-ethereum/issues/30099 + // PathDB: pathdb.Defaults, + } + trieDB := triedb.NewDatabase(db, dbConfig) // Commit our genesis definition to get a genesis block. 
- genesisBlock := genesisDefinition.MustCommit(db) + genesisBlock := genesisDefinition.MustCommit(db, trieDB) // Convert our genesis block (go-ethereum type) to a test chain block. testChainGenesisBlock := chainTypes.NewBlock(genesisBlock.Header()) // Create our state database over-top our database. - stateDatabase := state.NewDatabaseWithConfig(db, &trie.Config{ - Cache: 256, // this is important in keeping the database performant, so it does not need to fetch repetitively. - }) + stateDatabase := state.NewDatabaseWithConfig(db, dbConfig) // Create a tracer forwarder to support the addition of multiple tracers for transaction and call execution. transactionTracerRouter := NewTestChainTracerRouter() @@ -164,7 +185,6 @@ func NewTestChain(genesisAlloc core.GenesisAlloc, testChainConfig *config.TestCh BlockGasLimit: genesisBlock.Header().GasLimit, blocks: []*chainTypes.Block{testChainGenesisBlock}, pendingBlock: nil, - keyValueStore: keyValueStore, db: db, state: nil, stateDatabase: stateDatabase, @@ -176,9 +196,9 @@ func NewTestChain(genesisAlloc core.GenesisAlloc, testChainConfig *config.TestCh } // Add our internal tracers to this chain. - chain.AddTracer(newTestChainDeploymentsTracer(), true, false) + chain.AddTracer(newTestChainDeploymentsTracer().NativeTracer(), true, false) if testChainConfig.CheatCodeConfig.CheatCodesEnabled { - chain.AddTracer(cheatTracer, true, true) + chain.AddTracer(cheatTracer.NativeTracer(), true, true) cheatTracer.bindToChain(chain) } @@ -187,10 +207,21 @@ func NewTestChain(genesisAlloc core.GenesisAlloc, testChainConfig *config.TestCh if err != nil { return nil, err } + + // Set our state database logger e.g. to monitor OnCodeChange events. + stateDB.SetLogger(transactionTracerRouter.NativeTracer().Tracer.Hooks) chain.state = stateDB return chain, nil } +// Close will release any objects from the TestChain that must be _explicitly_ released. 
Currently, the one object that +// must be explicitly released is the stateDB trie's underlying cache. This cache, if not released, prevents the TestChain +// object from being freed by the garbage collector and causes a severe memory leak. +func (t *TestChain) Close() { + // Reset the state DB's cache + t.stateDatabase.TrieDB().Close() +} + // Clone recreates the current TestChain state into a new instance. This simply reconstructs the block/chain state // but does not perform any other API-related changes such as adding additional tracers the original had. Additionally, // this does not clone pending blocks. The provided method, if non-nil, is used as callback to provide an intermediate @@ -249,9 +280,9 @@ func (t *TestChain) Clone(onCreateFunc func(chain *TestChain) error) (*TestChain return targetChain, nil } -// AddTracer adds a given vm.EVMLogger or TestChainTracer to the TestChain. If directed, the tracer will be attached +// AddTracer adds a given tracers.Tracer or TestChainTracer to the TestChain. If directed, the tracer will be attached // for transactions and/or non-state changing calls made via CallContract. -func (t *TestChain) AddTracer(tracer vm.EVMLogger, txs bool, calls bool) { +func (t *TestChain) AddTracer(tracer *TestChainTracer, txs bool, calls bool) { if txs { t.transactionTracerRouter.AddTracer(tracer) } @@ -527,7 +558,7 @@ func (t *TestChain) RevertToBlockNumber(blockNumber uint64) error { // It takes an optional state argument, which is the state to execute the message over. If not provided, the // current pending state (or committed state if none is pending) will be used instead. // The state executed over may be a pending block state. 
-func (t *TestChain) CallContract(msg core.Message, state *state.StateDB, additionalTracers ...vm.EVMLogger) (*core.ExecutionResult, error) { +func (t *TestChain) CallContract(msg *core.Message, state *state.StateDB, additionalTracers ...*TestChainTracer) (*core.ExecutionResult, error) { // If our provided state is nil, use our current chain state. if state == nil { state = t.state @@ -537,8 +568,7 @@ func (t *TestChain) CallContract(msg core.Message, state *state.StateDB, additio snapshot := state.Snapshot() // Set infinite balance to the fake caller account - from := state.GetOrNewStateObject(msg.From()) - from.SetBalance(math.MaxBig256) + state.SetBalance(msg.From, uint256.MustFromBig(math.MaxBig256), tracing.BalanceChangeUnspecified) // Create our transaction and block contexts for the vm txContext := core.NewEVMTxContext(msg) @@ -547,27 +577,52 @@ func (t *TestChain) CallContract(msg core.Message, state *state.StateDB, additio // Create a new call tracer router that incorporates any additional tracers provided just for this call, while // still calling our internal tracers. extendedTracerRouter := NewTestChainTracerRouter() - extendedTracerRouter.AddTracer(t.callTracerRouter) + extendedTracerRouter.AddTracer(t.callTracerRouter.NativeTracer()) extendedTracerRouter.AddTracers(additionalTracers...) // Create our EVM instance. evm := vm.NewEVM(blockContext, txContext, state, t.chainConfig, vm.Config{ - Debug: true, - Tracer: extendedTracerRouter, + Tracer: extendedTracerRouter.NativeTracer().Tracer.Hooks, NoBaseFee: true, ConfigExtensions: t.vmConfigExtensions, }) + // Set our block context and chain config in order for cheatcodes to override what EVM interpreter sees. 
+ t.pendingBlockContext = &evm.Context + t.pendingBlockChainConfig = evm.ChainConfig() + // Create a tx from our msg, for hashing/receipt purposes + tx := utils.MessageToTransaction(msg) + + // Need to explicitly call OnTxStart hook + if evm.Config.Tracer != nil && evm.Config.Tracer.OnTxStart != nil { + evm.Config.Tracer.OnTxStart(evm.GetVMContext(), tx, msg.From) + } // Fund the gas pool, so it can execute endlessly (no block gas limit). gasPool := new(core.GasPool).AddGas(math.MaxUint64) // Perform our state transition to obtain the result. - res, err := core.NewStateTransition(evm, msg, gasPool).TransitionDb() + msgResult, err := core.ApplyMessage(evm, msg, gasPool) // Revert to our state snapshot to undo any changes. state.RevertToSnapshot(snapshot) - return res, err + // Gather receipt for OnTxEnd + receipt := &types.Receipt{Type: tx.Type()} + if msgResult.Failed() { + receipt.Status = types.ReceiptStatusFailed + } else { + receipt.Status = types.ReceiptStatusSuccessful + } + receipt.TxHash = tx.Hash() + receipt.GasUsed = msgResult.UsedGas + + // HACK: use OnTxEnd to store the execution trace. + // Need to explicitly call OnTxEnd + if evm.Config.Tracer != nil && evm.Config.Tracer.OnTxEnd != nil { + evm.Config.Tracer.OnTxEnd(receipt, err) + } + + return msgResult, err } // PendingBlock describes the current pending block which is being constructed and awaiting commitment to the chain. @@ -671,16 +726,13 @@ func (t *TestChain) PendingBlockCreateWithParameters(blockNumber uint64, blockTi // PendingBlockAddTx takes a message (internal txs) and adds it to the current pending block, updating the header // with relevant execution information. If a pending block was not created, an error is returned. -// Returns the constructed block, or an error if one occurred. -func (t *TestChain) PendingBlockAddTx(message core.Message) error { +// Returns an error if one occurred. 
+func (t *TestChain) PendingBlockAddTx(message *core.Message, additionalTracers ...*TestChainTracer) error { // If we don't have a pending block, return an error if t.pendingBlock == nil { return errors.New("could not add tx to the chain's pending block because no pending block was created") } - // Obtain our state root hash prior to execution. - previousStateRoot := t.pendingBlock.Header.Root - // Create a gas pool indicating how much gas can be spent executing the transaction. gasPool := new(core.GasPool).AddGas(t.pendingBlock.Header.GasLimit - t.pendingBlock.Header.GasUsed) @@ -690,28 +742,46 @@ func (t *TestChain) PendingBlockAddTx(message core.Message) error { // Create a new context to be used in the EVM environment blockContext := newTestChainBlockContext(t, t.pendingBlock.Header) - // Create our EVM instance. - evm := vm.NewEVM(blockContext, core.NewEVMTxContext(message), t.state, t.chainConfig, vm.Config{ - Debug: true, - Tracer: t.transactionTracerRouter, + // Create our VM config + vmConfig := vm.Config{ NoBaseFee: true, ConfigExtensions: t.vmConfigExtensions, - }) + } + + // Figure out whether we need to attach any more tracers + var extendedTracerRouter *TestChainTracerRouter + if len(additionalTracers) > 0 { + // If we have more tracers, extend the transaction tracer router's tracers with additional ones + extendedTracerRouter = NewTestChainTracerRouter() + extendedTracerRouter.AddTracer(t.transactionTracerRouter.NativeTracer()) + extendedTracerRouter.AddTracers(additionalTracers...) + } else { + extendedTracerRouter = t.transactionTracerRouter + } + + // Update the VM's tracer + vmConfig.Tracer = extendedTracerRouter.NativeTracer().Tracer.Hooks + + // Set tx context + t.state.SetTxContext(tx.Hash(), len(t.pendingBlock.Messages)) + + // Create our EVM instance. 
+ evm := vm.NewEVM(blockContext, core.NewEVMTxContext(message), t.state, t.chainConfig, vmConfig) + + // Set our block context and chain config in order for cheatcodes to override what EVM interpreter sees. + t.pendingBlockContext = &evm.Context + t.pendingBlockChainConfig = evm.ChainConfig() // Apply our transaction var usedGas uint64 - t.state.SetTxContext(tx.Hash(), len(t.pendingBlock.Messages)) - receipt, executionResult, err := vendored.EVMApplyTransaction(message, t.chainConfig, &t.pendingBlock.Header.Coinbase, gasPool, t.state, t.pendingBlock.Header.Number, t.pendingBlock.Hash, tx, &usedGas, evm) + receipt, executionResult, err := vendored.EVMApplyTransaction(message, t.chainConfig, t.testChainConfig, &t.pendingBlock.Header.Coinbase, gasPool, t.state, t.pendingBlock.Header.Number, t.pendingBlock.Hash, tx, &usedGas, evm) if err != nil { - // If we encountered an error, reset our state, as we couldn't add the tx. - t.state, _ = state.New(t.pendingBlock.Header.Root, t.stateDatabase, nil) return fmt.Errorf("test chain state write error when adding tx to pending block: %v", err) } // Create our message result messageResult := &chainTypes.MessageResults{ - PreStateRoot: previousStateRoot, - PostStateRoot: common.Hash{}, + PostStateRoot: common.BytesToHash(receipt.PostState), ExecutionResult: executionResult, Receipt: receipt, AdditionalResults: make(map[string]any, 0), @@ -720,31 +790,10 @@ func (t *TestChain) PendingBlockAddTx(message core.Message) error { // For every tracer we have, we call upon them to set their results for this transaction now. t.transactionTracerRouter.CaptureTxEndSetAdditionalResults(messageResult) - // Write state changes to database. - // NOTE: If this completes without an error, we know we didn't hit the block gas limit or other errors, so we are - // safe to update the block header afterwards. 
- root, err := t.state.Commit(t.chainConfig.IsEIP158(t.pendingBlock.Header.Number)) - if err != nil { - return fmt.Errorf("test chain state write error: %v", err) - } - if err := t.state.Database().TrieDB().Commit(root, false); err != nil { - // If we encountered an error, reset our state, as we couldn't add the tx. - t.state, _ = state.New(t.pendingBlock.Header.Root, t.stateDatabase, nil) - return fmt.Errorf("test chain trie write error: %v", err) - } - // Update our gas used in the block header t.pendingBlock.Header.GasUsed += receipt.GasUsed - // Update our block's bloom filter t.pendingBlock.Header.Bloom.Add(receipt.Bloom.Bytes()) - - // Update the header's state root hash, as well as our message result's - // Note: You could also retrieve the root without committing by using - // state.IntermediateRoot(config.IsEIP158(parentBlockNumber)). - t.pendingBlock.Header.Root = root - messageResult.PostStateRoot = root - // Update our block's transactions and results. t.pendingBlock.Messages = append(t.pendingBlock.Messages, message) t.pendingBlock.MessageResults = append(t.pendingBlock.MessageResults, messageResult) @@ -776,6 +825,25 @@ func (t *TestChain) PendingBlockCommit() error { return fmt.Errorf("could not commit chain's pending block, as no pending block was created") } + // Perform a state commit to obtain the root hash for our block. + root, err := t.state.Commit(t.pendingBlock.Header.Number.Uint64(), true) + t.pendingBlock.Header.Root = root + + if err != nil { + return err + } + + // Committing the state invalidates the cached tries and we need to reload the state. + // Otherwise, methods such as FillFromTestChainProperties will not work correctly. + t.state, err = state.New(root, t.stateDatabase, nil) + if err != nil { + return err + } + + // Discard the test chain's reference to the EVM interpreter's block context and chain config. + t.pendingBlockContext = nil + t.pendingBlockChainConfig = nil + // Append our new block to our chain. 
t.blocks = append(t.blocks, t.pendingBlock) @@ -784,7 +852,7 @@ func (t *TestChain) PendingBlockCommit() error { t.pendingBlock = nil // Emit our event for committing a new block as the chain head - err := t.Events.PendingBlockCommitted.Publish(PendingBlockCommittedEvent{ + err = t.Events.PendingBlockCommitted.Publish(PendingBlockCommittedEvent{ Chain: t, Block: pendingBlock, }) @@ -805,6 +873,8 @@ func (t *TestChain) PendingBlockDiscard() error { // Clear our pending block, but keep a copy of it to emit our event pendingBlock := t.pendingBlock t.pendingBlock = nil + t.pendingBlockContext = nil + t.pendingBlockChainConfig = nil // Emit our contract change events for the messages reverted err := t.emitContractChangeEvents(true, pendingBlock.MessageResults...) @@ -848,8 +918,9 @@ func (t *TestChain) emitContractChangeEvents(reverting bool, messageResults ...* // this execution result is being committed to chain. if deploymentChange.Creation { err = t.Events.ContractDeploymentAddedEventEmitter.Publish(ContractDeploymentsAddedEvent{ - Chain: t, - Contract: deploymentChange.Contract, + Chain: t, + Contract: deploymentChange.Contract, + DynamicDeployment: deploymentChange.DynamicCreation, }) } else if deploymentChange.Destroyed { err = t.Events.ContractDeploymentRemovedEventEmitter.Publish(ContractDeploymentsRemovedEvent{ @@ -879,8 +950,9 @@ func (t *TestChain) emitContractChangeEvents(reverting bool, messageResults ...* }) } else if deploymentChange.Destroyed { err = t.Events.ContractDeploymentAddedEventEmitter.Publish(ContractDeploymentsAddedEvent{ - Chain: t, - Contract: deploymentChange.Contract, + Chain: t, + Contract: deploymentChange.Contract, + DynamicDeployment: deploymentChange.DynamicCreation, }) } if err != nil { diff --git a/chain/test_chain_deployments_tracer.go b/chain/test_chain_deployments_tracer.go index c2672aaa..2687356d 100644 --- a/chain/test_chain_deployments_tracer.go +++ b/chain/test_chain_deployments_tracer.go @@ -1,10 +1,15 @@ package chain 
import ( + "math/big" + "github.com/crytic/medusa/chain/types" "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/tracing" + coretypes "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/core/vm" - "math/big" + "github.com/ethereum/go-ethereum/eth/tracers" + "github.com/ethereum/go-ethereum/params/forks" ) // testChainDeploymentsTracer implements TestChainTracer, capturing information regarding contract deployments and @@ -17,16 +22,16 @@ type testChainDeploymentsTracer struct { // callDepth refers to the current EVM depth during tracing. callDepth uint64 - // evm refers to the EVM instance last captured. - evm *vm.EVM + // evm refers to the last tracing.VMContext captured. + evmContext *tracing.VMContext // pendingCallFrames represents per-call-frame data deployment information being captured by the tracer. // This is committed as each call frame succeeds, so that contract deployments which later encountered an error // and reverted are not considered. The index of each element in the array represents its call frame depth. pendingCallFrames []*testChainDeploymentsTracerCallFrame - // selfDestructDestroysCode indicates whether the SELFDESTRUCT opcode is configured to remove contract code. - selfDestructDestroysCode bool + // nativeTracer is the underlying tracer interface that the deployment tracer follows + nativeTracer *TestChainTracer } // testChainDeploymentsTracerCallFrame represents per-call-frame data traced by a testChainDeploymentsTracer. @@ -37,139 +42,123 @@ type testChainDeploymentsTracerCallFrame struct { // newTestChainDeploymentsTracer creates a testChainDeploymentsTracer func newTestChainDeploymentsTracer() *testChainDeploymentsTracer { - tracer := &testChainDeploymentsTracer{ - selfDestructDestroysCode: true, // TODO: Update this when new EIP is introduced by checking the chain config. 
+ tracer := &testChainDeploymentsTracer{} + innerTracer := &tracers.Tracer{ + Hooks: &tracing.Hooks{ + OnTxStart: tracer.OnTxStart, + OnTxEnd: tracer.OnTxEnd, + OnEnter: tracer.OnEnter, + OnExit: tracer.OnExit, + OnOpcode: tracer.OnOpcode, + }, } + tracer.nativeTracer = &TestChainTracer{Tracer: innerTracer, CaptureTxEndSetAdditionalResults: tracer.CaptureTxEndSetAdditionalResults} + return tracer + } -// CaptureTxStart is called upon the start of transaction execution, as defined by vm.EVMLogger. -func (t *testChainDeploymentsTracer) CaptureTxStart(gasLimit uint64) { - // Reset our capture state - t.callDepth = 0 +// NativeTracer returns the underlying TestChainTracer. +func (t *testChainDeploymentsTracer) NativeTracer() *TestChainTracer { + return t.nativeTracer +} + +// OnTxStart is called upon the start of transaction execution, as defined by tracers.Tracer. +func (t *testChainDeploymentsTracer) OnTxStart(vm *tracing.VMContext, tx *coretypes.Transaction, from common.Address) { + // Reset our tracer state t.results = make([]types.DeployedContractBytecodeChange, 0) t.pendingCallFrames = make([]*testChainDeploymentsTracerCallFrame, 0) + + // Store our evm reference + t.evmContext = vm } -// CaptureTxEnd is called upon the end of transaction execution, as defined by vm.EVMLogger. -func (t *testChainDeploymentsTracer) CaptureTxEnd(restGas uint64) { +// OnTxEnd is called upon the end of transaction execution, as defined by tracers.Tracer. +func (t *testChainDeploymentsTracer) OnTxEnd(receipt *coretypes.Receipt, err error) { } -// CaptureStart initializes the tracing operation for the top of a call frame, as defined by vm.EVMLogger. -func (t *testChainDeploymentsTracer) CaptureStart(env *vm.EVM, from common.Address, to common.Address, create bool, input []byte, gas uint64, value *big.Int) { - // Store our evm reference - t.evm = env - - // Create our call frame struct to track data for this initial entry call frame. 
+// OnEnter is called upon entering of the call frame, as defined by tracers.Tracer. +func (t *testChainDeploymentsTracer) OnEnter(depth int, typ byte, from common.Address, to common.Address, input []byte, gas uint64, value *big.Int) { + // Create our call frame struct to track data for this call frame. callFrameData := &testChainDeploymentsTracerCallFrame{} t.pendingCallFrames = append(t.pendingCallFrames, callFrameData) + // Update call depth if this is not the top-level call frame + isTopLevelFrame := depth == 0 + if !isTopLevelFrame { + t.callDepth++ + } + // If this is a contract creation, record the `to` address as a pending deployment (if it succeeds upon exit, // we commit it). - if create { + if typ == byte(vm.CREATE) || typ == byte(vm.CREATE2) { callFrameData.results = append(callFrameData.results, types.DeployedContractBytecodeChange{ Contract: &types.DeployedContractBytecode{ Address: to, InitBytecode: input, RuntimeBytecode: nil, }, - Creation: true, - SelfDestructed: false, - Destroyed: false, + Creation: true, + DynamicCreation: !isTopLevelFrame, // If we're not at the top level, this is a dynamic creation. + SelfDestructed: false, + Destroyed: false, }) } } -// CaptureEnd is called after a call to finalize tracing completes for the top of a call frame, as defined by vm.EVMLogger. -func (t *testChainDeploymentsTracer) CaptureEnd(output []byte, gasUsed uint64, err error) { +// OnExit is called after a call to finalize tracing completes for the top of a call frame, as defined by tracers.Tracer. +func (t *testChainDeploymentsTracer) OnExit(depth int, output []byte, gasUsed uint64, err error, reverted bool) { + // Check to see if this is the top level call frame + isTopLevelFrame := depth == 0 + // Fetch runtime bytecode for all deployments in this frame which did not record one, before exiting. // We had to fetch it upon exit as it does not exist during creation of course. 
for _, contractChange := range t.pendingCallFrames[t.callDepth].results { if contractChange.Creation && contractChange.Contract.RuntimeBytecode == nil { - contractChange.Contract.RuntimeBytecode = t.evm.StateDB.GetCode(contractChange.Contract.Address) + contractChange.Contract.RuntimeBytecode = t.evmContext.StateDB.GetCode(contractChange.Contract.Address) } } - // If we didn't encounter an error in this call frame, we're at the end, so we commit all results. - if err == nil { + // If we didn't encounter any errors and this is the top level call frame, commit all the results + if isTopLevelFrame { t.results = append(t.results, t.pendingCallFrames[t.callDepth].results...) - } - - // We're exiting the current frame, so remove our frame data. - t.pendingCallFrames = t.pendingCallFrames[:t.callDepth] -} - -// CaptureEnter is called upon entering of the call frame, as defined by vm.EVMLogger. -func (t *testChainDeploymentsTracer) CaptureEnter(typ vm.OpCode, from common.Address, to common.Address, input []byte, gas uint64, value *big.Int) { - // Increase our call depth now that we're entering a new call frame. - t.callDepth++ - - // Create our call frame struct to track data for this initial entry call frame. - callFrameData := &testChainDeploymentsTracerCallFrame{} - t.pendingCallFrames = append(t.pendingCallFrames, callFrameData) - - // If this is a contract creation, record the `to` address as a pending deployment (if it succeeds upon exit, - // we commit it). - if typ == vm.CREATE || typ == vm.CREATE2 { - callFrameData.results = append(callFrameData.results, types.DeployedContractBytecodeChange{ - Contract: &types.DeployedContractBytecode{ - Address: to, - InitBytecode: input, - RuntimeBytecode: nil, - }, - Creation: true, - SelfDestructed: false, - Destroyed: false, - }) - } -} - -// CaptureExit is called upon exiting of the call frame, as defined by vm.EVMLogger. 
-func (t *testChainDeploymentsTracer) CaptureExit(output []byte, gasUsed uint64, err error) { - // Fetch runtime bytecode for all deployments in this frame which did not record one, before exiting. - // We had to fetch it upon exit as it does not exist during creation of course. - for _, contractChange := range t.pendingCallFrames[t.callDepth].results { - if contractChange.Creation && contractChange.Contract.RuntimeBytecode == nil { - contractChange.Contract.RuntimeBytecode = t.evm.StateDB.GetCode(contractChange.Contract.Address) + } else { + // If we didn't encounter an error in this call frame, we push our captured data up one frame. + if err == nil { + t.pendingCallFrames[t.callDepth-1].results = append(t.pendingCallFrames[t.callDepth-1].results, t.pendingCallFrames[t.callDepth].results...) } - } - // If we didn't encounter an error in this call frame, we push our captured data up one frame. - if err == nil { - t.pendingCallFrames[t.callDepth-1].results = append(t.pendingCallFrames[t.callDepth-1].results, t.pendingCallFrames[t.callDepth].results...) + // We're exiting the current frame, so remove our frame data and decrement the call depth. + t.pendingCallFrames = t.pendingCallFrames[:t.callDepth] + t.callDepth-- } - // We're exiting the current frame, so remove our frame data. - t.pendingCallFrames = t.pendingCallFrames[:t.callDepth] - - // Decrease our call depth now that we've exited a call frame. - t.callDepth-- } -// CaptureState records data from an EVM state update, as defined by vm.EVMLogger. -func (t *testChainDeploymentsTracer) CaptureState(pc uint64, op vm.OpCode, gas, cost uint64, scope *vm.ScopeContext, rData []byte, depth int, vmErr error) { +// OnOpcode records data from an EVM state update, as defined by tracers.Tracer. 
+func (t *testChainDeploymentsTracer) OnOpcode(pc uint64, op byte, gas, cost uint64, scope tracing.OpContext, rData []byte, depth int, err error) { // If we encounter a SELFDESTRUCT operation, record the change to our contract in our results. - if op == vm.SELFDESTRUCT { + if op == byte(vm.SELFDESTRUCT) { callFrameData := t.pendingCallFrames[t.callDepth] + addr := scope.Address() + code := t.evmContext.StateDB.GetCode(addr) callFrameData.results = append(callFrameData.results, types.DeployedContractBytecodeChange{ Contract: &types.DeployedContractBytecode{ - Address: scope.Contract.Address(), + Address: addr, InitBytecode: nil, - RuntimeBytecode: t.evm.StateDB.GetCode(scope.Contract.Address()), + RuntimeBytecode: code, }, - Creation: false, - SelfDestructed: true, - Destroyed: t.selfDestructDestroysCode, + Creation: false, + DynamicCreation: false, + SelfDestructed: true, + // Check if this is a new contract (not previously deployed and self destructed). + // https://github.com/ethereum/go-ethereum/blob/8d42e115b1cae4f09fd02b71c06ec9c85f22ad4f/core/state/statedb.go#L504-L506 + Destroyed: t.evmContext.ChainConfig.LatestFork(t.evmContext.Time) < forks.Cancun || !t.evmContext.StateDB.Exist(addr), }) } } -// CaptureFault records an execution fault, as defined by vm.EVMLogger. -func (t *testChainDeploymentsTracer) CaptureFault(pc uint64, op vm.OpCode, gas, cost uint64, scope *vm.ScopeContext, depth int, err error) { - -} - // CaptureTxEndSetAdditionalResults can be used to set additional results captured from execution tracing. If this // tracer is used during transaction execution (block creation), the results can later be queried from the block. // This method will only be called on the added tracer if it implements the extended TestChainTracer interface. 
diff --git a/chain/test_chain_events.go b/chain/test_chain_events.go index adc92fd4..49c4cf48 100644 --- a/chain/test_chain_events.go +++ b/chain/test_chain_events.go @@ -93,6 +93,10 @@ type ContractDeploymentsAddedEvent struct { // Contract defines information for the contract which was deployed to the Chain. Contract *types.DeployedContractBytecode + + // DynamicDeployment describes whether this contract deployment was dynamic (e.g. `c = new MyContract()`) or was + // because of a traditional transaction + DynamicDeployment bool } // ContractDeploymentsRemovedEvent describes an event where a contract has become unavailable on the TestChain, either diff --git a/chain/test_chain_test.go b/chain/test_chain_test.go index bb049d1c..048822a1 100644 --- a/chain/test_chain_test.go +++ b/chain/test_chain_test.go @@ -64,12 +64,12 @@ func createChain(t *testing.T) (*TestChain, []common.Address) { assert.NoError(t, err) // NOTE: Sharing GenesisAlloc between nodes will result in some accounts not being funded for some reason. - genesisAlloc := make(core.GenesisAlloc) + genesisAlloc := make(types.GenesisAlloc) // Fund all of our sender addresses in the genesis block initBalance := new(big.Int).Div(abi.MaxInt256, big.NewInt(2)) for _, sender := range senders { - genesisAlloc[sender] = core.GenesisAccount{ + genesisAlloc[sender] = types.Account{ Balance: initBalance, } } @@ -215,7 +215,7 @@ func TestChainDynamicDeployments(t *testing.T) { compilations, _, err := cryticCompile.Compile() assert.NoError(t, err) assert.EqualValues(t, 1, len(compilations)) - assert.EqualValues(t, 1, len(compilations[0].Sources)) + assert.EqualValues(t, 1, len(compilations[0].SourcePathToArtifact)) // Obtain our chain and senders chain, senders := createChain(t) @@ -223,7 +223,7 @@ func TestChainDynamicDeployments(t *testing.T) { // Deploy each contract that has no construct arguments. 
deployCount := 0 for _, compilation := range compilations { - for _, source := range compilation.Sources { + for _, source := range compilation.SourcePathToArtifact { for _, contract := range source.Contracts { contract := contract if len(contract.Abi.Constructor.Inputs) == 0 { @@ -241,14 +241,26 @@ func TestChainDynamicDeployments(t *testing.T) { // Deploy the currently indexed contract next // Create a message to represent our contract deployment. - msg := types.NewMessage(senders[0], nil, chain.State().GetNonce(senders[0]), big.NewInt(0), chain.BlockGasLimit, big.NewInt(1), big.NewInt(0), big.NewInt(0), contract.InitBytecode, nil, false) + msg := core.Message{ + To: nil, + From: senders[0], + Nonce: chain.State().GetNonce(senders[0]), + Value: big.NewInt(0), + GasLimit: chain.BlockGasLimit, + GasPrice: big.NewInt(1), + GasFeeCap: big.NewInt(0), + GasTipCap: big.NewInt(0), + Data: contract.InitBytecode, + AccessList: nil, + SkipAccountChecks: false, + } // Create a new pending block we'll commit to chain block, err := chain.PendingBlockCreate() assert.NoError(t, err) // Add our transaction to the block - err = chain.PendingBlockAddTx(msg) + err = chain.PendingBlockAddTx(&msg) assert.NoError(t, err) // Commit the pending block to the chain, so it becomes the new head. 
@@ -317,7 +329,7 @@ func TestChainDeploymentWithArgs(t *testing.T) { compilations, _, err := cryticCompile.Compile() assert.NoError(t, err) assert.EqualValues(t, 1, len(compilations)) - assert.EqualValues(t, 1, len(compilations[0].Sources)) + assert.EqualValues(t, 1, len(compilations[0].SourcePathToArtifact)) // Obtain our chain and senders chain, senders := createChain(t) @@ -334,7 +346,7 @@ func TestChainDeploymentWithArgs(t *testing.T) { // Deploy each contract deployCount := 0 for _, compilation := range compilations { - for _, source := range compilation.Sources { + for _, source := range compilation.SourcePathToArtifact { for contractName, contract := range source.Contracts { contract := contract @@ -354,14 +366,26 @@ func TestChainDeploymentWithArgs(t *testing.T) { assert.NoError(t, err) // Create a message to represent our contract deployment. - msg := types.NewMessage(senders[0], nil, chain.State().GetNonce(senders[0]), big.NewInt(0), chain.BlockGasLimit, big.NewInt(1), big.NewInt(0), big.NewInt(0), msgData, nil, false) + msg := core.Message{ + To: nil, + From: senders[0], + Nonce: chain.State().GetNonce(senders[0]), + Value: big.NewInt(0), + GasLimit: chain.BlockGasLimit, + GasPrice: big.NewInt(1), + GasFeeCap: big.NewInt(0), + GasTipCap: big.NewInt(0), + Data: msgData, + AccessList: nil, + SkipAccountChecks: false, + } // Create a new pending block we'll commit to chain block, err := chain.PendingBlockCreate() assert.NoError(t, err) // Add our transaction to the block - err = chain.PendingBlockAddTx(msg) + err = chain.PendingBlockAddTx(&msg) assert.NoError(t, err) // Commit the pending block to the chain, so it becomes the new head. @@ -443,7 +467,7 @@ func TestChainCloning(t *testing.T) { // Deploy each contract that has no construct arguments 10 times. 
for _, compilation := range compilations { - for _, source := range compilation.Sources { + for _, source := range compilation.SourcePathToArtifact { for _, contract := range source.Contracts { contract := contract if len(contract.Abi.Constructor.Inputs) == 0 { @@ -451,14 +475,26 @@ func TestChainCloning(t *testing.T) { // Deploy the currently indexed contract next // Create a message to represent our contract deployment. - msg := types.NewMessage(senders[0], nil, chain.State().GetNonce(senders[0]), big.NewInt(0), chain.BlockGasLimit, big.NewInt(1), big.NewInt(0), big.NewInt(0), contract.InitBytecode, nil, false) + msg := core.Message{ + To: nil, + From: senders[0], + Nonce: chain.State().GetNonce(senders[0]), + Value: big.NewInt(0), + GasLimit: chain.BlockGasLimit, + GasPrice: big.NewInt(1), + GasFeeCap: big.NewInt(0), + GasTipCap: big.NewInt(0), + Data: contract.InitBytecode, + AccessList: nil, + SkipAccountChecks: false, + } // Create a new pending block we'll commit to chain block, err := chain.PendingBlockCreate() assert.NoError(t, err) // Add our transaction to the block - err = chain.PendingBlockAddTx(msg) + err = chain.PendingBlockAddTx(&msg) assert.NoError(t, err) // Commit the pending block to the chain, so it becomes the new head. @@ -503,7 +539,7 @@ func TestChainCloning(t *testing.T) { }) } -// TestCallSequenceReplayMatchSimple creates a TestChain, sends some messages to it, then creates another chain which +// TestChainCallSequenceReplayMatchSimple creates a TestChain, sends some messages to it, then creates another chain which // it replays the same sequence on. It ensures that the ending state is the same. // Note: this does not set block timestamps or other data that might be non-deterministic. // This does not test replaying with a previous call sequence with different timestamps, etc. 
It expects the TestChain @@ -527,20 +563,32 @@ func TestChainCallSequenceReplayMatchSimple(t *testing.T) { // Deploy each contract that has no construct arguments 10 times. for _, compilation := range compilations { - for _, source := range compilation.Sources { + for _, source := range compilation.SourcePathToArtifact { for _, contract := range source.Contracts { contract := contract if len(contract.Abi.Constructor.Inputs) == 0 { for i := 0; i < 10; i++ { // Create a message to represent our contract deployment. - msg := types.NewMessage(senders[0], nil, chain.State().GetNonce(senders[0]), big.NewInt(0), chain.BlockGasLimit, big.NewInt(1), big.NewInt(0), big.NewInt(0), contract.InitBytecode, nil, false) + msg := core.Message{ + To: nil, + From: senders[0], + Nonce: chain.State().GetNonce(senders[0]), + Value: big.NewInt(0), + GasLimit: chain.BlockGasLimit, + GasPrice: big.NewInt(1), + GasFeeCap: big.NewInt(0), + GasTipCap: big.NewInt(0), + Data: contract.InitBytecode, + AccessList: nil, + SkipAccountChecks: false, + } // Create a new pending block we'll commit to chain block, err := chain.PendingBlockCreate() assert.NoError(t, err) // Add our transaction to the block - err = chain.PendingBlockAddTx(msg) + err = chain.PendingBlockAddTx(&msg) assert.NoError(t, err) // Commit the pending block to the chain, so it becomes the new head. 
diff --git a/chain/test_chain_tracer.go b/chain/test_chain_tracer.go index fc21a5cc..775cd07f 100644 --- a/chain/test_chain_tracer.go +++ b/chain/test_chain_tracer.go @@ -1,118 +1,148 @@ package chain import ( + "math/big" + "github.com/crytic/medusa/chain/types" "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/tracing" + coretypes "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/core/vm" + "github.com/ethereum/go-ethereum/eth/tracers" "golang.org/x/exp/slices" - "math/big" ) -// TestChainTracer is an extended vm.EVMLogger which can be used with a TestChain to store any captured +// TestChainTracer is an extended tracers.Tracer which can be used with a TestChain to store any captured // information within call results, recorded in each block produced. -type TestChainTracer interface { - // EVMLogger is extended by this logger. - vm.EVMLogger +type TestChainTracer struct { + // tracers.Tracer is extended by this logger. + *tracers.Tracer // CaptureTxEndSetAdditionalResults can be used to set additional results captured from execution tracing. If this // tracer is used during transaction execution (block creation), the results can later be queried from the block. // This method will only be called on the added tracer if it implements the extended TestChainTracer interface. - CaptureTxEndSetAdditionalResults(results *types.MessageResults) + CaptureTxEndSetAdditionalResults func(results *types.MessageResults) } -// TestChainTracerRouter acts as a vm.EVMLogger or TestChainTracer splitter, allowing multiple tracers to be used in +// TestChainTracerRouter acts as a tracers.Tracer, allowing multiple tracers to be used in // place of one. When this tracer receives callback, it calls upon its underlying tracers. type TestChainTracerRouter struct { - // tracers refers to the internally recorded vm.EVMLogger instances to route all calls to. 
- tracers []vm.EVMLogger + // tracers refers to the internally recorded tracers.Tracer instances to route all calls to. + tracers []*TestChainTracer + nativeTracer *TestChainTracer } // NewTestChainTracerRouter returns a new TestChainTracerRouter instance with no registered tracers. func NewTestChainTracerRouter() *TestChainTracerRouter { - return &TestChainTracerRouter{ - tracers: make([]vm.EVMLogger, 0), + tracer := &TestChainTracerRouter{ + tracers: make([]*TestChainTracer, 0), } + innerTracer := &tracers.Tracer{ + Hooks: &tracing.Hooks{ + OnTxStart: tracer.OnTxStart, + OnTxEnd: tracer.OnTxEnd, + OnEnter: tracer.OnEnter, + OnExit: tracer.OnExit, + OnOpcode: tracer.OnOpcode, + }, + } + tracer.nativeTracer = &TestChainTracer{Tracer: innerTracer, CaptureTxEndSetAdditionalResults: tracer.CaptureTxEndSetAdditionalResults} + + return tracer + } -// AddTracer adds a vm.EVMLogger or TestChainTracer to the TestChainTracerRouter, so all vm.EVMLogger relates calls +// AddTracer adds a TestChainTracer to the TestChainTracerRouter so that all tracing.Hooks calls +// NativeTracer returns the underlying TestChainTracer. +func (t *TestChainTracerRouter) NativeTracer() *TestChainTracer { + return t.nativeTracer +} + // are forwarded to it. -func (t *TestChainTracerRouter) AddTracer(tracer vm.EVMLogger) { +func (t *TestChainTracerRouter) AddTracer(tracer *TestChainTracer) { t.AddTracers(tracer) } -// AddTracers adds vm.EVMLogger implementations to the TestChainTracerRouter so all other method calls are forwarded -// to them. -func (t *TestChainTracerRouter) AddTracers(tracers ...vm.EVMLogger) { +// AddTracers adds TestChainTracers to the TestChainTracerRouter so that all tracing.Hooks calls are forwarded to them. +func (t *TestChainTracerRouter) AddTracers(tracers ...*TestChainTracer) { t.tracers = append(t.tracers, tracers...) } -// Tracers returns the vm.EVMLogger instances added to the TestChainTracerRouter.
-func (t *TestChainTracerRouter) Tracers() []vm.EVMLogger { +// Tracers returns the tracers.Tracer instances added to the TestChainTracerRouter. +func (t *TestChainTracerRouter) Tracers() []*TestChainTracer { return slices.Clone(t.tracers) } -// CaptureTxStart is called upon the start of transaction execution, as defined by vm.EVMLogger. -func (t *TestChainTracerRouter) CaptureTxStart(gasLimit uint64) { +// OnTxStart is called upon the start of transaction execution, as defined by tracers.Tracer. +func (t *TestChainTracerRouter) OnTxStart(vm *tracing.VMContext, tx *coretypes.Transaction, from common.Address) { // Call the underlying method for each registered tracer. for _, tracer := range t.tracers { - tracer.CaptureTxStart(gasLimit) + if tracer.OnTxStart != nil { + tracer.OnTxStart(vm, tx, from) + } } } -// CaptureTxEnd is called upon the end of transaction execution, as defined by vm.EVMLogger. -func (t *TestChainTracerRouter) CaptureTxEnd(restGas uint64) { +// OnTxEnd is called upon the end of transaction execution, as defined by tracers.Tracer. +func (t *TestChainTracerRouter) OnTxEnd(receipt *coretypes.Receipt, err error) { // Call the underlying method for each registered tracer. for _, tracer := range t.tracers { - tracer.CaptureTxEnd(restGas) + if tracer.OnTxEnd != nil { + tracer.OnTxEnd(receipt, err) + } } } -// CaptureStart initializes the tracing operation for the top of a call frame, as defined by vm.EVMLogger. -func (t *TestChainTracerRouter) CaptureStart(env *vm.EVM, from common.Address, to common.Address, create bool, input []byte, gas uint64, value *big.Int) { +// OnEnter initializes the tracing operation for the top of a call frame, as defined by tracers.Tracer. +func (t *TestChainTracerRouter) OnEnter(depth int, typ byte, from common.Address, to common.Address, input []byte, gas uint64, value *big.Int) { // Call the underlying method for each registered tracer. 
for _, tracer := range t.tracers { - tracer.CaptureStart(env, from, to, create, input, gas, value) + if tracer.OnEnter != nil { + tracer.OnEnter(depth, typ, from, to, input, gas, value) + } } } -// CaptureEnd is called after a call to finalize tracing completes for the top of a call frame, as defined by vm.EVMLogger. -func (t *TestChainTracerRouter) CaptureEnd(output []byte, gasUsed uint64, err error) { +// OnExit is called after a call to finalize tracing completes for the top of a call frame, as defined by tracers.Tracer. +func (t *TestChainTracerRouter) OnExit(depth int, output []byte, gasUsed uint64, err error, reverted bool) { // Call the underlying method for each registered tracer. for _, tracer := range t.tracers { - tracer.CaptureEnd(output, gasUsed, err) + if tracer.OnExit != nil { + tracer.OnExit(depth, output, gasUsed, err, reverted) + } } } -// CaptureEnter is called upon entering of the call frame, as defined by vm.EVMLogger. -func (t *TestChainTracerRouter) CaptureEnter(typ vm.OpCode, from common.Address, to common.Address, input []byte, gas uint64, value *big.Int) { +// OnOpcode records data from an EVM state update, as defined by tracers.Tracer. +func (t *TestChainTracerRouter) OnOpcode(pc uint64, op byte, gas, cost uint64, scope tracing.OpContext, rData []byte, depth int, err error) { // Call the underlying method for each registered tracer. for _, tracer := range t.tracers { - tracer.CaptureEnter(typ, from, to, input, gas, value) - } -} + if tracer.OnOpcode != nil { -// CaptureExit is called upon exiting of the call frame, as defined by vm.EVMLogger. -func (t *TestChainTracerRouter) CaptureExit(output []byte, gasUsed uint64, err error) { - // Call the underlying method for each registered tracer. - for _, tracer := range t.tracers { - tracer.CaptureExit(output, gasUsed, err) + tracer.OnOpcode(pc, op, gas, cost, scope, rData, depth, err) + } } } -// CaptureState records data from an EVM state update, as defined by vm.EVMLogger. 
-func (t *TestChainTracerRouter) CaptureState(pc uint64, op vm.OpCode, gas, cost uint64, scope *vm.ScopeContext, rData []byte, depth int, vmErr error) { +// OnFault records an execution fault, as defined by tracers.Tracer. +func (t *TestChainTracerRouter) OnFault(pc uint64, op byte, gas, cost uint64, scope *vm.ScopeContext, depth int, err error) { // Call the underlying method for each registered tracer. for _, tracer := range t.tracers { - tracer.CaptureState(pc, op, gas, cost, scope, rData, depth, vmErr) + if tracer.OnFault != nil { + tracer.OnFault(pc, op, gas, cost, scope, depth, err) + } } } -// CaptureFault records an execution fault, as defined by vm.EVMLogger. -func (t *TestChainTracerRouter) CaptureFault(pc uint64, op vm.OpCode, gas, cost uint64, scope *vm.ScopeContext, depth int, err error) { +func (t *TestChainTracerRouter) OnCodeChange(a common.Address, prevCodeHash common.Hash, prev []byte, codeHash common.Hash, code []byte) { // Call the underlying method for each registered tracer. + for _, tracer := range t.tracers { - tracer.CaptureFault(pc, op, gas, cost, scope, depth, err) + if tracer.OnCodeChange != nil { + tracer.OnCodeChange(a, prevCodeHash, prev, codeHash, code) + } } + } // CaptureTxEndSetAdditionalResults can be used to set additional results captured from execution tracing. If this @@ -121,9 +151,8 @@ func (t *TestChainTracerRouter) CaptureFault(pc uint64, op vm.OpCode, gas, cost func (t *TestChainTracerRouter) CaptureTxEndSetAdditionalResults(results *types.MessageResults) { // Call the underlying method for each registered tracer. for _, tracer := range t.tracers { - // Try to cast each tracer to a TestChainTracer and forward the call to it. 
- if testChainTracer, ok := tracer.(TestChainTracer); ok { - testChainTracer.CaptureTxEndSetAdditionalResults(results) + if tracer.CaptureTxEndSetAdditionalResults != nil { + tracer.CaptureTxEndSetAdditionalResults(results) } } } diff --git a/chain/types/block.go b/chain/types/block.go index 47638e19..754f81ff 100644 --- a/chain/types/block.go +++ b/chain/types/block.go @@ -18,7 +18,7 @@ type Block struct { // of a transaction occurs and can be thought of as an internal EVM transaction. It contains typical transaction // fields plainly (e.g., no transaction signature is included, the sender is derived and simply supplied as a field // in a message). - Messages []core.Message + Messages []*core.Message // MessageResults represents the results recorded while executing transactions. MessageResults []*MessageResults @@ -30,7 +30,7 @@ func NewBlock(header *types.Header) *Block { block := &Block{ Hash: header.Hash(), Header: header, - Messages: make([]core.Message, 0), + Messages: make([]*core.Message, 0), MessageResults: make([]*MessageResults, 0), } return block diff --git a/chain/types/deployed_contract_bytecode.go b/chain/types/deployed_contract_bytecode.go index e32c76d1..9f49bef8 100644 --- a/chain/types/deployed_contract_bytecode.go +++ b/chain/types/deployed_contract_bytecode.go @@ -16,6 +16,10 @@ type DeployedContractBytecodeChange struct { // Destroyed are true. Creation bool + // DynamicCreation indicates whether the change made was a _dynamic_ contract creation. This cannot be true if + // Creation is false. + DynamicCreation bool + // SelfDestructed indicates whether the change made was due to a self-destruct instruction being executed. This // cannot be true if Creation is true. 
// Note: This may not be indicative of contract removal (as is the case with Destroyed), as proposed changes to diff --git a/chain/types/message_results.go b/chain/types/message_results.go index 57ac7fcc..4ce232cc 100644 --- a/chain/types/message_results.go +++ b/chain/types/message_results.go @@ -9,8 +9,6 @@ import ( // MessageResults represents metadata obtained from the execution of a CallMessage in a Block. // This contains results such as contracts deployed, and other variables tracked by a chain.TestChain. type MessageResults struct { - // PreStateRoot refers to the state root hash prior to the execution of this transaction. - PreStateRoot common.Hash // PostStateRoot refers to the state root hash after the execution of this transaction. PostStateRoot common.Hash diff --git a/chain/vendored/apply_transaction.go b/chain/vendored/apply_transaction.go index e9fab99e..1472025b 100644 --- a/chain/vendored/apply_transaction.go +++ b/chain/vendored/apply_transaction.go @@ -17,6 +17,7 @@ package vendored import ( + "github.com/crytic/medusa/chain/config" "github.com/ethereum/go-ethereum/common" . "github.com/ethereum/go-ethereum/core" "github.com/ethereum/go-ethereum/core/state" @@ -35,13 +36,22 @@ import ( // This executes on an underlying EVM and returns a transaction receipt, or an error if one occurs. // Additional changes: // - Exposed core.ExecutionResult as a return value. 
-func EVMApplyTransaction(msg Message, config *params.ChainConfig, author *common.Address, gp *GasPool, statedb *state.StateDB, blockNumber *big.Int, blockHash common.Hash, tx *types.Transaction, usedGas *uint64, evm *vm.EVM) (*types.Receipt, *ExecutionResult, error) { +func EVMApplyTransaction(msg *Message, config *params.ChainConfig, testChainConfig *config.TestChainConfig, author *common.Address, gp *GasPool, statedb *state.StateDB, blockNumber *big.Int, blockHash common.Hash, tx *types.Transaction, usedGas *uint64, evm *vm.EVM) (receipt *types.Receipt, result *ExecutionResult, err error) { + // Apply the OnTxStart and OnTxEnd hooks + if evm.Config.Tracer != nil && evm.Config.Tracer.OnTxStart != nil { + evm.Config.Tracer.OnTxStart(evm.GetVMContext(), tx, msg.From) + if evm.Config.Tracer.OnTxEnd != nil { + defer func() { + evm.Config.Tracer.OnTxEnd(receipt, err) + }() + } + } // Create a new context to be used in the EVM environment. txContext := NewEVMTxContext(msg) evm.Reset(txContext, statedb) // Apply the transaction to the current state (included in the env). - result, err := ApplyMessage(evm, msg, gp) + result, err = ApplyMessage(evm, msg, gp) if err != nil { return nil, nil, err } @@ -57,7 +67,7 @@ func EVMApplyTransaction(msg Message, config *params.ChainConfig, author *common // Create a new receipt for the transaction, storing the intermediate root and gas used // by the tx. - receipt := &types.Receipt{Type: tx.Type(), PostState: root, CumulativeGasUsed: *usedGas} + receipt = &types.Receipt{Type: tx.Type(), PostState: root, CumulativeGasUsed: *usedGas} if result.Failed() { receipt.Status = types.ReceiptStatusFailed } else { @@ -67,8 +77,20 @@ func EVMApplyTransaction(msg Message, config *params.ChainConfig, author *common receipt.GasUsed = result.UsedGas // If the transaction created a contract, store the creation address in the receipt. 
- if msg.To() == nil { - receipt.ContractAddress = crypto.CreateAddress(evm.TxContext.Origin, tx.Nonce()) + if msg.To == nil { + // If the contract creation was a predeployed contract, we need to set the receipt's contract address to the + // override address + // Otherwise, we use the traditional method based on tx.origin and nonce + if len(testChainConfig.ContractAddressOverrides) > 0 { + initBytecodeHash := crypto.Keccak256Hash(msg.Data) + if overrideAddr, ok := testChainConfig.ContractAddressOverrides[initBytecodeHash]; ok { + receipt.ContractAddress = overrideAddr + } else { + receipt.ContractAddress = crypto.CreateAddress(evm.TxContext.Origin, tx.Nonce()) + } + } else { + receipt.ContractAddress = crypto.CreateAddress(evm.TxContext.Origin, tx.Nonce()) + } } // Set the receipt logs and create the bloom filter. diff --git a/cmd/completion.go b/cmd/completion.go new file mode 100644 index 00000000..a5e1f66a --- /dev/null +++ b/cmd/completion.go @@ -0,0 +1,103 @@ +package cmd + +import ( + "fmt" + "golang.org/x/exp/slices" + "os" + "strings" + + "github.com/spf13/cobra" +) + +// generalComDesc describes the long description for the completion command +const generalComDesc = ` +Generate the autocompletion script for medusa for the specific shell. 
+ +Bash: +To load completions in the current shell session: + + source <(medusa completion bash) + +To load completions for every new session, execute once: +- Linux: + medusa completion bash > /etc/bash_completion.d/medusa + +- macOS: + medusa completion bash > /usr/local/etc/bash_completion.d/medusa + +Zsh: +To load completions in the current shell session: + + source <(medusa completion zsh) + +To load completions for every new session, execute once: + + medusa completion zsh > "${fpath[1]}/_medusa" + +PowerShell: +To load completions in the current shell session: +PS> medusa completion powershell | Out-String | Invoke-Expression + +To load completions for every new session, run: +PS> medusa completion powershell > medusa.ps1 +and source this file from your PowerShell profile. +` + +var supportedShells = []string{"bash", "zsh", "powershell"} + +// completionCmd represents the completion command +var completionCmd = &cobra.Command{ + Use: "completion <shell>", + Short: "Generate the autocompletion script for medusa for the specific shell", + Long: generalComDesc, + Args: cmdValidateCompletionArgs, + RunE: cmdRunCompletion, + SilenceUsage: true, + SilenceErrors: true, +} + +func init() { + rootCmd.AddCommand(completionCmd) +} + +// cmdValidateCompletionArgs validates CLI arguments +func cmdValidateCompletionArgs(cmd *cobra.Command, args []string) error { + // Make sure we have exactly 1 argument + if err := cobra.ExactArgs(1)(cmd, args); err != nil { + err = fmt.Errorf("completion requires only 1 shell argument (options: %s)", strings.Join(supportedShells, ", ")) + cmdLogger.Error("Failed to validate args for completion command", err) + return err + } + + // Make sure that the shell is a supported type + if contains := slices.Contains(supportedShells, args[0]); !contains { + err := fmt.Errorf("%s is not a supported shell", args[0]) + cmdLogger.Error("Failed to validate args for completion command", err) + return err + } + + return nil +} + +// cmdRunCompletion executes
the completion CLI command +func cmdRunCompletion(cmd *cobra.Command, args []string) error { + // NOTE: Please be aware that if the supported shells changes, then this switch statement must also change + var err error + switch args[0] { + case "bash": + err = cmd.Root().GenBashCompletionV2(os.Stdout, true) + case "zsh": + err = cmd.Root().GenZshCompletion(os.Stdout) + case "powershell": + err = cmd.Root().GenPowerShellCompletionWithDesc(os.Stdout) + default: + // We are throwing a panic here because our validation function should have handled this and something is wrong. + cmdLogger.Panic("Failed to run the completion command", fmt.Errorf("%s is not a supported shell type", args[0])) + } + + // Log an error if we encountered one + if err != nil { + cmdLogger.Error("Failed to run the completion command", err) + } + return err +} diff --git a/cmd/exitcodes/error_with_exit_code.go b/cmd/exitcodes/error_with_exit_code.go new file mode 100644 index 00000000..96102f7f --- /dev/null +++ b/cmd/exitcodes/error_with_exit_code.go @@ -0,0 +1,41 @@ +package exitcodes + +// ErrorWithExitCode is an `error` type that wraps an existing error and exit code, providing exit codes +// for a given error if they are bubbled up to the top-level. +type ErrorWithExitCode struct { + err error + exitCode int +} + +// NewErrorWithExitCode creates a new error (ErrorWithExitCode) with the provided internal error and exit code. +func NewErrorWithExitCode(err error, exitCode int) *ErrorWithExitCode { + return &ErrorWithExitCode{ + err: err, + exitCode: exitCode, + } +} + +// Error returns the error message string, implementing the `error` interface. +func (e *ErrorWithExitCode) Error() string { + if e.err == nil { + return "" + } + return e.err.Error() +} + +// GetInnerErrorAndExitCode checks the given exit code that the application should exit with, if this error is bubbled +// to the top-level. 
This will be 0 for a nil error, 1 for a generic error, or arbitrary if the error is of type +// ErrorWithExitCode. +// Returns the error (or inner error if it is an ErrorWithExitCode error type), along with the exit code associated +// with the error. +func GetInnerErrorAndExitCode(err error) (error, int) { + // If we have no error, return 0, if we have a generic error, return 1, if we have a custom error code, unwrap + // and return it. + if err == nil { + return nil, ExitCodeSuccess + } else if unwrappedErr, ok := err.(*ErrorWithExitCode); ok { + return unwrappedErr.err, unwrappedErr.exitCode + } else { + return err, ExitCodeGeneralError + } +} diff --git a/cmd/exitcodes/exit_codes.go b/cmd/exitcodes/exit_codes.go new file mode 100644 index 00000000..5bbed621 --- /dev/null +++ b/cmd/exitcodes/exit_codes.go @@ -0,0 +1,25 @@ +package exitcodes + +const ( + // ================================ + // Platform-universal exit codes + // ================================ + + // ExitCodeSuccess indicates no errors or failures had occurred. + ExitCodeSuccess = 0 + + // ExitCodeGeneralError indicates some type of general error occurred. + ExitCodeGeneralError = 1 + + // ================================ + // Application-specific exit codes + // ================================ + // Note: Despite not being standardized, exit codes 2-5 are often used for common use cases, so we avoid them. + + // ExitCodeHandledError indicates that there was an error that was logged already and does not need to be handled + // by main. + ExitCodeHandledError = 6 + + // ExitCodeTestFailed indicates a test case had failed. 
+ ExitCodeTestFailed = 7 +) diff --git a/cmd/fuzz.go b/cmd/fuzz.go index 0c6508d3..c9905b84 100644 --- a/cmd/fuzz.go +++ b/cmd/fuzz.go @@ -6,40 +6,70 @@ import ( "os/signal" "path/filepath" + "github.com/crytic/medusa/cmd/exitcodes" + "github.com/crytic/medusa/logging/colors" + "github.com/crytic/medusa/fuzzing" "github.com/crytic/medusa/fuzzing/config" "github.com/spf13/cobra" + "github.com/spf13/pflag" ) // fuzzCmd represents the command provider for fuzzing var fuzzCmd = &cobra.Command{ - Use: "fuzz", - Short: "Starts a fuzzing campaign", - Long: `Starts a fuzzing campaign`, - Args: cmdValidateFuzzArgs, - RunE: cmdRunFuzz, -} - -// cmdValidateFuzzArgs makes sure that there are no positional arguments provided to the fuzz command -func cmdValidateFuzzArgs(cmd *cobra.Command, args []string) error { - // Make sure we have no positional args - if err := cobra.NoArgs(cmd, args); err != nil { - return fmt.Errorf("fuzz does not accept any positional arguments, only flags and their associated values") - } - return nil + Use: "fuzz", + Short: "Starts a fuzzing campaign", + Long: `Starts a fuzzing campaign`, + Args: cmdValidateFuzzArgs, + ValidArgsFunction: cmdValidFuzzArgs, + RunE: cmdRunFuzz, + SilenceUsage: true, + SilenceErrors: true, } func init() { // Add all the flags allowed for the fuzz command err := addFuzzFlags() if err != nil { - panic(err) + cmdLogger.Panic("Failed to initialize the fuzz command", err) } // Add the fuzz command and its associated flags to the root command rootCmd.AddCommand(fuzzCmd) } +// cmdValidFuzzArgs will return which flags and sub-commands are valid for dynamic completion for the fuzz command +func cmdValidFuzzArgs(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + // Gather a list of flags that are available to be used in the current command but have not been used yet + var unusedFlags []string + + // Examine all the flags, and add any flags that have not been set in the current command line 
+ // to a list of unused flags + cmd.Flags().VisitAll(func(flag *pflag.Flag) { + if !flag.Changed { + // When adding a flag to a command, include the "--" prefix to indicate that it is a flag + // and not a positional argument. Additionally, when the user presses the TAB key twice after typing + // a flag name, the "--" prefix will appear again, indicating that more flags are available and that + // none of the arguments are positional. + unusedFlags = append(unusedFlags, "--"+flag.Name) + } + }) + // Provide a list of flags that can be used in the current command (but have not been used yet) + // for autocompletion suggestions + return unusedFlags, cobra.ShellCompDirectiveNoFileComp +} + +// cmdValidateFuzzArgs makes sure that there are no positional arguments provided to the fuzz command +func cmdValidateFuzzArgs(cmd *cobra.Command, args []string) error { + // Make sure we have no positional args + if err := cobra.NoArgs(cmd, args); err != nil { + err = fmt.Errorf("fuzz does not accept any positional arguments, only flags and their associated values") + cmdLogger.Error("Failed to validate args to the fuzz command", err) + return err + } + return nil +} + // cmdRunFuzz executes the CLI fuzz command and navigates through the following possibilities: // #1: We will search for either a custom config file (via --config) or the default (medusa.json). // If we find it, read it. If we can't read it, throw an error. 
@@ -52,6 +82,7 @@ func cmdRunFuzz(cmd *cobra.Command, args []string) error { configFlagUsed := cmd.Flags().Changed("config") configPath, err := cmd.Flags().GetString("config") if err != nil { + cmdLogger.Error("Failed to run the fuzz command", err) return err } @@ -59,6 +90,7 @@ func cmdRunFuzz(cmd *cobra.Command, args []string) error { if !configFlagUsed { workingDirectory, err := os.Getwd() if err != nil { + cmdLogger.Error("Failed to run the fuzz command", err) return err } configPath = filepath.Join(workingDirectory, DefaultProjectConfigFilename) @@ -70,24 +102,29 @@ func cmdRunFuzz(cmd *cobra.Command, args []string) error { // Possibility #1: File was found if existenceError == nil { // Try to read the configuration file and throw an error if something goes wrong - projectConfig, err = config.ReadProjectConfigFromFile(configPath) + cmdLogger.Info("Reading the configuration file at: ", colors.Bold, configPath, colors.Reset) + // Use the default compilation platform if the config file doesn't specify one + projectConfig, err = config.ReadProjectConfigFromFile(configPath, DefaultCompilationPlatform) if err != nil { + cmdLogger.Error("Failed to run the fuzz command", err) return err } } // Possibility #2: If the --config flag was used, and we couldn't find the file, we'll throw an error if configFlagUsed && existenceError != nil { + cmdLogger.Error("Failed to run the fuzz command", err) return existenceError } // Possibility #3: --config flag was not used and medusa.json was not found, so use the default project config if !configFlagUsed && existenceError != nil { - fmt.Printf("unable to find the config file at %v. 
will use the default project configuration for the "+ - "%v compilation platform instead\n", configPath, DefaultCompilationPlatform) + cmdLogger.Warn(fmt.Sprintf("Unable to find the config file at %v, will use the default project configuration for the "+ + "%v compilation platform instead", configPath, DefaultCompilationPlatform)) projectConfig, err = config.GetDefaultProjectConfig(DefaultCompilationPlatform) if err != nil { + cmdLogger.Error("Failed to run the fuzz command", err) return err } } @@ -95,6 +132,7 @@ func cmdRunFuzz(cmd *cobra.Command, args []string) error { // Update the project configuration given whatever flags were set using the CLI err = updateProjectConfigWithFuzzFlags(cmd, projectConfig) if err != nil { + cmdLogger.Error("Failed to run the fuzz command", err) return err } @@ -104,13 +142,18 @@ func cmdRunFuzz(cmd *cobra.Command, args []string) error { // be in the config directory when running this. err = os.Chdir(filepath.Dir(configPath)) if err != nil { + cmdLogger.Error("Failed to run the fuzz command", err) return err } + if !projectConfig.Fuzzing.CoverageEnabled { + cmdLogger.Warn("Disabling coverage may limit efficacy of fuzzing. Consider enabling coverage for better results.") + } + // Create our fuzzing - fuzzer, err := fuzzing.NewFuzzer(*projectConfig) - if err != nil { - return err + fuzzer, fuzzErr := fuzzing.NewFuzzer(*projectConfig) + if fuzzErr != nil { + return exitcodes.NewErrorWithExitCode(fuzzErr, exitcodes.ExitCodeHandledError) } // Stop our fuzzing on keyboard interrupts @@ -122,7 +165,15 @@ func cmdRunFuzz(cmd *cobra.Command, args []string) error { }() // Start the fuzzing process with our cancellable context. 
- err = fuzzer.Start() + fuzzErr = fuzzer.Start() + if fuzzErr != nil { + return exitcodes.NewErrorWithExitCode(fuzzErr, exitcodes.ExitCodeHandledError) + } + + // If we have no error and failed test cases, we'll want to return a special exit code + if fuzzErr == nil && len(fuzzer.TestCasesWithStatus(fuzzing.TestCaseStatusFailed)) > 0 { + return exitcodes.NewErrorWithExitCode(fuzzErr, exitcodes.ExitCodeTestFailed) + } - return err + return fuzzErr } diff --git a/cmd/fuzz_flags.go b/cmd/fuzz_flags.go index 936d6598..9e1c9d37 100644 --- a/cmd/fuzz_flags.go +++ b/cmd/fuzz_flags.go @@ -21,8 +21,8 @@ func addFuzzFlags() error { // Config file fuzzCmd.Flags().String("config", "", "path to config file") - // Target - fuzzCmd.Flags().String("target", "", TargetFlagDescription) + // Compilation Target + fuzzCmd.Flags().String("compilation-target", "", TargetFlagDescription) // Number of workers fuzzCmd.Flags().Int("workers", 0, @@ -40,14 +40,13 @@ func addFuzzFlags() error { fuzzCmd.Flags().Int("seq-len", 0, fmt.Sprintf("maximum transactions to run in sequence (unless a config file is provided, default is %d)", defaultConfig.Fuzzing.CallSequenceLength)) - // Deployment order - fuzzCmd.Flags().StringSlice("deployment-order", []string{}, - fmt.Sprintf("order in which to deploy target contracts (unless a config file is provided, default is %v)", defaultConfig.Fuzzing.DeploymentOrder)) + // Target contracts + fuzzCmd.Flags().StringSlice("target-contracts", []string{}, + fmt.Sprintf("target contracts for fuzz testing (unless a config file is provided, default is %v)", defaultConfig.Fuzzing.TargetContracts)) // Corpus directory - // TODO: Update description when we add "coverage reports" feature fuzzCmd.Flags().String("corpus-dir", "", - fmt.Sprintf("directory path for corpus items (unless a config file is provided, default is %q)", defaultConfig.Fuzzing.CorpusDirectory)) + fmt.Sprintf("directory path for corpus items and coverage reports (unless a config file is provided, 
default is %q)", defaultConfig.Fuzzing.CorpusDirectory)) // Senders fuzzCmd.Flags().StringSlice("senders", []string{}, @@ -57,13 +56,13 @@ func addFuzzFlags() error { fuzzCmd.Flags().String("deployer", "", "account address used to deploy contracts") - // Assertion mode - fuzzCmd.Flags().Bool("assertion-mode", false, - fmt.Sprintf("enable assertion mode (unless a config file is provided, default is %t)", defaultConfig.Fuzzing.Testing.AssertionTesting.Enabled)) - // Trace all fuzzCmd.Flags().Bool("trace-all", false, fmt.Sprintf("print the execution trace for every element in a shrunken call sequence instead of only the last element (unless a config file is provided, default is %t)", defaultConfig.Fuzzing.Testing.TraceAll)) + + // Logging color + fuzzCmd.Flags().Bool("no-color", false, "disabled colored terminal output") + return nil } @@ -71,10 +70,10 @@ func addFuzzFlags() error { func updateProjectConfigWithFuzzFlags(cmd *cobra.Command, projectConfig *config.ProjectConfig) error { var err error - // If --target was used - if cmd.Flags().Changed("target") { + // If --compilation-target was used + if cmd.Flags().Changed("compilation-target") { // Get the new target - newTarget, err := cmd.Flags().GetString("target") + newTarget, err := cmd.Flags().GetString("compilation-target") if err != nil { return err } @@ -117,9 +116,9 @@ func updateProjectConfigWithFuzzFlags(cmd *cobra.Command, projectConfig *config. } } - // Update deployment order - if cmd.Flags().Changed("deployment-order") { - projectConfig.Fuzzing.DeploymentOrder, err = cmd.Flags().GetStringSlice("deployment-order") + // Update target contracts + if cmd.Flags().Changed("target-contracts") { + projectConfig.Fuzzing.TargetContracts, err = cmd.Flags().GetStringSlice("target-contracts") if err != nil { return err } @@ -149,17 +148,17 @@ func updateProjectConfigWithFuzzFlags(cmd *cobra.Command, projectConfig *config. 
} } - // Update assertion mode enablement - if cmd.Flags().Changed("assertion-mode") { - projectConfig.Fuzzing.Testing.AssertionTesting.Enabled, err = cmd.Flags().GetBool("assertion-mode") + // Update trace all enablement + if cmd.Flags().Changed("trace-all") { + projectConfig.Fuzzing.Testing.TraceAll, err = cmd.Flags().GetBool("trace-all") if err != nil { return err } } - // Update trace all enablement - if cmd.Flags().Changed("trace-all") { - projectConfig.Fuzzing.Testing.TraceAll, err = cmd.Flags().GetBool("trace-all") + // Update logging color mode + if cmd.Flags().Changed("no-color") { + projectConfig.Logging.NoColor, err = cmd.Flags().GetBool("no-color") if err != nil { return err } diff --git a/cmd/init.go b/cmd/init.go index a9685fe8..35cb918b 100644 --- a/cmd/init.go +++ b/cmd/init.go @@ -6,45 +6,88 @@ import ( "path/filepath" "strings" + "github.com/crytic/medusa/logging/colors" + "github.com/crytic/medusa/compilation" "github.com/crytic/medusa/fuzzing/config" "github.com/spf13/cobra" + "github.com/spf13/pflag" ) +// Get supported platforms for customized static completions of "init" flag `$ medusa init ` +// and to cache supported platforms for CLI arguments validation +var supportedPlatforms = compilation.GetSupportedCompilationPlatforms() + // initCmd represents the command provider for init var initCmd = &cobra.Command{ - Use: "init [platform]", - Short: "Initializes a project configuration", - Long: `Initializes a project configuration`, - Args: cmdValidateInitArgs, - RunE: cmdRunInit, + Use: "init [platform]", + Short: "Initializes a project configuration", + Long: `Initializes a project configuration`, + Args: cmdValidateInitArgs, + ValidArgsFunction: cmdValidInitArgs, + RunE: cmdRunInit, + SilenceUsage: true, + SilenceErrors: true, } func init() { // Add flags to init command err := addInitFlags() if err != nil { - panic(err) + cmdLogger.Panic("Failed to initialize the init command", err) } // Add the init command and its associated flags to the 
root command rootCmd.AddCommand(initCmd) } +// cmdValidInitArgs will return which flags and sub-commands are valid for dynamic completion for the init command +func cmdValidInitArgs(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + // Gather a list of flags that are available to be used in the current command but have not been used yet + var unusedFlags []string + + // Examine all the flags, and add any flags that have not been set in the current command line + // to a list of unused flags + flagUsed := false + cmd.Flags().VisitAll(func(flag *pflag.Flag) { + if !flag.Changed { + // When adding a flag to a command, include the "--" prefix to indicate that it is a flag + // and not a positional argument. Additionally, when the user presses the TAB key twice after typing + // a flag name, the "--" prefix will appear again, indicating that more flags are available and that + // none of the arguments are positional. + unusedFlags = append(unusedFlags, "--"+flag.Name) + } else { + // If any flag has been used, set flag used to true. This will be used later in the function. + flagUsed = true + } + }) + + // If a default platform is not specified, add a list of available platforms to the list of unused flags. + // If any flag is used, then we can assume that the default platform is used so we don't need to add supported platforms + if len(args) == 0 && !flagUsed { + unusedFlags = append(unusedFlags, supportedPlatforms...) 
+ } + + // Provide a list of flags that can be used in the current command (but have not been used yet) + // for autocompletion suggestions + return unusedFlags, cobra.ShellCompDirectiveNoFileComp +} + // cmdValidateInitArgs validates CLI arguments func cmdValidateInitArgs(cmd *cobra.Command, args []string) error { - // Cache supported platforms - supportedPlatforms := compilation.GetSupportedCompilationPlatforms() - // Make sure we have no more than 1 arg if err := cobra.RangeArgs(0, 1)(cmd, args); err != nil { - return fmt.Errorf("init accepts at most 1 platform argument (options: %s). "+ + err = fmt.Errorf("init accepts at most 1 platform argument (options: %s). "+ "default platform is %v\n", strings.Join(supportedPlatforms, ", "), DefaultCompilationPlatform) + cmdLogger.Error("Failed to validate args to the init command", err) + return err } // Ensure the optional provided argument refers to a supported platform if len(args) == 1 && !compilation.IsSupportedCompilationPlatform(args[0]) { - return fmt.Errorf("init was provided invalid platform argument '%s' (options: %s)", args[0], strings.Join(supportedPlatforms, ", ")) + err := fmt.Errorf("init was provided invalid platform argument '%s' (options: %s)", args[0], strings.Join(supportedPlatforms, ", ")) + cmdLogger.Error("Failed to validate args to the init command", err) + return err } return nil @@ -56,12 +99,14 @@ func cmdRunInit(cmd *cobra.Command, args []string) error { outputFlagUsed := cmd.Flags().Changed("out") outputPath, err := cmd.Flags().GetString("out") if err != nil { + cmdLogger.Error("Failed to run the init command", err) return err } // If we weren't provided an output path (flag was not used), we use our working directory if !outputFlagUsed { workingDirectory, err := os.Getwd() if err != nil { + cmdLogger.Error("Failed to run the init command", err) return err } outputPath = filepath.Join(workingDirectory, DefaultProjectConfigFilename) @@ -70,6 +115,7 @@ func cmdRunInit(cmd *cobra.Command, args 
[]string) error { // By default, projectConfig will be the default project config for the DefaultCompilationPlatform projectConfig, err := config.GetDefaultProjectConfig(DefaultCompilationPlatform) if err != nil { + cmdLogger.Error("Failed to run the init command", err) return err } @@ -78,6 +124,8 @@ func cmdRunInit(cmd *cobra.Command, args []string) error { if len(args) == 1 && args[0] != DefaultCompilationPlatform { projectConfig, err = config.GetDefaultProjectConfig(args[0]) if err != nil { + cmdLogger.Error("Failed to run the init command", err) + return err } } @@ -85,12 +133,31 @@ func cmdRunInit(cmd *cobra.Command, args []string) error { // Update the project configuration given whatever flags were set using the CLI err = updateProjectConfigWithInitFlags(cmd, projectConfig) if err != nil { + cmdLogger.Error("Failed to run the init command", err) return err } + if _, err = os.Stat(outputPath); err == nil { + // Prompt user for overwrite confirmation + fmt.Print("The file already exists. Overwrite? 
(y/n): ") + var response string + if _, err := fmt.Scan(&response); err != nil { + // Handle the error (e.g., log it, return an error) + cmdLogger.Error("Failed to scan input", err) + return err + } + + if response != "y" && response != "Y" { + fmt.Println("Operation canceled.") + return nil + } + + } + // Write our project configuration err = projectConfig.WriteToFile(outputPath) if err != nil { + cmdLogger.Error("Failed to run the init command", err) return err } @@ -98,6 +165,6 @@ func cmdRunInit(cmd *cobra.Command, args []string) error { if absoluteOutputPath, err := filepath.Abs(outputPath); err == nil { outputPath = absoluteOutputPath } - fmt.Printf("Project configuration successfully output to: %s\n", outputPath) + cmdLogger.Info("Project configuration successfully output to: ", colors.Bold, outputPath, colors.Reset) return nil } diff --git a/cmd/init_flags.go b/cmd/init_flags.go index 40c3499a..dfe6d8d6 100644 --- a/cmd/init_flags.go +++ b/cmd/init_flags.go @@ -10,18 +10,18 @@ func addInitFlags() error { // Output path for configuration initCmd.Flags().String("out", "", "output path for the new project configuration file") - // Target file / directory - initCmd.Flags().String("target", "", TargetFlagDescription) + // Target file / directory for compilation + initCmd.Flags().String("compilation-target", "", TargetFlagDescription) return nil } // updateProjectConfigWithInitFlags will update the given projectConfig with any CLI arguments that were provided to the init command func updateProjectConfigWithInitFlags(cmd *cobra.Command, projectConfig *config.ProjectConfig) error { - // If --target was used - if cmd.Flags().Changed("target") { + // If --compilation-target was used + if cmd.Flags().Changed("compilation-target") { // Get the new target - newTarget, err := cmd.Flags().GetString("target") + newTarget, err := cmd.Flags().GetString("compilation-target") if err != nil { return err } diff --git a/cmd/root.go b/cmd/root.go index 4cd678f3..6e710b61 100644 
--- a/cmd/root.go +++ b/cmd/root.go @@ -1,10 +1,13 @@ package cmd import ( + "github.com/crytic/medusa/logging" + "github.com/rs/zerolog" "github.com/spf13/cobra" + "os" ) -const version = "0.1.0" +const version = "0.1.8" // rootCmd represents the root CLI command object which all other commands stem from. var rootCmd = &cobra.Command{ @@ -14,9 +17,14 @@ var rootCmd = &cobra.Command{ Long: "medusa is a solidity smart contract fuzzing harness", } -// Execute provides an exportable function to invoke the CLI. -// Returns an error if one was encountered. +// cmdLogger is the logger that will be used for the cmd package +var cmdLogger = logging.NewLogger(zerolog.InfoLevel) + +// Execute provides an exportable function to invoke the CLI. Returns an error if one was encountered. func Execute() error { + // Add stdout as an unstructured, colorized output stream for the command logger + cmdLogger.AddWriter(os.Stdout, logging.UNSTRUCTURED, true) + rootCmd.CompletionOptions.DisableDefaultCmd = true return rootCmd.Execute() } diff --git a/compilation/abiutils/solidity_errors.go b/compilation/abiutils/solidity_errors.go index e7335889..88ce7163 100644 --- a/compilation/abiutils/solidity_errors.go +++ b/compilation/abiutils/solidity_errors.go @@ -2,9 +2,12 @@ package abiutils import ( "bytes" + "errors" + "fmt" + "math/big" + "github.com/ethereum/go-ethereum/accounts/abi" "github.com/ethereum/go-ethereum/core/vm" - "math/big" ) // An enum is defined below providing all `Panic(uint)` error codes returned in return data when the VM encounters @@ -36,7 +39,7 @@ func GetSolidityPanicCode(returnError error, returnData []byte, backwardsCompati } // Verify we have a revert, and our return data fits exactly the selector + uint256 - if returnError == vm.ErrExecutionReverted && len(returnData) == 4+32 { + if errors.Is(returnError, vm.ErrExecutionReverted) && len(returnData) == 4+32 { uintType, _ := abi.NewType("uint256", "", nil) panicReturnDataAbi := abi.NewMethod("Panic", "Panic", 
abi.Function, "", false, false, []abi.Argument{ {Name: "", Type: uintType, Indexed: false}, @@ -60,7 +63,7 @@ func GetSolidityPanicCode(returnError error, returnData []byte, backwardsCompati // If the error and return data are not representative of an Error, then nil is returned. func GetSolidityRevertErrorString(returnError error, returnData []byte) *string { // Verify we have a revert, and our return data fits the selector + additional data. - if returnError == vm.ErrExecutionReverted && len(returnData) > 4 { + if errors.Is(returnError, vm.ErrExecutionReverted) && len(returnData) > 4 { stringType, _ := abi.NewType("string", "", nil) errorReturnDataAbi := abi.NewMethod("Error", "Error", abi.Function, "", false, false, []abi.Argument{ {Name: "", Type: stringType, Indexed: false}, @@ -87,7 +90,7 @@ func GetSolidityRevertErrorString(returnError error, returnData []byte) *string func GetSolidityCustomRevertError(contractAbi *abi.ABI, returnError error, returnData []byte) (*abi.Error, []any) { // If no ABI was given or a revert was not encountered, no custom error can be extracted, or may exist, // respectively. - if returnError != vm.ErrExecutionReverted || contractAbi == nil { + if !errors.Is(returnError, vm.ErrExecutionReverted) || contractAbi == nil { return nil, nil } @@ -105,3 +108,33 @@ func GetSolidityCustomRevertError(contractAbi *abi.ABI, returnError error, retur } return nil, nil } + +// GetPanicReason will take in a panic code as an uint64 and will return the string reason behind that panic code. For +// example, if panic code is PanicCodeAssertFailed, then "assertion failure" is returned. 
+func GetPanicReason(panicCode uint64) string { + // Switch on panic code + switch panicCode { + case PanicCodeCompilerInserted: + return "panic: compiler inserted panic" + case PanicCodeAssertFailed: + return "panic: assertion failed" + case PanicCodeArithmeticUnderOverflow: + return "panic: arithmetic underflow" + case PanicCodeDivideByZero: + return "panic: division by zero" + case PanicCodeEnumTypeConversionOutOfBounds: + return "panic: enum access out of bounds" + case PanicCodeIncorrectStorageAccess: + return "panic: incorrect storage access" + case PanicCodePopEmptyArray: + return "panic: pop on empty array" + case PanicCodeOutOfBoundsArrayAccess: + return "panic: out of bounds array access" + case PanicCodeAllocateTooMuchMemory: + return "panic; overallocation of memory" + case PanicCodeCallUninitializedVariable: + return "panic: call on uninitialized variable" + default: + return fmt.Sprintf("unknown panic code(%v)", panicCode) + } +} diff --git a/compilation/compilation_config.go b/compilation/compilation_config.go index 9f676f29..6f56bc88 100644 --- a/compilation/compilation_config.go +++ b/compilation/compilation_config.go @@ -52,11 +52,6 @@ func NewCompilationConfigFromPlatformConfig(platformConfig platforms.PlatformCon // is then used to compile the underlying targets. Returns a list of compilations returned by the platform provider or // an error. 
Command-line input may also be returned in either case., func (c *CompilationConfig) Compile() ([]types.Compilation, string, error) { - // Verify the platform is valid - if !IsSupportedCompilationPlatform(c.Platform) { - return nil, "", fmt.Errorf("could not compile from configs: platform '%s' is unsupported", c.Platform) - } - // Get the platform config platformConfig, err := c.GetPlatformConfig() if err != nil { @@ -69,6 +64,16 @@ func (c *CompilationConfig) Compile() ([]types.Compilation, string, error) { // GetPlatformConfig will return the de-serialized version of platforms.PlatformConfig for a given CompilationConfig func (c *CompilationConfig) GetPlatformConfig() (platforms.PlatformConfig, error) { + // Ensure that the platform is non-empty + if c.Platform == "" { + return nil, fmt.Errorf("must specify a platform for compilation") + } + + // Ensure that the platform is supported + if !IsSupportedCompilationPlatform(c.Platform) { + return nil, fmt.Errorf("compilation platform '%v' is unsupported", c.Platform) + } + // Allocate a platform config given our platform string in our compilation config // It is necessary to do so as json.Unmarshal needs a concrete structure to populate platformConfig := GetDefaultPlatformConfig(c.Platform) @@ -87,7 +92,10 @@ func (c *CompilationConfig) SetPlatformConfig(platformConfig platforms.PlatformC return errors.New("platformConfig must be non-nil") } - // Update platform + // Update platform, assuming the platform is supported + if !IsSupportedCompilationPlatform(platformConfig.Platform()) { + return fmt.Errorf("compilation platform '%v' is unsupported", platformConfig.Platform()) + } c.Platform = platformConfig.Platform() // Serialize diff --git a/compilation/platforms/crytic_compile.go b/compilation/platforms/crytic_compile.go index de01873c..77d43245 100644 --- a/compilation/platforms/crytic_compile.go +++ b/compilation/platforms/crytic_compile.go @@ -11,6 +11,7 @@ import ( "strings" 
"github.com/crytic/medusa/compilation/types" + "github.com/crytic/medusa/logging" "github.com/crytic/medusa/utils" ) @@ -114,6 +115,7 @@ func (c *CryticCompilationConfig) Compile() ([]types.Compilation, string, error) // Get main command and set working directory cmd := exec.Command("crytic-compile", args...) + logging.GlobalLogger.Info("Running command:\n", cmd.String()) // Install a specific `solc` version if requested in the config if c.SolcVersion != "" { @@ -143,7 +145,7 @@ func (c *CryticCompilationConfig) Compile() ([]types.Compilation, string, error) var compilationList []types.Compilation // Define the structure of our crytic-compile export data. - type solcExportSource struct { + type solcSourceUnit struct { AST any `json:"AST"` } type solcExportContract struct { @@ -154,7 +156,7 @@ func (c *CryticCompilationConfig) Compile() ([]types.Compilation, string, error) BinRuntime string `json:"bin-runtime"` } type solcExportData struct { - Sources map[string]solcExportSource `json:"sources"` + Sources map[string]solcSourceUnit `json:"sources"` Contracts map[string]solcExportContract `json:"contracts"` } @@ -176,12 +178,40 @@ func (c *CryticCompilationConfig) Compile() ([]types.Compilation, string, error) // Create a compilation object that will store the contracts and source information. compilation := types.NewCompilation() + // Create a map of contract names to their kinds + contractKinds := make(map[string]types.ContractKind) + // Loop through all sources and parse them into our types. 
for sourcePath, source := range solcExport.Sources { - compilation.Sources[sourcePath] = types.CompiledSource{ - Ast: source.AST, - Contracts: make(map[string]types.CompiledContract), + // Convert the AST into our version of the AST (types.AST) + var ast types.AST + b, err = json.Marshal(source.AST) + if err != nil { + return nil, "", fmt.Errorf("could not encode AST from sources: %v", err) + } + err = json.Unmarshal(b, &ast) + if err != nil { + return nil, "", fmt.Errorf("could not parse AST from sources: %v", err) + } + + // From the AST, extract the contract kinds where the contract definition could be for a contract, library, + // or interface + for _, node := range ast.Nodes { + if node.GetNodeType() == "ContractDefinition" { + contractDefinition := node.(types.ContractDefinition) + contractKinds[contractDefinition.CanonicalName] = contractDefinition.Kind + } + } + + // Retrieve the source unit ID + sourceUnitId := types.GetSrcMapSourceUnitID(ast.Src) + compilation.SourcePathToArtifact[sourcePath] = types.SourceArtifact{ + // TODO: Our types.AST is not the same as the original AST but we could parse it and avoid using "any" + Ast: source.AST, + Contracts: make(map[string]types.CompiledContract), + SourceUnitId: sourceUnitId, } + compilation.SourceIdToPath[sourceUnitId] = sourcePath } // Loop through all contracts and parse them into our types. @@ -196,12 +226,12 @@ func (c *CryticCompilationConfig) Compile() ([]types.Compilation, string, error) // Ensure a source exists for this, or create one if our path somehow differed from any // path not existing in the "sources" key at the root of the export. 
- if _, ok := compilation.Sources[sourcePath]; !ok { - parentSource := types.CompiledSource{ + if _, ok := compilation.SourcePathToArtifact[sourcePath]; !ok { + parentSource := types.SourceArtifact{ Ast: nil, Contracts: make(map[string]types.CompiledContract), } - compilation.Sources[sourcePath] = parentSource + compilation.SourcePathToArtifact[sourcePath] = parentSource } // Parse the ABI @@ -221,12 +251,13 @@ func (c *CryticCompilationConfig) Compile() ([]types.Compilation, string, error) } // Add contract details - compilation.Sources[sourcePath].Contracts[contractName] = types.CompiledContract{ + compilation.SourcePathToArtifact[sourcePath].Contracts[contractName] = types.CompiledContract{ Abi: *contractAbi, InitBytecode: initBytecode, RuntimeBytecode: runtimeBytecode, SrcMapsInit: contract.SrcMap, SrcMapsRuntime: contract.SrcMapRuntime, + Kind: contractKinds[contractName], } } diff --git a/compilation/platforms/crytic_compile_test.go b/compilation/platforms/crytic_compile_test.go index 2b5e27c4..5f0f6e70 100644 --- a/compilation/platforms/crytic_compile_test.go +++ b/compilation/platforms/crytic_compile_test.go @@ -1,22 +1,23 @@ package platforms import ( - "github.com/crytic/medusa/compilation/types" - "github.com/crytic/medusa/utils" - "github.com/crytic/medusa/utils/testutils" - "github.com/stretchr/testify/assert" "os" "os/exec" "path/filepath" "strings" "testing" + + "github.com/crytic/medusa/compilation/types" + "github.com/crytic/medusa/utils" + "github.com/crytic/medusa/utils/testutils" + "github.com/stretchr/testify/assert" ) // testCryticGetCompiledSourceByBaseName checks if a given source file exists in a given compilation's map of sources. // The source file is the file name of a specific file. This function simply checks one of the paths ends with // this name. Avoid including any directories in case the path separators differ per system. // Returns the types.CompiledSource (mapping value) associated to the path if it is found. 
Returns nil otherwise. -func testCryticGetCompiledSourceByBaseName(sources map[string]types.CompiledSource, name string) *types.CompiledSource { +func testCryticGetCompiledSourceByBaseName(sources map[string]types.SourceArtifact, name string) *types.SourceArtifact { // Obtain a lower case version of our name to search for lowerName := strings.ToLower(name) @@ -39,7 +40,7 @@ func testCryticGetCompiledSourceByBaseName(sources map[string]types.CompiledSour // file path. func TestCryticSingleFileAbsolutePath(t *testing.T) { // Copy our testdata over to our testing directory - contractPath := testutils.CopyToTestDirectory(t, "testdata/solc/SimpleContract.sol") + contractPath := testutils.CopyToTestDirectory(t, "testdata/solc/basic/SimpleContract.sol") // Execute our tests in the given test path testutils.ExecuteInDirectory(t, contractPath, func() { @@ -53,10 +54,10 @@ func TestCryticSingleFileAbsolutePath(t *testing.T) { // One compilation object assert.EqualValues(t, 1, len(compilations)) // One source because we specified one file - assert.EqualValues(t, 1, len(compilations[0].Sources)) + assert.EqualValues(t, 1, len(compilations[0].SourcePathToArtifact)) // Two contracts in SimpleContract.sol contractCount := 0 - for _, source := range compilations[0].Sources { + for _, source := range compilations[0].SourcePathToArtifact { contractCount += len(source.Contracts) } assert.EqualValues(t, 2, contractCount) @@ -67,7 +68,7 @@ func TestCryticSingleFileAbsolutePath(t *testing.T) { // file path in the working directory. 
func TestCryticSingleFileRelativePathSameDirectory(t *testing.T) { // Copy our testdata over to our testing directory - contractPath := testutils.CopyToTestDirectory(t, "testdata/solc/SimpleContract.sol") + contractPath := testutils.CopyToTestDirectory(t, "testdata/solc/basic/SimpleContract.sol") contractName := filepath.Base(contractPath) // Execute our tests in the given test path @@ -82,10 +83,10 @@ func TestCryticSingleFileRelativePathSameDirectory(t *testing.T) { // One compilation object assert.EqualValues(t, 1, len(compilations)) // One source because we specified one file - assert.EqualValues(t, 1, len(compilations[0].Sources)) + assert.EqualValues(t, 1, len(compilations[0].SourcePathToArtifact)) // Two contracts in SimpleContract.sol contractCount := 0 - for _, source := range compilations[0].Sources { + for _, source := range compilations[0].SourcePathToArtifact { contractCount += len(source.Contracts) } assert.EqualValues(t, 2, contractCount) @@ -96,7 +97,7 @@ func TestCryticSingleFileRelativePathSameDirectory(t *testing.T) { // file path in a child directory of the working directory. 
func TestCryticSingleFileRelativePathChildDirectory(t *testing.T) { // Copy our testdata over to our testing directory - contractPath := testutils.CopyToTestDirectory(t, "testdata/solc/SimpleContract.sol") + contractPath := testutils.CopyToTestDirectory(t, "testdata/solc/basic/SimpleContract.sol") // Move it to a subdirectory contractDirectory := filepath.Dir(contractPath) @@ -118,10 +119,10 @@ func TestCryticSingleFileRelativePathChildDirectory(t *testing.T) { // One compilation object assert.EqualValues(t, 1, len(compilations)) // One source because we specified one file - assert.EqualValues(t, 1, len(compilations[0].Sources)) + assert.EqualValues(t, 1, len(compilations[0].SourcePathToArtifact)) // Two contracts in SimpleContract.sol contractCount := 0 - for _, source := range compilations[0].Sources { + for _, source := range compilations[0].SourcePathToArtifact { contractCount += len(source.Contracts) } assert.EqualValues(t, 2, contractCount) @@ -137,7 +138,7 @@ func TestCryticSingleFileRelativePathChildDirectory(t *testing.T) { // a relative path provided. func TestCryticSingleFileBuildDirectoryArgRelativePath(t *testing.T) { // Copy our testdata over to our testing directory - contractPath := testutils.CopyToTestDirectory(t, "testdata/solc/SimpleContract.sol") + contractPath := testutils.CopyToTestDirectory(t, "testdata/solc/basic/SimpleContract.sol") // Execute our tests in the given test path testutils.ExecuteInDirectory(t, contractPath, func() { @@ -160,9 +161,9 @@ func TestCryticSingleFileBuildDirectoryArgRelativePath(t *testing.T) { // One compilation object assert.EqualValues(t, 1, len(compilations)) // One source because we specified one file - assert.EqualValues(t, 1, len(compilations[0].Sources)) + assert.EqualValues(t, 1, len(compilations[0].SourcePathToArtifact)) // Two contracts in SimpleContract.sol. 
- compiledSource := testCryticGetCompiledSourceByBaseName(compilations[0].Sources, contractName) + compiledSource := testCryticGetCompiledSourceByBaseName(compilations[0].SourcePathToArtifact, contractName) assert.NotNil(t, compiledSource, "source file could not be resolved in compilation sources") assert.EqualValues(t, 2, len(compiledSource.Contracts)) }) @@ -172,7 +173,7 @@ func TestCryticSingleFileBuildDirectoryArgRelativePath(t *testing.T) { // (e.g. export-dir, export-format) func TestCryticSingleFileBadArgs(t *testing.T) { // Copy our testdata over to our testing directory - contractPath := testutils.CopyToTestDirectory(t, "testdata/solc/SimpleContract.sol") + contractPath := testutils.CopyToTestDirectory(t, "testdata/solc/basic/SimpleContract.sol") // Execute our tests in the given test path testutils.ExecuteInDirectory(t, contractPath, func() { @@ -198,6 +199,34 @@ func TestCryticSingleFileBadArgs(t *testing.T) { }) } +// TestCryticMultipleFiles tests compilation of a single target that inherits from another file. 
+func TestCryticMultipleFiles(t *testing.T) { + // Copy our testdata over to our testing directory + contractPath := testutils.CopyToTestDirectory(t, "testdata/solc/basic/") + + // Execute our tests in the given test path + testutils.ExecuteInDirectory(t, contractPath, func() { + // Create our platform configuration + config := NewCryticCompilationConfig("DerivedContract.sol") + + // Compile the file + compilations, _, err := config.Compile() + assert.NoError(t, err) + + // Verify there is one compilation object + assert.EqualValues(t, 1, len(compilations)) + // Verify there are two sources + assert.EqualValues(t, 2, len(compilations[0].SourcePathToArtifact)) + + // Verify there are three contracts + contractCount := 0 + for _, source := range compilations[0].SourcePathToArtifact { + contractCount += len(source.Contracts) + } + assert.EqualValues(t, 3, contractCount) + }) +} + // TestCryticDirectoryNoArgs tests compilation of a hardhat directory with no addition arguments provided func TestCryticDirectoryNoArgs(t *testing.T) { // Copy our testdata over to our testing directory @@ -219,16 +248,16 @@ func TestCryticDirectoryNoArgs(t *testing.T) { // Two compilation objects assert.EqualValues(t, 2, len(compilations)) // One source per compilation unit - assert.EqualValues(t, 1, len(compilations[0].Sources)) - assert.EqualValues(t, 1, len(compilations[1].Sources)) + assert.EqualValues(t, 1, len(compilations[0].SourcePathToArtifact)) + assert.EqualValues(t, 1, len(compilations[1].SourcePathToArtifact)) // Obtain the compiled source from both compilation units firstContractName := "FirstContract.sol" secondContractName := "SecondContract.sol" - firstUnitFirstContractSource := testCryticGetCompiledSourceByBaseName(compilations[0].Sources, firstContractName) - firstUnitSecondContractSource := testCryticGetCompiledSourceByBaseName(compilations[0].Sources, secondContractName) - secondUnitFirstContractSource := testCryticGetCompiledSourceByBaseName(compilations[1].Sources, 
firstContractName) - secondUnitSecondContractSource := testCryticGetCompiledSourceByBaseName(compilations[1].Sources, secondContractName) + firstUnitFirstContractSource := testCryticGetCompiledSourceByBaseName(compilations[0].SourcePathToArtifact, firstContractName) + firstUnitSecondContractSource := testCryticGetCompiledSourceByBaseName(compilations[0].SourcePathToArtifact, secondContractName) + secondUnitFirstContractSource := testCryticGetCompiledSourceByBaseName(compilations[1].SourcePathToArtifact, firstContractName) + secondUnitSecondContractSource := testCryticGetCompiledSourceByBaseName(compilations[1].SourcePathToArtifact, secondContractName) // Assert that each compilation unit should have two contracts in it. // Compilation unit ordering is non-deterministic in JSON output diff --git a/compilation/platforms/solc.go b/compilation/platforms/solc.go index 989a7e53..068cbeb7 100644 --- a/compilation/platforms/solc.go +++ b/compilation/platforms/solc.go @@ -105,10 +105,13 @@ func (s *SolcCompilationConfig) Compile() ([]types.Compilation, string, error) { // Create a compilation unit out of this. 
compilation := types.NewCompilation() + // Create a map of contract names to their kinds + contractKinds := make(map[string]types.ContractKind) + // Parse our sources from solc output if sources, ok := results["sources"]; ok { if sourcesMap, ok := sources.(map[string]any); ok { - for name, source := range sourcesMap { + for sourcePath, source := range sourcesMap { // Treat our source as a key-value lookup sourceDict, sourceCorrectType := source.(map[string]any) if !sourceCorrectType { @@ -116,16 +119,42 @@ func (s *SolcCompilationConfig) Compile() ([]types.Compilation, string, error) { } // Try to obtain our AST key - ast, hasAST := sourceDict["AST"] + origAST, hasAST := sourceDict["AST"] if !hasAST { return nil, "", fmt.Errorf("could not parse AST from sources, AST field could not be found") } + // Convert the AST into our version of the AST (types.AST) + var ast types.AST + b, err := json.Marshal(origAST) + if err != nil { + return nil, "", fmt.Errorf("could not encode AST from sources: %v", err) + } + err = json.Unmarshal(b, &ast) + if err != nil { + return nil, "", fmt.Errorf("could not parse AST from sources, error: %v", err) + } + + // From the AST, extract the contract kinds where the contract definition could be for a contract, library, + // or interface + for _, node := range ast.Nodes { + if node.GetNodeType() == "ContractDefinition" { + contractDefinition := node.(types.ContractDefinition) + contractKinds[contractDefinition.CanonicalName] = contractDefinition.Kind + } + } + + // Get the source unit ID + sourceUnitId := types.GetSrcMapSourceUnitID(ast.Src) // Construct our compiled source object - compilation.Sources[name] = types.CompiledSource{ - Ast: ast, - Contracts: make(map[string]types.CompiledContract), + compilation.SourcePathToArtifact[sourcePath] = types.SourceArtifact{ + // TODO our types.AST is not the same as the original AST but we could parse it and avoid using "any" + Ast: origAST, + Contracts: make(map[string]types.CompiledContract), + 
SourceUnitId: sourceUnitId, } + compilation.SourceIdToPath[sourceUnitId] = sourcePath + } } } @@ -135,6 +164,7 @@ func (s *SolcCompilationConfig) Compile() ([]types.Compilation, string, error) { if err != nil { return nil, "", err } + for name, contract := range contracts { // Split our name which should be of form "filename:contractname" nameSplit := strings.Split(name, ":") @@ -158,12 +188,13 @@ func (s *SolcCompilationConfig) Compile() ([]types.Compilation, string, error) { } // Construct our compiled contract - compilation.Sources[sourcePath].Contracts[contractName] = types.CompiledContract{ + compilation.SourcePathToArtifact[sourcePath].Contracts[contractName] = types.CompiledContract{ Abi: *contractAbi, InitBytecode: initBytecode, RuntimeBytecode: runtimeBytecode, SrcMapsInit: contract.Info.SrcMap.(string), SrcMapsRuntime: contract.Info.SrcMapRuntime, + Kind: contractKinds[contractName], } } diff --git a/compilation/platforms/solc_test.go b/compilation/platforms/solc_test.go index 95ad967d..0ed59ecc 100644 --- a/compilation/platforms/solc_test.go +++ b/compilation/platforms/solc_test.go @@ -19,12 +19,12 @@ func TestSolcVersion(t *testing.T) { // with an absolute target path in our platform config. 
func TestSimpleSolcCompilationAbsolutePath(t *testing.T) { // Copy our testdata over to our testing directory - contractPath := testutils.CopyToTestDirectory(t, "testdata/solc/SimpleContract.sol") + contractDirectory := testutils.CopyToTestDirectory(t, "testdata/solc/basic/") // Execute our tests in the given test path - testutils.ExecuteInDirectory(t, contractPath, func() { + testutils.ExecuteInDirectory(t, contractDirectory, func() { // Create a solc provider - solc := NewSolcCompilationConfig(contractPath) + solc := NewSolcCompilationConfig(filepath.Join(contractDirectory, "DerivedContract.sol")) // Obtain our compilations and ensure we didn't encounter an error compilations, _, err := solc.Compile() @@ -37,13 +37,12 @@ func TestSimpleSolcCompilationAbsolutePath(t *testing.T) { // with a relative target path in our platform config. func TestSimpleSolcCompilationRelativePath(t *testing.T) { // Copy our testdata over to our testing directory - contractPath := testutils.CopyToTestDirectory(t, "testdata/solc/SimpleContract.sol") - contractName := filepath.Base(contractPath) + contractDirectory := testutils.CopyToTestDirectory(t, "testdata/solc/basic/") // Execute our tests in the given test path - testutils.ExecuteInDirectory(t, contractPath, func() { + testutils.ExecuteInDirectory(t, contractDirectory, func() { // Create a solc provider - solc := NewSolcCompilationConfig(contractName) + solc := NewSolcCompilationConfig("DerivedContract.sol") // Obtain our solc version and ensure we didn't encounter an error compilations, _, err := solc.Compile() @@ -55,7 +54,7 @@ func TestSimpleSolcCompilationRelativePath(t *testing.T) { // TestFailedSolcCompilation tests that a single contract of invalid form should fail compilation. 
func TestFailedSolcCompilation(t *testing.T) { // Copy our testdata over to our testing directory - contractPath := testutils.CopyToTestDirectory(t, "testdata/solc/FailedCompilationContract.sol") + contractPath := testutils.CopyToTestDirectory(t, "testdata/solc/bad/FailedCompilationContract.sol") // Execute our tests in the given test path testutils.ExecuteInDirectory(t, contractPath, func() { diff --git a/compilation/platforms/testdata/solc/SimpleContract.sol b/compilation/platforms/testdata/solc/SimpleContract.sol deleted file mode 100644 index d50df0a7..00000000 --- a/compilation/platforms/testdata/solc/SimpleContract.sol +++ /dev/null @@ -1,20 +0,0 @@ -contract SimpleContract { - uint x; - uint y; - - function setX(uint value) public { - x = value; - } - - function setY(uint value) public { - y = value; - } -} - -contract InheritedContract is SimpleContract { - uint z; - - function setZ(uint value) public { - z = value; - } -} diff --git a/compilation/platforms/testdata/solc/FailedCompilationContract.sol b/compilation/platforms/testdata/solc/bad/FailedCompilationContract.sol similarity index 100% rename from compilation/platforms/testdata/solc/FailedCompilationContract.sol rename to compilation/platforms/testdata/solc/bad/FailedCompilationContract.sol diff --git a/compilation/platforms/testdata/solc/basic/DerivedContract.sol b/compilation/platforms/testdata/solc/basic/DerivedContract.sol new file mode 100644 index 00000000..12f02015 --- /dev/null +++ b/compilation/platforms/testdata/solc/basic/DerivedContract.sol @@ -0,0 +1,9 @@ +import "./SimpleContract.sol"; + +contract DerivedContract is SimpleContract { + uint z; + + function setZ(uint value) public { + z = value; + } +} diff --git a/compilation/platforms/testdata/solc/basic/SimpleContract.sol b/compilation/platforms/testdata/solc/basic/SimpleContract.sol new file mode 100644 index 00000000..3cdcf6b9 --- /dev/null +++ b/compilation/platforms/testdata/solc/basic/SimpleContract.sol @@ -0,0 +1,26 @@ +contract 
SimpleContract { + uint x; + uint y; + + function setX(uint value) public { + x = value; + } + + function setY(uint value) public { + y = value; + } +} + +contract SimpleContract2 { + uint x; + uint y; + + function setX(uint value) public returns (bool) { + x = value; + return true; + } + + function setY(uint value) public { + y = value; + } +} \ No newline at end of file diff --git a/compilation/platforms/testdata/truffle/basic_project/contracts/SimpleContract.sol b/compilation/platforms/testdata/truffle/basic_project/contracts/SimpleContract.sol deleted file mode 100644 index d50df0a7..00000000 --- a/compilation/platforms/testdata/truffle/basic_project/contracts/SimpleContract.sol +++ /dev/null @@ -1,20 +0,0 @@ -contract SimpleContract { - uint x; - uint y; - - function setX(uint value) public { - x = value; - } - - function setY(uint value) public { - y = value; - } -} - -contract InheritedContract is SimpleContract { - uint z; - - function setZ(uint value) public { - z = value; - } -} diff --git a/compilation/platforms/testdata/truffle/basic_project/migrations/1_initial_migration.js b/compilation/platforms/testdata/truffle/basic_project/migrations/1_initial_migration.js deleted file mode 100644 index 16a7ba52..00000000 --- a/compilation/platforms/testdata/truffle/basic_project/migrations/1_initial_migration.js +++ /dev/null @@ -1,5 +0,0 @@ -const Migrations = artifacts.require("Migrations"); - -module.exports = function (deployer) { - deployer.deploy(Migrations); -}; diff --git a/compilation/platforms/testdata/truffle/basic_project/test/.gitkeep b/compilation/platforms/testdata/truffle/basic_project/test/.gitkeep deleted file mode 100644 index e69de29b..00000000 diff --git a/compilation/platforms/testdata/truffle/basic_project/truffle-config.js b/compilation/platforms/testdata/truffle/basic_project/truffle-config.js deleted file mode 100644 index f10af854..00000000 --- a/compilation/platforms/testdata/truffle/basic_project/truffle-config.js +++ /dev/null @@ 
-1,116 +0,0 @@ -/** - * Use this file to configure your truffle project. It's seeded with some - * common settings for different networks and features like migrations, - * compilation and testing. Uncomment the ones you need or modify - * them to suit your project as necessary. - * - * More information about configuration can be found at: - * - * trufflesuite.com/docs/advanced/configuration - * - * To deploy via Infura you'll need a wallet provider (like @truffle/hdwallet-provider) - * to sign your transactions before they're sent to a remote public node. Infura accounts - * are available for free at: infura.io/register. - * - * You'll also need a mnemonic - the twelve word phrase the wallet uses to generate - * public/private key pairs. If you're publishing your code to GitHub make sure you load this - * phrase from a file you've .gitignored so it doesn't accidentally become public. - * - */ - -// const HDWalletProvider = require('@truffle/hdwallet-provider'); -// -// const fs = require('fs'); -// const mnemonic = fs.readFileSync(".secret").toString().trim(); - -module.exports = { - /** - * Networks define how you connect to your ethereum client and let you set the - * defaults web3 uses to send transactions. If you don't specify one truffle - * will spin up a development blockchain for you on port 9545 when you - * run `develop` or `test`. You can ask a truffle command to use a specific - * network from the command line, e.g - * - * $ truffle test --network - */ - - networks: { - // Useful for testing. The `development` name is special - truffle uses it by default - // if it's defined here and no other network is specified at the command line. - // You should run a client (like ganache-cli, geth or parity) in a separate terminal - // tab if you use this network and you must also set the `host`, `port` and `network_id` - // options below to some value. 
- // - // development: { - // host: "127.0.0.1", // Localhost (default: none) - // port: 8545, // Standard Ethereum port (default: none) - // network_id: "*", // Any network (default: none) - // }, - // Another network with more advanced options... - // advanced: { - // port: 8777, // Custom port - // network_id: 1342, // Custom network - // gas: 8500000, // Gas sent with each transaction (default: ~6700000) - // gasPrice: 20000000000, // 20 gwei (in wei) (default: 100 gwei) - // from:
, // Account to send txs from (default: accounts[0]) - // websocket: true // Enable EventEmitter interface for web3 (default: false) - // }, - // Useful for deploying to a public network. - // NB: It's important to wrap the provider as a function. - // ropsten: { - // provider: () => new HDWalletProvider(mnemonic, `https://ropsten.infura.io/v3/YOUR-PROJECT-ID`), - // network_id: 3, // Ropsten's id - // gas: 5500000, // Ropsten has a lower block limit than mainnet - // confirmations: 2, // # of confs to wait between deployments. (default: 0) - // timeoutBlocks: 200, // # of blocks before a deployment times out (minimum/default: 50) - // skipDryRun: true // Skip dry run before migrations? (default: false for public nets ) - // }, - // Useful for private networks - // private: { - // provider: () => new HDWalletProvider(mnemonic, `https://network.io`), - // network_id: 2111, // This network is yours, in the cloud. - // production: true // Treats this network as if it was a public net. (default: false) - // } - }, - - // Set default mocha options here, use special reporters etc. - mocha: { - // timeout: 100000 - }, - - // Configure your compilers - compilers: { - solc: { - // version: "0.5.1", // Fetch exact version from solc-bin (default: truffle's version) - // docker: true, // Use "0.5.1" you've installed locally with docker (default: false) - // settings: { // See the solidity docs for advice about optimization and evmVersion - // optimizer: { - // enabled: false, - // runs: 200 - // }, - // evmVersion: "byzantium" - // } - } - }, - - // Truffle DB is currently disabled by default; to enable it, change enabled: - // false to enabled: true. The default storage location can also be - // overridden by specifying the adapter settings, as shown in the commented code below. - // - // NOTE: It is not possible to migrate your contracts to truffle DB and you should - // make a backup of your artifacts to a safe location before enabling this feature. 
- // - // After you backed up your artifacts you can utilize db by running migrate as follows: - // $ truffle migrate --reset --compile-all - // - // db: { - // enabled: false, - // host: "127.0.0.1", - // adapter: { - // name: "sqlite", - // settings: { - // directory: ".db" - // } - // } - // } -}; diff --git a/compilation/platforms/truffle.go b/compilation/platforms/truffle.go deleted file mode 100644 index d522a818..00000000 --- a/compilation/platforms/truffle.go +++ /dev/null @@ -1,141 +0,0 @@ -package platforms - -import ( - "encoding/hex" - "encoding/json" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - - "github.com/crytic/medusa/compilation/types" -) - -type TruffleCompilationConfig struct { - Target string `json:"target"` - UseNpx bool `json:"useNpx"` - Command string `json:"command"` - BuildDirectory string `json:"buildDirectory"` -} - -func NewTruffleCompilationConfig(target string) *TruffleCompilationConfig { - return &TruffleCompilationConfig{ - Target: target, - UseNpx: true, - Command: "", - BuildDirectory: "", - } -} - -func (s *TruffleCompilationConfig) Platform() string { - return "truffle" -} - -// GetTarget returns the target for compilation -func (t *TruffleCompilationConfig) GetTarget() string { - return t.Target -} - -// SetTarget sets the new target for compilation -func (t *TruffleCompilationConfig) SetTarget(newTarget string) { - t.Target = newTarget -} - -func (s *TruffleCompilationConfig) Compile() ([]types.Compilation, string, error) { - // Determine the base command to use. - var baseCommandStr = "truffle" - if s.Command != "" { - baseCommandStr = s.Command - } - - // Execute solc to compile our target. 
- var cmd *exec.Cmd - if s.UseNpx { - cmd = exec.Command("npx", baseCommandStr, "compile", "--all") - } else { - cmd = exec.Command(baseCommandStr, "compile", "--all") - } - cmd.Dir = s.Target - out, err := cmd.CombinedOutput() - if err != nil { - return nil, "", fmt.Errorf("error while executing truffle:\nOUTPUT:\n%s\nERROR: %s\n", string(out), err.Error()) - } - - // Create a compilation unit out of this. - compilation := types.NewCompilation() - - // Find all the compiled truffle artifacts - buildDirectory := s.BuildDirectory - if buildDirectory == "" { - buildDirectory = filepath.Join(s.Target, "build", "contracts") - } - matches, err := filepath.Glob(filepath.Join(buildDirectory, "*.json")) - if err != nil { - return nil, "", err - } - - // Define our truffle structure to parse - type TruffleCompiledJson struct { - ContractName string `json:"contractName"` - Abi any `json:"abi"` - Bytecode string `json:"bytecode"` - DeployedBytecode string `json:"deployedBytecode"` - SourceMap string `json:"sourceMap"` - DeployedSourceMap string `json:"deployedSourceMap"` - Source string `json:"source"` - SourcePath string `json:"sourcePath"` - Ast any `json:"ast"` - } - - // Loop for each truffle artifact to parse our compilations. - for i := 0; i < len(matches); i++ { - // Read the compiled JSON file data - b, err := os.ReadFile(matches[i]) - if err != nil { - return nil, "", err - } - - // Parse the JSON - var compiledJson TruffleCompiledJson - err = json.Unmarshal(b, &compiledJson) - if err != nil { - return nil, "", err - } - - // Convert the abi structure to our parsed abi type - contractAbi, err := types.ParseABIFromInterface(compiledJson.Abi) - if err != nil { - continue - } - - // If we don't have a source for this file, create it. 
- if _, ok := compilation.Sources[compiledJson.SourcePath]; !ok { - compilation.Sources[compiledJson.SourcePath] = types.CompiledSource{ - Ast: compiledJson.Ast, - Contracts: make(map[string]types.CompiledContract), - } - } - - // Decode our init and runtime bytecode - initBytecode, err := hex.DecodeString(strings.TrimPrefix(compiledJson.Bytecode, "0x")) - if err != nil { - return nil, "", fmt.Errorf("unable to parse init bytecode for contract '%s'\n", compiledJson.ContractName) - } - runtimeBytecode, err := hex.DecodeString(strings.TrimPrefix(compiledJson.DeployedBytecode, "0x")) - if err != nil { - return nil, "", fmt.Errorf("unable to parse runtime bytecode for contract '%s'\n", compiledJson.ContractName) - } - - // Add our contract to the source - compilation.Sources[compiledJson.SourcePath].Contracts[compiledJson.ContractName] = types.CompiledContract{ - Abi: *contractAbi, - InitBytecode: initBytecode, - RuntimeBytecode: runtimeBytecode, - SrcMapsInit: compiledJson.SourceMap, - SrcMapsRuntime: compiledJson.DeployedSourceMap, - } - } - - return []types.Compilation{*compilation}, string(out), nil -} diff --git a/compilation/platforms/truffle_test.go b/compilation/platforms/truffle_test.go deleted file mode 100644 index 0def9e80..00000000 --- a/compilation/platforms/truffle_test.go +++ /dev/null @@ -1,24 +0,0 @@ -package platforms - -import ( - "github.com/crytic/medusa/utils/testutils" - "github.com/stretchr/testify/assert" - "testing" -) - -// TestTruffleCompilationAbsolutePath tests compilation of a truffle project with an absolute project path. 
-func TestTruffleCompilationAbsolutePath(t *testing.T) { - // Copy our testdata over to our testing directory - truffleDirectory := testutils.CopyToTestDirectory(t, "testdata/truffle/basic_project/") - - // Execute our tests in the given test path - testutils.ExecuteInDirectory(t, truffleDirectory, func() { - // Create a solc provider - truffleConfig := NewTruffleCompilationConfig(truffleDirectory) - - // Obtain our solc version and ensure we didn't encounter an error - compilations, _, err := truffleConfig.Compile() - assert.NoError(t, err) - assert.True(t, len(compilations) > 0) - }) -} diff --git a/compilation/supported_platforms.go b/compilation/supported_platforms.go index 0fdc2b49..55178abc 100644 --- a/compilation/supported_platforms.go +++ b/compilation/supported_platforms.go @@ -3,6 +3,7 @@ package compilation import ( "fmt" "github.com/crytic/medusa/compilation/platforms" + "github.com/crytic/medusa/logging" ) // defaultPlatformConfigGenerator is a mapping of platform identifier to generator functions which can be used to create @@ -16,7 +17,6 @@ func init() { // Define a list of default platform config generators generators := []func() platforms.PlatformConfig{ func() platforms.PlatformConfig { return platforms.NewSolcCompilationConfig("contract.sol") }, - func() platforms.PlatformConfig { return platforms.NewTruffleCompilationConfig(".") }, func() platforms.PlatformConfig { return platforms.NewCryticCompilationConfig(".") }, } @@ -31,7 +31,8 @@ func init() { // If this platform already exists in our mapping, panic. Each platform should have a unique identifier. 
if _, platformIdExists := defaultPlatformConfigGenerator[platformId]; platformIdExists { - panic(fmt.Errorf("the compilation platform '%s' is registered with more than one provider", platformId)) + err := fmt.Errorf("the compilation platform '%s' is registered with more than one provider", platformId) + logging.GlobalLogger.Panic("Failed to initialize default platform configurations", err) } // Add this entry to our mapping diff --git a/compilation/types/ast.go b/compilation/types/ast.go new file mode 100644 index 00000000..b1dd251c --- /dev/null +++ b/compilation/types/ast.go @@ -0,0 +1,204 @@ +package types + +import ( + "encoding/json" + "regexp" + "strconv" +) + +// ContractKind represents the kind of contract definition represented by an AST node +type ContractKind string + +const ( + // ContractKindContract represents a contract node + ContractKindContract ContractKind = "contract" + // ContractKindLibrary represents a library node + ContractKindLibrary ContractKind = "library" + // ContractKindInterface represents an interface node + ContractKindInterface ContractKind = "interface" +) + +// Node interface represents a generic AST node +type Node interface { + // GetNodeType returns solc's node type e.g. FunctionDefinition, ContractDefinition. 
+ GetNodeType() string +} + +// FunctionDefinition is the function definition node +type FunctionDefinition struct { + // NodeType represents the node type (currently we only evaluate source unit node types) + NodeType string `json:"nodeType"` + // Src is the source file for this AST + Src string `json:"src"` + Name string `json:"name,omitempty"` +} + +func (s FunctionDefinition) GetNodeType() string { + return s.NodeType +} + +// ContractDefinition is the contract definition node +type ContractDefinition struct { + // NodeType represents the node type (currently we only evaluate source unit node types) + NodeType string `json:"nodeType"` + // Nodes is a list of Nodes within the AST + Nodes []Node `json:"nodes"` + // Src is the source file for this AST + Src string `json:"src"` + // CanonicalName is the name of the contract definition + CanonicalName string `json:"canonicalName,omitempty"` + // Kind is a ContractKind that represents what type of contract definition this is (contract, interface, or library) + Kind ContractKind `json:"contractKind,omitempty"` +} + +func (s ContractDefinition) GetNodeType() string { + return s.NodeType +} + +func (c *ContractDefinition) UnmarshalJSON(data []byte) error { + // Unmarshal the top-level AST into our own representation. 
Defer the unmarshaling of all the individual nodes until later + type Alias ContractDefinition + aux := &struct { + Nodes []json.RawMessage `json:"nodes"` + + *Alias + }{ + Alias: (*Alias)(c), + } + + if err := json.Unmarshal(data, &aux); err != nil { + return err + } + + // Iterate through all the nodes of the contract definition + for _, nodeData := range aux.Nodes { + // Unmarshal the node data to retrieve the node type + var nodeType struct { + NodeType string `json:"nodeType"` + } + if err := json.Unmarshal(nodeData, &nodeType); err != nil { + return err + } + + // Unmarshal the contents of the node based on the node type + switch nodeType.NodeType { + case "FunctionDefinition": + // If this is a function definition, unmarshal it + var functionDefinition FunctionDefinition + if err := json.Unmarshal(nodeData, &functionDefinition); err != nil { + return err + } + c.Nodes = append(c.Nodes, functionDefinition) + default: + continue + } + } + + return nil + +} + +// AST is the abstract syntax tree +type AST struct { + // NodeType represents the node type (currently we only evaluate source unit node types) + NodeType string `json:"nodeType"` + // Nodes is a list of Nodes within the AST + Nodes []Node `json:"nodes"` + // Src is the source file for this AST + Src string `json:"src"` +} + +func (a *AST) UnmarshalJSON(data []byte) error { + // Unmarshal the top-level AST into our own representation. 
Defer the unmarshaling of all the individual nodes until later + type Alias AST + aux := &struct { + Nodes []json.RawMessage `json:"nodes"` + *Alias + }{ + Alias: (*Alias)(a), + } + if err := json.Unmarshal(data, &aux); err != nil { + return err + } + + // Iterate through all the nodes of the source unit + for _, nodeData := range aux.Nodes { + // Unmarshal the node data to retrieve the node type + var nodeType struct { + NodeType string `json:"nodeType"` + } + if err := json.Unmarshal(nodeData, &nodeType); err != nil { + return err + } + + // Unmarshal the contents of the node based on the node type + switch nodeType.NodeType { + case "ContractDefinition": + // If this is a contract definition, unmarshal it + var contractDefinition ContractDefinition + if err := json.Unmarshal(nodeData, &contractDefinition); err != nil { + return err + } + a.Nodes = append(a.Nodes, contractDefinition) + + case "FunctionDefinition": + // If this is a function definition, unmarshal it + var functionDefinition FunctionDefinition + if err := json.Unmarshal(nodeData, &functionDefinition); err != nil { + return err + } + a.Nodes = append(a.Nodes, functionDefinition) + + // TODO: Add cases for other node types as needed + default: + continue + } + + } + + return nil +} + +// GetSrcMapSourceUnitID returns the source unit ID based on the source of the AST +func GetSrcMapSourceUnitID(src string) int { + re := regexp.MustCompile(`[0-9]*:[0-9]*:([0-9]*)`) + sourceUnitCandidates := re.FindStringSubmatch(src) + + if len(sourceUnitCandidates) == 2 { // FindStringSubmatch includes the whole match as the first element + sourceUnit, err := strconv.Atoi(sourceUnitCandidates[1]) + if err == nil { + return sourceUnit + } + } + return -1 +} + +// GetSrcMapStart returns the byte offset where the function definition starts in the source file +func GetSrcMapStart(src string) int { + // 95:42:0 returns 95 + re := regexp.MustCompile(`([0-9]*):[0-9]*:[0-9]*`) + startCandidates := re.FindStringSubmatch(src) + 
+ if len(startCandidates) == 2 { // FindStringSubmatch includes the whole match as the first element + start, err := strconv.Atoi(startCandidates[1]) + if err == nil { + return start + } + } + return -1 +} + +// GetSrcMapLength returns the length of the function definition in bytes +func GetSrcMapLength(src string) int { + // 95:42:0 returns 42 + re := regexp.MustCompile(`[0-9]*:([0-9]*):[0-9]*`) + endCandidates := re.FindStringSubmatch(src) + + if len(endCandidates) == 2 { // FindStringSubmatch includes the whole match as the first element + end, err := strconv.Atoi(endCandidates[1]) + if err == nil { + return end + } + } + return -1 +} diff --git a/compilation/types/compilation.go b/compilation/types/compilation.go index d3352cd8..94539f54 100644 --- a/compilation/types/compilation.go +++ b/compilation/types/compilation.go @@ -1,19 +1,57 @@ package types +import ( + "errors" + "fmt" + "os" +) + // Compilation represents the artifacts of a smart contract compilation. type Compilation struct { - // Sources describes the CompiledSource objects provided in a compilation, housing information regarding source - // files, mappings, ASTs, and contracts. - Sources map[string]CompiledSource + // SourcePathToArtifact maps source file paths to their corresponding SourceArtifact. + SourcePathToArtifact map[string]SourceArtifact + + // SourceIdToPath is a mapping of source unit IDs to source file paths. + SourceIdToPath map[int]string + + // SourceCode is a lookup of a source file path from SourceList to source code. This is populated by + // CacheSourceCode. + SourceCode map[string][]byte } // NewCompilation returns a new, empty Compilation object. func NewCompilation() *Compilation { // Create our compilation compilation := &Compilation{ - Sources: make(map[string]CompiledSource), + SourcePathToArtifact: make(map[string]SourceArtifact), + SourceCode: make(map[string][]byte), + SourceIdToPath: make(map[int]string), } // Return the compilation. 
return compilation } + +// CacheSourceCode caches source code for each CompiledSource in the compilation in the CompiledSource.SourceCode field. +// This method will attempt to populate each CompiledSource.SourceCode which has not yet been populated (is nil) before +// returning an error, if one occurs. +func (c *Compilation) CacheSourceCode() error { + // Loop through each source file, try to read it, and collect errors in an aggregated string if we encounter any. + var errStr string + for sourcePath := range c.SourcePathToArtifact { + if _, ok := c.SourceCode[sourcePath]; !ok { + sourceCodeBytes, sourceReadErr := os.ReadFile(sourcePath) + if sourceReadErr != nil { + errStr += fmt.Sprintf("source file '%v' could not be cached due to error: '%v'\n", sourcePath, sourceReadErr) + } + c.SourceCode[sourcePath] = sourceCodeBytes + } + } + + // If we have an error message, return an error encapsulating it. + if len(errStr) > 0 { + return errors.New(errStr) + } + + return nil +} diff --git a/compilation/types/compiled_contract.go b/compilation/types/compiled_contract.go index 37becf15..33693ec8 100644 --- a/compilation/types/compiled_contract.go +++ b/compilation/types/compiled_contract.go @@ -4,9 +4,10 @@ import ( "bytes" "encoding/json" "fmt" + "strings" + "github.com/ethereum/go-ethereum/accounts/abi" "golang.org/x/exp/slices" - "strings" ) // CompiledContract represents a single contract unit from a smart contract compilation. @@ -28,6 +29,9 @@ type CompiledContract struct { // SrcMapsRuntime describes the source mappings to associate source file and bytecode segments in RuntimeBytecode. SrcMapsRuntime string + + // Kind describes the kind of contract, i.e. contract, library, interface. 
+ Kind ContractKind } // IsMatch returns a boolean indicating whether provided contract bytecode is a match to this compiled contract @@ -36,7 +40,6 @@ func (c *CompiledContract) IsMatch(initBytecode []byte, runtimeBytecode []byte) // Check if we can compare init and runtime bytecode canCompareInit := len(initBytecode) > 0 && len(c.InitBytecode) > 0 canCompareRuntime := len(runtimeBytecode) > 0 && len(c.RuntimeBytecode) > 0 - // First try matching runtime bytecode contract metadata. if canCompareRuntime { // First we try to match contracts with contract metadata embedded within the smart contract. diff --git a/compilation/types/compiled_source.go b/compilation/types/compiled_source.go index 9adbd283..2950a74b 100644 --- a/compilation/types/compiled_source.go +++ b/compilation/types/compiled_source.go @@ -1,8 +1,8 @@ package types -// CompiledSource represents a source descriptor for a smart contract compilation, including AST and contained +// SourceArtifact represents a source descriptor for a smart contract compilation, including AST and contained // CompiledContract instances. -type CompiledSource struct { +type SourceArtifact struct { // Ast describes the abstract syntax tree artifact of a source file compilation, providing tokenization of the // source file components. Ast any @@ -10,4 +10,7 @@ type CompiledSource struct { // Contracts describes a mapping of contract names to contract definition structures which are contained within // the source. Contracts map[string]CompiledContract + + // SourceUnitId refers to the identifier of the source unit within the compilation. 
+ SourceUnitId int } diff --git a/compilation/types/contract_metadata.go b/compilation/types/contract_metadata.go index 0c3c885e..0a1e0d2b 100644 --- a/compilation/types/contract_metadata.go +++ b/compilation/types/contract_metadata.go @@ -2,6 +2,7 @@ package types import ( "bytes" + "github.com/fxamacker/cbor" ) @@ -46,6 +47,22 @@ func ExtractContractMetadata(bytecode []byte) *ContractMetadata { return nil } +// RemoveContractMetadata takes bytecode and attempts to detect contract metadata within it, splitting it where the +// metadata is found. +// If contract metadata could be located, this method returns the bytecode solely (no contract metadata, and no +// constructor arguments, which tend to follow). +// Otherwise, this method returns the provided input as-is. +func RemoveContractMetadata(bytecode []byte) []byte { + for _, metadataHashPrefix := range metadataHashPrefixes { + metadataOffset := bytes.LastIndex(bytecode, metadataHashPrefix[:]) + + if metadataOffset != -1 { + return bytecode[:metadataOffset-1] + } + } + return bytecode +} + // ExtractBytecodeHash extracts the bytecode hash from given contract metadata and returns the bytes representing the // hash. If it could not be detected or extracted, nil is returned. func (m ContractMetadata) ExtractBytecodeHash() []byte { diff --git a/compilation/types/source_maps.go b/compilation/types/source_maps.go new file mode 100644 index 00000000..57da3fb2 --- /dev/null +++ b/compilation/types/source_maps.go @@ -0,0 +1,184 @@ +package types + +import ( + "fmt" + "strconv" + "strings" + + "github.com/ethereum/go-ethereum/core/vm" +) + +// Reference: Source mapping is performed according to the rules specified in solidity documentation: +// https://docs.soliditylang.org/en/latest/internals/source_mappings.html + +// SourceMapJumpType describes the type of jump operation occurring within a SourceMapElement if the instruction +// is jumping. 
+type SourceMapJumpType string + +const ( + // SourceMapJumpTypeNone indicates no jump occurred. + SourceMapJumpTypeNone SourceMapJumpType = "" + + // SourceMapJumpTypeJumpIn indicates a jump into a function occurred. + SourceMapJumpTypeJumpIn SourceMapJumpType = "i" + + // SourceMapJumpTypeJumpOut indicates a return from a function occurred. + SourceMapJumpTypeJumpOut SourceMapJumpType = "o" + + // SourceMapJumpTypeJumpWithin indicates a jump occurred within the same function, e.g. for loops. + SourceMapJumpTypeJumpWithin SourceMapJumpType = "-" +) + +// SourceMap describes a list of elements which correspond to instruction indexes in compiled bytecode, describing +// which source files and the start/end range of the source code which the instruction maps to. +type SourceMap []SourceMapElement + +// SourceMapElement describes an individual element of a source mapping output by the compiler. +// The index of each element in a source map corresponds to an instruction index (not to be mistaken with offset). +// It describes portion of a source file the instruction references. +type SourceMapElement struct { + // Index refers to the index of the SourceMapElement within its parent SourceMap. This is not actually a field + // saved in the SourceMap, but is provided for convenience so the user may remove SourceMapElement objects during + // analysis. + Index int + + // Offset refers to the byte offset which marks the start of the source range the instruction maps to. + Offset int + + // Length refers to the byte length of the source range the instruction maps to. + Length int + + // SourceUnitID refers to an identifier for the CompiledSource file which houses the relevant source code. + SourceUnitID int + + // JumpType refers to the SourceMapJumpType which provides information about any type of jump that occurred. + JumpType SourceMapJumpType + + // ModifierDepth refers to the depth in which code has executed a modifier function. 
This is used to assist + // debuggers, e.g. understanding if the same modifier is re-used multiple times in a call. + ModifierDepth int +} + +// ParseSourceMap takes a source mapping string returned by the compiler and parses it into an array of +// SourceMapElement objects. +// Returns the list of SourceMapElement objects. +func ParseSourceMap(sourceMapStr string) (SourceMap, error) { + // Define our variables to store our results in + var ( + sourceMap SourceMap + err error + ) + + // If our provided source map string is empty, there is no work to be done. + if len(sourceMapStr) == 0 { + return sourceMap, nil + } + + // Separate all the individual source mapping elements + elements := strings.Split(sourceMapStr, ";") + + // We use this variable to store "the previous element" because the way + // the source mapping works when an element or field is "empty" + // the value of the previous element is used. + current := SourceMapElement{ + Index: -1, + Offset: -1, + Length: -1, + SourceUnitID: -1, + JumpType: "", + ModifierDepth: 0, + } + + // Iterate over all elements split from the source mapping + for _, element := range elements { + // Set the current index + current.Index = len(sourceMap) + + // If the element is empty, we use the previous one + if len(element) == 0 { + sourceMap = append(sourceMap, current) + continue + } + + // Split the element fields apart + fields := strings.Split(element, ":") + + // If the source range start offset exists, update our current element data. + if len(fields) > 0 && fields[0] != "" { + current.Offset, err = strconv.Atoi(fields[0]) + if err != nil { + return nil, err + } + } + + // If the source range length exists, update our current element data. + if len(fields) > 1 && fields[1] != "" { + current.Length, err = strconv.Atoi(fields[1]) + if err != nil { + return nil, err + } + } + + // If the source file identifier exists, update our current element data. 
+ if len(fields) > 2 && fields[2] != "" { + current.SourceUnitID, err = strconv.Atoi(fields[2]) + if err != nil { + return nil, err + } + } + + // If the jump type information exists, update our current element data. + if len(fields) > 3 && fields[3] != "" { + current.JumpType = SourceMapJumpType(fields[3]) + } + + // If the modifier call depth exists, update our current element data. + if len(fields) > 4 && fields[4] != "" { + current.ModifierDepth, err = strconv.Atoi(fields[4]) + if err != nil { + return nil, err + } + } + + // Append our element to the map + sourceMap = append(sourceMap, current) + } + + // Return the resulting map + return sourceMap, nil +} + +// GetInstructionIndexToOffsetLookup obtains a slice where each index of the slice corresponds to an instruction index, +// and the element of the slice represents the instruction offset. +// Returns the slice lookup, or an error if one occurs. +func (s SourceMap) GetInstructionIndexToOffsetLookup(bytecode []byte) ([]int, error) { + // Create our resulting lookup + indexToOffsetLookup := make([]int, len(s)) + + // Loop through all byte code + currentOffset := 0 + for i := 0; i < len(indexToOffsetLookup); i++ { + // If we're going to read out of bounds, return an error. + if currentOffset >= len(bytecode) { + return nil, fmt.Errorf("failed to obtain a lookup of instruction indexes to offsets. instruction index: %v, current offset: %v, length: %v", i, currentOffset, len(bytecode)) + } + + // Obtain the indexed instruction and add the current offset to our lookup at this index. + op := vm.OpCode(bytecode[currentOffset]) + indexToOffsetLookup[i] = currentOffset + + // Next, calculate the length of data that follows this instruction. + operandCount := 0 + if op.IsPush() { + if op == vm.PUSH0 { + operandCount = 0 + } else { + operandCount = int(op) - int(vm.PUSH1) + 1 + } + } + + // Advance the offset past this instruction and its operands. 
+ currentOffset += operandCount + 1 + } + return indexToOffsetLookup, nil +} diff --git a/docs/book.toml b/docs/book.toml new file mode 100644 index 00000000..c09f7b92 --- /dev/null +++ b/docs/book.toml @@ -0,0 +1,17 @@ +[book] +title = "medusa" +authors = ["Trail of Bits"] +language = "en" +multilingual = false +src = "src" +description = "This repository, brought to you by Trail of Bits, contains the documentation files for the medusa fuzzer." + +[output.html] +git-repository-url = "https://github.com/crytic/medusa" +edit-url-template = "https://github.com/crytic/medusa/edit/master/docs/{path}" +additional-css = ["src/static/custom.css"] +default-theme = "light" + +[output.html.fold] +enable = true +level = 1 \ No newline at end of file diff --git a/docs/src/README.md b/docs/src/README.md new file mode 100644 index 00000000..e9ceb477 --- /dev/null +++ b/docs/src/README.md @@ -0,0 +1,18 @@ +![medusa_logo](./static/medusa_logo.png) + +`medusa` is a cross-platform go-ethereum-based smart contract fuzzer inspired by Echidna. It provides parallelized fuzz +testing of smart contracts through CLI, or its Go API that allows custom user-extended testing methodology. + +## Table of Contents + +- [Getting Started](./getting_started/installation.md): Learn how to install `medusa` and how to set it up for your first project. +- [Project Configuration](./project_configuration/overview.md): Learn how to set up `medusa` for your project as well as + the vast number of configuration options that can be set up based on your project needs. +- [Command Line Interface](./cli/overview.md): Learn how to use `medusa`'s CLI. +- [Writing Tests](./testing/overview.md): Learn how to write tests with `medusa` +- [API (WIP)](./api/api_overview.md): Learn about `medusa`'s Go API that can be used to perform advanced testing + methodologies and extend `medusa`'s capabilities. 
+- Appendices + - [Cheatcodes](./cheatcodes/cheatcodes_overview.md): Learn about the various cheatcodes that are supported by `medusa`. + - [Console Logging](./console_logging.md): Learn about how to use `console.log` with `medusa`. + - [FAQ](./faq.md) diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md new file mode 100644 index 00000000..383a71ef --- /dev/null +++ b/docs/src/SUMMARY.md @@ -0,0 +1,69 @@ +# Summary + +[Introduction](./README.md) + +# Getting Started + +- [Installation](./getting_started/installation.md) +- [First Steps](./getting_started/first_steps.md) + +# Project Configuration + +- [Configuration Overview](project_configuration/overview.md) +- [Fuzzing Configuration](project_configuration/fuzzing_config.md) +- [Testing Configuration](project_configuration/testing_config.md) +- [Chain Configuration](project_configuration/chain_config.md) +- [Compilation Configuration](project_configuration/compilation_config.md) +- [Logging Configuration](project_configuration/logging_config.md) + +# Command Line Interface (CLI) + +- [CLI Overview](./cli/overview.md) +- [init](./cli/init.md) +- [fuzz](./cli/fuzz.md) +- [completion](./cli/completion.md) + +# Writing Tests + +- [Testing Overview](./testing/overview.md) +- [The Fuzzing Lifecycle](./testing/fuzzing_lifecycle.md) +- [Types of Invariants](./testing/invariants.md) +- [Writing Function-Level Invariants](./testing/writing-function-level-invariants.md) +- [Writing System-Level Invariants (WIP)](./testing/writing-system-level-invariants.md) +- [Coverage Reports (WIP)](./testing/coverage_reports.md) + +# API + +- [API Overview (WIP)](api/api_overview.md) + +# Appendices + +- [Cheatcodes](cheatcodes/cheatcodes_overview.md) + - [warp](./cheatcodes/warp.md) + - [roll](./cheatcodes/roll.md) + - [fee](./cheatcodes/fee.md) + - [difficulty](./cheatcodes/difficulty.md) + - [chainId](./cheatcodes/chain_id.md) + - [store](./cheatcodes/store.md) + - [load](./cheatcodes/load.md) + - [etch](./cheatcodes/etch.md) + - 
[deal](./cheatcodes/deal.md) + - [snapshot](./cheatcodes/snapshot.md) + - [getNonce](./cheatcodes/get_nonce.md) + - [setNonce](./cheatcodes/set_nonce.md) + - [coinbase](./cheatcodes/coinbase.md) + - [prank](./cheatcodes/prank.md) + - [prankHere](./cheatcodes/prank_here.md) + - [ffi](./cheatcodes/ffi.md) + - [addr](./cheatcodes/addr.md) + - [sign](./cheatcodes/sign.md) + - [toString](./cheatcodes/to_string.md) + - [parseBytes](./cheatcodes/parse_bytes.md) + - [parseBytes32](./cheatcodes/parse_bytes32.md) + - [parseInt](./cheatcodes/parse_int.md) + - [parseUint](./cheatcodes/parse_uint.md) + - [parseBool](./cheatcodes/parse_bool.md) + - [parseAddress](./cheatcodes/parse_address.md) +- [Console Logging](./console_logging.md) + +[FAQ](./faq.md) diff --git a/docs/src/advanced.md b/docs/src/advanced.md new file mode 100644 index 00000000..a4a1f87f --- /dev/null +++ b/docs/src/advanced.md @@ -0,0 +1,16 @@ +> **Definition**: Stateful fuzzing is the process of maintaining EVM state across multiple fuzzed transactions. + +Stateful fuzzing is an incredibly powerful feature because it allows medusa to test your system **end-to-end**. Let's +take, for example, a staking system where you have the ability to `deposit`, `stake`, `unstake`, and `withdraw`. Because +medusa can execute an array of transactions, medusa can call [`deposit`, `stake`, `unstake`, `withdraw`] inorder and test the +whole system in one fell swoop. It is very important to note that medusa was not _forced_ to call those functions in +sequence. Medusa, over time, will identify that calling deposit allows it to stake tokens and having a staked balance +allows it to unstake, and so on. + +In contrast, having a call sequence length of 1 is called **stateless fuzzing**. + +> **Definition**: Stateless fuzzing is the process of executing a single transaction before resetting the EVM state. 
+ +Stateless fuzzing is useful for arithmetic libraries or isolated functions where state does not need to be maintained +across transactions. Stateless fuzzing, although faster, is not useful for larger systems that have many code paths with +nuanced and complex invariants. diff --git a/docs/src/api/api_overview.md b/docs/src/api/api_overview.md new file mode 100644 index 00000000..ab3e6dee --- /dev/null +++ b/docs/src/api/api_overview.md @@ -0,0 +1,185 @@ +# API Overview (WIP) + +`medusa` offers a lower level API to hook into various parts of the fuzzer, its workers, and underlying chains. Although assertion and property testing are two built-in testing providers, they are implementing using events and hooks offered throughout the `Fuzzer`, `FuzzerWorker`(s), and underlying `TestChain`. These same hooks can be used by external developers wishing to implement their own customing testing methodology. In the sections below, we explore some of the relevant components throughout `medusa`, their events/hooks, an example of creating custom testing methodology with it. + +## Component overview + +A rudimentary description of the objects/providers and their roles are explained below. + +### Data types + +- `ProjectConfig`: This defines the configuration for the Fuzzer, including the targets to compile, deploy, and how to fuzz or test them. + +- `ValueSet`: This is an object that acts as a dictionary of values, used in mutation operations. It is populated at compilation time with some rudimentary static analysis. + +- `Contract`: Can be thought of as a "contract definition", it is a data type which stores the name of the contract, and a reference to the underlying `CompiledContract`, a definition derived from compilation, containing the bytecode, source maps, ABI, etc. 
+ +- `CallSequence`: This represents a list of `CallSequenceElement`s, which define a transaction to send, the suggested block number and timestamp delay to use, and stores a reference to the block/transaction/results when it is executed (for later querying in tests). They are used to generate and execute transaction sequences in the fuzzer. + +- `CoverageMaps` define a list of `CoverageMap` objects, which record all instruction offsets executed for a given contract address and code hash. + +- `TestCase` defines the interface for a test that the `Fuzzer` will track. It simply defines a name, ID, status (not started, running, passed, failed) and message for the `Fuzzer`. + +### Providers + +- `ValueGenerator`: This is an object that provides methods to generate values of different kinds for transactions. Examples include the `RandomValueGenerator` and superceding `MutationalValueGenerator`. They are provided a `ValueSet` by their worker, which they may use in generation operations. + +- `TestChain`: This is a fake chain that operates on fake block structures created for the purpose of testing. Rather than operating on `types.Transaction` (which requires signing), it operates on `core.Message`s, which are derived from transactions and simply allow you to set the `sender` field. It is responsible for: + + - Maintaining state of the chain (blocks, transactions in them, results/receipts) + - Providing methods to create blocks, add transactions to them, commit them to chain, revert to previous block numbers. + - Allowing spoofing of block number and timestamp (commiting block number 1, then 50, jumping 49 blocks ahead), while simulating the existence of intermediate blocks. + - Provides methods to add tracers such as `evm.Logger` (standard go-ethereum tracers) or extend them with an additional interface (`TestChainTracer`) to also store any captured traced information in the execution results. 
This allows you to trace EVM execution for certain conditions, store results, and query them at a later time for testing. + +- `Fuzzer`: This is the main provider for the fuzzing process. It takes a `ProjectConfig` and is responsible for: + + - Housing data shared between the `FuzzerWorker`s such as contract definitions, a `ValueSet` derived from compilation to use in value generation, the reference to `Corpus`, the `CoverageMaps` representing all coverage achieved, as well as maintaining `TestCase`s registered to it and printing their results. + - Compiling the targets defined by the project config and setting up state. + - Provides methods to start/stop the fuzzing process, add additional compilation targets, access the initial value set prior to fuzzing start, access corpus, config, register new test cases and report them finished. + - Starts the fuzzing process by creating a "base" `TestChain`, deploys compiled contracts, replays all corpus sequences to measure existing coverage from previous fuzzing campaign, then spawns as many `FuzzerWorker`s as configured on their own goroutines ("threads") and passes them the "base" `TestChain` (which they clone) to begin the fuzzing operation. + - Respawns `FuzzerWorker`s when they hit a config-defined reset limit for the amount of transaction sequences they should process before destroying themselves and freeing memory. + - Maintains the context for when fuzzing should stop, which all workers track. + +- `FuzzerWorker`: This describes an object spawned by the `Fuzzer` with a given "base" `TestChain` with target contracts already deployed, ready to be fuzzed. It clones this chain, then is called upon to begin creating fuzz transactions. 
It is responsible for: + - Maintaining a reference to the parent `Fuzzer` for any shared information between it and other workers (`Corpus`, total `CoverageMaps`, contract definitions to match deployment's bytecode, etc) + - Maintaining its own `TestChain` to run fuzzed transaction sequences. + - Maintaining its own `ValueSet` which derives from the `Fuzzer`'s `ValueSet` (populated by compilation or user-provided values through API), as each `FuzzerWorker` may populate its `ValueSet` with different runtime values depending on their own chain state. + - Spawning a `ValueGenerator` which uses the `ValueSet`, to generate values used to construct fuzzed transaction sequences. + - Most importantly, it continuously: + - Generates `CallSequence`s (a series of transactions), plays them on its `TestChain`, records the results of in each `CallSequenceElement`, and calls abstract/hookable "test functions" to indicate they should perform post-tx tests (for which they can return requests for a shrunk test sequence). + - Updates the total `CoverageMaps` and `Corpus` with the current `CallSequence` if the most recent call increased coverage. + - Processes any shrink requests from the previous step (shrink requests can define arbitrary criteria for shrinking). + - Eventually, hits the config-defined reset limit for how many sequences it should process, and destroys itself to free all memory, expecting the `Fuzzer` to respawn another in its place. + +## Creating a project configuration + +`medusa` is config-driven. To begin a fuzzing campaign on an API level, you must first define a project configuration so the fuzzer knows what contracts to compile, deploy, and how it should operate. + +When using `medusa` over command-line, it operates a project config similarly (see [docs](https://github.com/trailofbits/medusa/wiki/Project-Configuration) or [example](https://github.com/trailofbits/medusa/wiki/Example-Project-Configuration-File)). 
Similarly, interfacing with a `Fuzzer` requires a `ProjectConfig` object. After importing `medusa` into your Go project, you can create one like this: + +```go +// Initialize a default project config with using crytic-compile as a compilation platform, and set the target it should compile. +projectConfig := config.GetDefaultProjectConfig("crytic-compile") +err := projectConfig.Compilation.SetTarget("contract.sol") +if err != nil { + return err +} + +// You can edit any of the values as you please. +projectConfig.Fuzzing.Workers = 20 +projectConfig.Fuzzing.DeploymentOrder = []string{"TestContract1", "TestContract2"} +``` + +You may also instantiate the whole config in-line with all the fields you'd like, setting the underlying platform config yourself. + +> **NOTE**: The `CompilationConfig` and `PlatformConfig` WILL BE deprecated and replaced with something more intuitive in the future, as the `compilation` package has not been updated since the project's inception, prior to the release of generics in go 1.18. + +## Creating and starting the fuzzer + +After you have created a `ProjectConfig`, you can create a new `Fuzzer` with it, and tell it to start: + +```go + // Create our fuzzer + fuzzer, err := fuzzing.NewFuzzer(*projectConfig) + if err != nil { + return err + } + + // Start the fuzzer + err = fuzzer.Start() + if err != nil { + return err + } + + // Fetch test cases results + testCases := fuzzer.TestCases() +[...] +``` + +> **Note**: `Fuzzer.Start()` is a blocking operation. If you wish to stop, you must define a TestLimit or Timeout in your config. Otherwise start it on another goroutine and call `Fuzzer.Stop()` to stop it. + +## Events/Hooks + +### Events + +Now it may be the case that you wish to hook the `Fuzzer`, `FuzzerWorker`, or `TestChain` to provide your own functionality. You can add your own testing methodology, and even power it with your own low-level EVM execution tracers to store and query results about each call. 
There are a few events/hooks that may be useful off the bat:
+ +- `FuzzerWorkerContractAddedEvent`: This indicates a contract was added on the `FuzzerWorker`'s underlying `TestChain`. This event is emitted when the contract byte code is resolved to a `Contract` definition known by the `Fuzzer`. It may be emitted due to a contract deployment, or the reverting of a block which caused a SELFDESTRUCT. It provides a reference to the `FuzzerWorker`, the deployed contract address, and the `Contract` definition that it was matched to. + +- `FuzzerWorkerContractDeletedEvent`: This indicates a contract was removed on the `FuzzerWorker`'s underlying `TestChain`. It may be emitted due to a contract deployment which was reverted, or a SELFDESTRUCT operation. It provides a reference to the `FuzzerWorker`, the deployed contract address, and the `Contract` definition that it was matched to. + +The `TestChain` maintains event emitters for the following events under `TestChain.Events.*`: + +- `PendingBlockCreatedEvent`: This indicates a new block is being created but has not yet been committed to the chain. The block is empty at this point but will likely be populated. It provides a reference to the `Block` and `TestChain`. + +- `PendingBlockAddedTxEvent`: This indicates a pending block which has not yet been commited to chain has added a transaction to it, as it is being constructed. It provides a reference to the `Block`, `TestChain`, and index of the transaction in the `Block`. + +- `PendingBlockCommittedEvent`: This indicates a pending block was committed to chain as the new head. It provides a reference to the `Block` and `TestChain`. + +- `PendingBlockDiscardedEvent`: This indicates a pending block was not committed to chain and was instead discarded. + +- `BlocksRemovedEvent`: This indicates blocks were removed from the chain. This happens when a chain revert to a previous block number is invoked. It provides a reference to the `Block` and `TestChain`. 
+ +- `ContractDeploymentsAddedEvent`: This indicates a new contract deployment was detected on chain. It provides a reference to the `TestChain`, as well as information captured about the bytecode. This may be triggered on contract deployment, or the reverting of a SELFDESTRUCT operation. + +- `ContractDeploymentsRemovedEvent`: This indicates a previously deployed contract deployment was removed from chain. It provides a reference to the `TestChain`, as well as information captured about the bytecode. This may be triggered on revert of a contract deployment, or a SELFDESTRUCT operation. + +### Hooks + +The `Fuzzer` maintains hooks for some of its functionality under `Fuzzer.Hooks.*`: + +- `NewValueGeneratorFunc`: This method is used to create a `ValueGenerator` for each `FuzzerWorker`. By default, this uses a `MutationalValueGenerator` constructed with the provided `ValueSet`. It can be replaced to provide a custom `ValueGenerator`. + +- `TestChainSetupFunc`: This method is used to set up a chain's initial state before fuzzing. By default, this method deploys all contracts compiled and marked for deployment in the `ProjectConfig` provided to the `Fuzzer`. It only deploys contracts if they have no constructor arguments. This can be replaced with your own method to do custom deployments. + + - **Note**: We do not recommend replacing this for now, as the `Contract` definitions may not be known to the `Fuzzer`. Additionally, `SenderAddresses` and `DeployerAddress` are the only addresses funded at genesis. This will be updated at a later time. + +- `CallSequenceTestFuncs`: This is a list of functions which are called after each `FuzzerWorker` executed another call in its current `CallSequence`. It takes the `FuzzerWorker` and `CallSequence` as input, and is expected to return a list of `ShinkRequest`s if some interesting result was found and we wish for the `FuzzerWorker` to shrink the sequence. 
You can add a function here as part of custom post-call testing methodology to check if some property was violated, then request a shrunken sequence for it with arbitrary criteria to verify the shrunk sequence satisfies your requirements (e.g. violating the same property again). + +### Extending testing methodology + +Although we will build out guidance on how you can solve different challenges or employ different tests with this lower level API, we intend to wrap some of this into a higher level API that allows testing complex post-call/event conditions with just a few lines of code externally. The lower level API will serve for more granular control across the system, and fine tuned optimizations. + +To ensure testing methodology was agnostic and extensible in `medusa`, we note that both assertion and property testing is implemented through the abovementioned events and hooks. When a higher level API is introduced, we intend to migrate these test case providers to that API. + +For now, the built-in `AssertionTestCaseProvider` (found [here](https://github.com/trailofbits/medusa/blob/8036697794481b7bf9fa78c922ec7fa6a8a3005c/fuzzing/test_case_assertion_provider.go)) and its test cases (found [here](https://github.com/trailofbits/medusa/blob/8036697794481b7bf9fa78c922ec7fa6a8a3005c/fuzzing/test_case_assertion.go)) are an example of code that _could_ exist externally outside of `medusa`, but plug into it to offer extended testing methodology. Although it makes use of some private variables, they can be replaced with public getter functions that are available. As such, if assertion testing didn't exist in `medusa` natively, you could've implemented it yourself externally! 
+ +In the end, using it would look something like this: + +```go + // Create our fuzzer + fuzzer, err := fuzzing.NewFuzzer(*projectConfig) + if err != nil { + return err + } + + // Attach our custom test case provider + attachAssertionTestCaseProvider(fuzzer) + + // Start the fuzzer + err = fuzzer.Start() + if err != nil { + return err + } +``` diff --git a/docs/src/cheatcodes/addr.md b/docs/src/cheatcodes/addr.md new file mode 100644 index 00000000..5fe04c04 --- /dev/null +++ b/docs/src/cheatcodes/addr.md @@ -0,0 +1,24 @@ +# `addr` + +## Description + +The `addr` cheatcode will compute the address for a given private key. + +## Example + +```solidity +// Obtain our cheat code contract reference. +IStdCheats cheats = CheatCodes(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D); + +// Test with random private key +uint256 pkOne = 0x6df21769a2082e03f7e21f6395561279e9a7feb846b2bf740798c794ad196e00; +address addrOne = 0xdf8Ef652AdE0FA4790843a726164df8cf8649339; +address result = cheats.addr(pkOne); +assert(result == addrOne); +``` + +## Function Signature + +```solidity +function addr(uint256 privateKey) external returns (address); +``` diff --git a/docs/src/cheatcodes/chain_id.md b/docs/src/cheatcodes/chain_id.md new file mode 100644 index 00000000..ae8fedeb --- /dev/null +++ b/docs/src/cheatcodes/chain_id.md @@ -0,0 +1,22 @@ +# `chainId` + +## Description + +The `chainId` cheatcode will set the `block.chainid` + +## Example + +```solidity +// Obtain our cheat code contract reference. +IStdCheats cheats = CheatCodes(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D); + +// Change value and verify. 
+cheats.chainId(777123); +assert(block.chainid == 777123); +``` + +## Function Signature + +```solidity +function chainId(uint256) external; +``` diff --git a/docs/src/cheatcodes/cheatcodes_overview.md b/docs/src/cheatcodes/cheatcodes_overview.md new file mode 100644 index 00000000..ec2dcf46 --- /dev/null +++ b/docs/src/cheatcodes/cheatcodes_overview.md @@ -0,0 +1,118 @@ +# Cheatcodes Overview + +Cheatcodes allow users to manipulate EVM state, blockchain behavior, provide easy ways to manipulate data, and much more. +The cheatcode contract is deployed at `0x7109709ECfa91a80626fF3989D68f67F5b1DD12D`. + +## Cheatcode Interface + +The following interface must be added to your Solidity project if you wish to use cheatcodes. Note that if you use Foundry +as your compilation platform that the cheatcode interface is already provided [here](https://book.getfoundry.sh/reference/forge-std/#forge-stds-test). +However, it is important to note that medusa does not support all the cheatcodes provided out-of-box +by Foundry (see below for supported cheatcodes). 
+ +```solidity +interface StdCheats { + // Set block.timestamp + function warp(uint256) external; + + // Set block.number + function roll(uint256) external; + + // Set block.basefee + function fee(uint256) external; + + // Set block.difficulty and block.prevrandao + function difficulty(uint256) external; + + // Set block.chainid + function chainId(uint256) external; + + // Sets the block.coinbase + function coinbase(address) external; + + // Loads a storage slot from an address + function load(address account, bytes32 slot) external returns (bytes32); + + // Stores a value to an address' storage slot + function store(address account, bytes32 slot, bytes32 value) external; + + // Sets the *next* call's msg.sender to be the input address + function prank(address) external; + + // Set msg.sender to the input address until the current call exits + function prankHere(address) external; + + // Sets an address' balance + function deal(address who, uint256 newBalance) external; + + // Sets an address' code + function etch(address who, bytes calldata code) external; + + // Signs data + function sign(uint256 privateKey, bytes32 digest) + external + returns (uint8 v, bytes32 r, bytes32 s); + + // Computes address for a given private key + function addr(uint256 privateKey) external returns (address); + + // Gets the nonce of an account + function getNonce(address account) external returns (uint64); + + // Sets the nonce of an account + // The new nonce must be higher than the current nonce of the account + function setNonce(address account, uint64 nonce) external; + + // Performs a foreign function call via terminal + function ffi(string[] calldata) external returns (bytes memory); + + // Take a snapshot of the current state of the EVM + function snapshot() external returns (uint256); + + // Revert state back to a snapshot + function revertTo(uint256) external returns (bool); + + // Convert Solidity types to strings + function toString(address) external returns(string memory); 
+    function toString(bytes calldata) external returns(string memory);
+    function toString(bytes32) external returns(string memory);
+    function toString(bool) external returns(string memory);
+    function toString(uint256) external returns(string memory);
+    function toString(int256) external returns(string memory);
+
+    // Convert strings into Solidity types
+    function parseBytes(string memory) external returns(bytes memory);
+    function parseBytes32(string memory) external returns(bytes32);
+    function parseAddress(string memory) external returns(address);
+    function parseUint(string memory) external returns(uint256);
+    function parseInt(string memory) external returns(int256);
+    function parseBool(string memory) external returns(bool);
+}
+```
+
+# Using cheatcodes
+
+Below is an example snippet of how you would import the cheatcode interface into your project and use it.
+
+```solidity
+// Assuming cheatcode interface is in the same directory
+import "./IStdCheats.sol";
+
+// MyContract will utilize the cheatcode interface
+contract MyContract {
+    // Set up reference to cheatcode contract
+    IStdCheats cheats = IStdCheats(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D);
+
+    // This is a test function that will set the msg.sender's nonce to the provided input argument
+    function testFunc(uint256 _x) public {
+        // Ensure that the input argument is greater than msg.sender's current nonce
+        require(_x > cheats.getNonce(msg.sender));
+
+        // Set sender's nonce
+        cheats.setNonce(msg.sender, _x);
+
+        // Assert that the nonce has been correctly updated
+        assert(cheats.getNonce(msg.sender) == _x);
+    }
+}
+```
diff --git a/docs/src/cheatcodes/coinbase.md b/docs/src/cheatcodes/coinbase.md
new file mode 100644
index 00000000..a0ab068e
--- /dev/null
+++ b/docs/src/cheatcodes/coinbase.md
@@ -0,0 +1,22 @@
+# `coinbase`
+
+## Description
+
+The `coinbase` cheatcode will set the `block.coinbase`
+
+## Example
+
+```solidity
+// Obtain our cheat code contract reference.
+IStdCheats cheats = CheatCodes(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D); + +// Change value and verify. +cheats.coinbase(address(7)); +assert(block.coinbase == address(7)); +``` + +## Function Signature + +```solidity +function coinbase(address) external; +``` diff --git a/docs/src/cheatcodes/deal.md b/docs/src/cheatcodes/deal.md new file mode 100644 index 00000000..4518f61f --- /dev/null +++ b/docs/src/cheatcodes/deal.md @@ -0,0 +1,23 @@ +# `deal` + +## Description + +The `deal` cheatcode will set the ETH balance of address `who` to `newBalance` + +## Example + +```solidity +// Obtain our cheat code contract reference. +IStdCheats cheats = CheatCodes(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D); + +// Change value and verify. +address acc = address(777); +cheats.deal(acc, x); +assert(acc.balance == x); +``` + +## Function Signature + +```solidity +function deal(address who, uint256 newBalance) external; +``` diff --git a/docs/src/cheatcodes/difficulty.md b/docs/src/cheatcodes/difficulty.md new file mode 100644 index 00000000..fa901849 --- /dev/null +++ b/docs/src/cheatcodes/difficulty.md @@ -0,0 +1,25 @@ +# `difficulty` + +## Description + +The `difficulty` cheatcode will set the `block.difficulty` and the `block.prevrandao` value. At the moment, both values +are changed since the cheatcode does not check what EVM version is running. + +Note that this behavior will change in the future. + +## Example + +```solidity +// Obtain our cheat code contract reference. +IStdCheats cheats = CheatCodes(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D); + +// Change value and verify. 
+cheats.difficulty(x); +assert(block.difficulty == x); +``` + +## Function Signature + +```solidity +function difficulty(uint256) external; +``` diff --git a/docs/src/cheatcodes/etch.md b/docs/src/cheatcodes/etch.md new file mode 100644 index 00000000..4ba045bf --- /dev/null +++ b/docs/src/cheatcodes/etch.md @@ -0,0 +1,29 @@ +# `etch` + +## Description + +The `etch` cheatcode will set the `who` address's bytecode to `code`. + +## Example + +```solidity +// Obtain our cheat code contract reference. +IStdCheats cheats = CheatCodes(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D); + +// Obtain our original code hash for an account. +address acc = address(777); +bytes32 originalCodeHash; +assembly { originalCodeHash := extcodehash(acc) } + +// Change value and verify. +cheats.etch(acc, address(someContract).code); +bytes32 updatedCodeHash; +assembly { updatedCodeHash := extcodehash(acc) } +assert(originalCodeHash != updatedCodeHash); +``` + +## Function Signature + +```solidity +function etch(address who, bytes calldata code) external; +``` diff --git a/docs/src/cheatcodes/fee.md b/docs/src/cheatcodes/fee.md new file mode 100644 index 00000000..a4c6f115 --- /dev/null +++ b/docs/src/cheatcodes/fee.md @@ -0,0 +1,22 @@ +# `fee` + +## Description + +The `fee` cheatcode will set the `block.basefee`. + +## Example + +```solidity +// Obtain our cheat code contract reference. +IStdCheats cheats = CheatCodes(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D); + +// Change value and verify. +cheats.fee(7); +assert(block.basefee == 7); +``` + +## Function Signature + +```solidity +function fee(uint256) external; +``` diff --git a/docs/src/cheatcodes/ffi.md b/docs/src/cheatcodes/ffi.md new file mode 100644 index 00000000..562528ad --- /dev/null +++ b/docs/src/cheatcodes/ffi.md @@ -0,0 +1,58 @@ +# `ffi` + +## Description + +The `ffi` cheatcode is used to call an arbitrary command on your host OS. 
Note that `ffi` must be enabled via the project +configuration file by setting `fuzzing.chainConfig.cheatCodes.enableFFI` to `true`. + +Note that enabling `ffi` allows anyone to execute arbitrary commands on devices that run the fuzz tests which may +become a security risk. + +Please review [Foundry's documentation on the `ffi` cheatcode](https://book.getfoundry.sh/cheatcodes/ffi#tips) for general tips. + +## Example with ABI-encoded hex + +```solidity +// Obtain our cheat code contract reference. +IStdCheats cheats = CheatCodes(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D); + +// Create command +string[] memory inputs = new string[](3); +inputs[0] = "echo"; +inputs[1] = "-n"; +// Encoded "hello" +inputs[2] = "0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000568656C6C6F000000000000000000000000000000000000000000000000000000"; + +// Call cheats.ffi +bytes memory res = cheats.ffi(inputs); + +// ABI decode +string memory output = abi.decode(res, (string)); +assert(keccak256(abi.encodePacked(output)) == keccak256(abi.encodePacked("hello"))); +``` + +## Example with UTF8 encoding + +```solidity +// Obtain our cheat code contract reference. 
+IStdCheats cheats = CheatCodes(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D); + +// Create command +string[] memory inputs = new string[](3); +inputs[0] = "echo"; +inputs[1] = "-n"; +inputs[2] = "hello"; + +// Call cheats.ffi +bytes memory res = cheats.ffi(inputs); + +// Convert to UTF-8 string +string memory output = string(res); +assert(keccak256(abi.encodePacked(output)) == keccak256(abi.encodePacked("hello"))); +``` + +## Function Signature + +```solidity +function ffi(string[] calldata) external returns (bytes memory); +``` diff --git a/docs/src/cheatcodes/get_nonce.md b/docs/src/cheatcodes/get_nonce.md new file mode 100644 index 00000000..b94f3773 --- /dev/null +++ b/docs/src/cheatcodes/get_nonce.md @@ -0,0 +1,22 @@ +# `getNonce` + +## Description + +The `getNonce` cheatcode will get the current nonce of `account`. + +## Example + +```solidity +// Obtain our cheat code contract reference. +IStdCheats cheats = CheatCodes(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D); + +// Get nonce and verify that the sender has sent at least one transaction +address acc = address(msg.sender); +assert(cheats.getNonce(acc) > 0); +``` + +## Function Signature + +```solidity +function getNonce(address account) external returns (uint64); +``` diff --git a/docs/src/cheatcodes/load.md b/docs/src/cheatcodes/load.md new file mode 100644 index 00000000..488891b4 --- /dev/null +++ b/docs/src/cheatcodes/load.md @@ -0,0 +1,27 @@ +# `load` + +## Description + +The `load` cheatcode will load storage slot `slot` for `account` + +## Example + +```solidity +contract TestContract { + uint x = 123; + function test() public { + // Obtain our cheat code contract reference. 
+ IStdCheats cheats = CheatCodes(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D); + + // Load and verify x + bytes32 value = cheats.load(address(this), bytes32(uint(0))); + assert(value == bytes32(uint(123))); + } +} +``` + +## Function Signature + +```solidity +function load(address account, bytes32 slot) external returns (bytes32); +``` diff --git a/docs/src/cheatcodes/parse_address.md b/docs/src/cheatcodes/parse_address.md new file mode 100644 index 00000000..335a7eb0 --- /dev/null +++ b/docs/src/cheatcodes/parse_address.md @@ -0,0 +1,30 @@ +# `parseAddress` + +## Description + +The `parseAddress` cheatcode will parse the input string into an address + +## Example + +```solidity +contract TestContract { + uint x = 123; + function test() public { + // Obtain our cheat code contract reference. + IStdCheats cheats = CheatCodes(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D); + + address expectedAddress = 0x7109709ECfa91a80626fF3989D68f67F5b1DD12D; + string memory test = "0x7109709ECfa91a80626fF3989D68f67F5b1DD12D"; + + // Call cheats.parseAddress + address result = cheats.parseAddress(test); + assert(expectedAddress == result); + } +} +``` + +## Function Signature + +```solidity +function parseAddress(string calldata) external returns (address); +``` diff --git a/docs/src/cheatcodes/parse_bool.md b/docs/src/cheatcodes/parse_bool.md new file mode 100644 index 00000000..dbbc7241 --- /dev/null +++ b/docs/src/cheatcodes/parse_bool.md @@ -0,0 +1,30 @@ +# `parseBool` + +## Description + +The `parseBool` cheatcode will parse the input string into a boolean + +## Example + +```solidity +contract TestContract { + uint x = 123; + function test() public { + // Obtain our cheat code contract reference. 
+ IStdCheats cheats = CheatCodes(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D); + + bool expectedBool = true; + string memory test = "true"; + + // Call cheats.parseBool + bool result = cheats.parseBool(test); + assert(expectedBool == result); + } +} +``` + +## Function Signature + +```solidity +function parseBool(string calldata) external returns (bool); +``` diff --git a/docs/src/cheatcodes/parse_bytes.md b/docs/src/cheatcodes/parse_bytes.md new file mode 100644 index 00000000..4612116e --- /dev/null +++ b/docs/src/cheatcodes/parse_bytes.md @@ -0,0 +1,30 @@ +# `parseBytes` + +## Description + +The `parseBytes` cheatcode will parse the input string into bytes + +## Example + +```solidity +contract TestContract { + uint x = 123; + function test() public { + // Obtain our cheat code contract reference. + IStdCheats cheats = CheatCodes(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D); + + bytes memory expectedBytes = "medusa"; + string memory test = "medusa"; + + // Call cheats.parseBytes + bytes memory result = cheats.parseBytes(test); + assert(keccak256(expectedBytes) == keccak256(result)); + } +} +``` + +## Function Signature + +```solidity +function parseBytes(string calldata) external returns (bytes memory); +``` diff --git a/docs/src/cheatcodes/parse_bytes32.md b/docs/src/cheatcodes/parse_bytes32.md new file mode 100644 index 00000000..6fb0ab2a --- /dev/null +++ b/docs/src/cheatcodes/parse_bytes32.md @@ -0,0 +1,30 @@ +# `parseBytes32` + +## Description + +The `parseBytes32` cheatcode will parse the input string into bytes32 + +## Example + +```solidity +contract TestContract { + uint x = 123; + function test() public { + // Obtain our cheat code contract reference. 
+        IStdCheats cheats = IStdCheats(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D);
+
+        bytes32 expectedBytes32 = "medusa";
+        string memory test = "medusa";
+
+        // Call cheats.parseBytes32
+        bytes32 result = cheats.parseBytes32(test);
+        assert(expectedBytes32 == result);
+    }
+}
+```
+
+## Function Signature
+
+```solidity
+function parseBytes32(string calldata) external returns (bytes32);
+```
diff --git a/docs/src/cheatcodes/parse_int.md b/docs/src/cheatcodes/parse_int.md
new file mode 100644
index 00000000..6a820a3c
--- /dev/null
+++ b/docs/src/cheatcodes/parse_int.md
@@ -0,0 +1,30 @@
+# `parseInt`
+
+## Description
+
+The `parseInt` cheatcode will parse the input string into an int256
+
+## Example
+
+```solidity
+contract TestContract {
+    uint x = 123;
+    function test() public {
+        // Obtain our cheat code contract reference.
+        IStdCheats cheats = IStdCheats(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D);
+
+        int256 expectedInt = -12345;
+        string memory test = "-12345";
+
+        // Call cheats.parseInt
+        int256 result = cheats.parseInt(test);
+        assert(expectedInt == result);
+    }
+}
+```
+
+## Function Signature
+
+```solidity
+function parseInt(string calldata) external returns (int256);
+```
diff --git a/docs/src/cheatcodes/parse_uint.md b/docs/src/cheatcodes/parse_uint.md
new file mode 100644
index 00000000..8c9f2e46
--- /dev/null
+++ b/docs/src/cheatcodes/parse_uint.md
@@ -0,0 +1,30 @@
+# `parseUint`
+
+## Description
+
+The `parseUint` cheatcode will parse the input string into a uint256
+
+## Example
+
+```solidity
+contract TestContract {
+    uint x = 123;
+    function test() public {
+        // Obtain our cheat code contract reference.
+ IStdCheats cheats = CheatCodes(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D); + + uint256 expectedUint = 12345; + string memory test = "12345"; + + // Call cheats.parseUint + uint256 result = cheats.parseUint(test); + assert(expectedUint == result); + } +} +``` + +## Function Signature + +```solidity +function parseUint(string calldata) external returns (uint256); +``` diff --git a/docs/src/cheatcodes/prank.md b/docs/src/cheatcodes/prank.md new file mode 100644 index 00000000..f309ba3f --- /dev/null +++ b/docs/src/cheatcodes/prank.md @@ -0,0 +1,38 @@ +# `prank` + +## Description + +The `prank` cheatcode will set the `msg.sender` for _only the next call_ to the specified input address. Note that, +contrary to [`prank` in Foundry](https://book.getfoundry.sh/cheatcodes/prank#description), calling the cheatcode contract will count as a +valid "next call" + +## Example + +```solidity +contract TestContract { + address owner = address(123); + function transferOwnership(address _newOwner) public { + require(msg.sender == owner); + + // Change ownership + owner = _newOwner; + } + + function test() public { + // Obtain our cheat code contract reference. + IStdCheats cheats = CheatCodes(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D); + + // Prank, change ownership, and verify + address newOwner = address(456); + cheats.prank(owner); + transferOwnership(newOwner); + assert(owner == newOwner); + } + } +``` + +## Function Signature + +```solidity +function prank(address) external; +``` diff --git a/docs/src/cheatcodes/prank_here.md b/docs/src/cheatcodes/prank_here.md new file mode 100644 index 00000000..5723cfb8 --- /dev/null +++ b/docs/src/cheatcodes/prank_here.md @@ -0,0 +1,49 @@ +# `prankHere` + +## Description + +The `prankHere` cheatcode will set the `msg.sender` to the specified input address until the current call exits. Compared +to `prank`, `prankHere` can persist for multiple calls. 
+
+## Example
+
+```solidity
+contract TestContract {
+    address owner = address(123);
+    uint256 x = 0;
+    uint256 y = 0;
+
+    function updateX() public {
+        require(msg.sender == owner);
+
+        // Update x
+        x = 1;
+    }
+
+    function updateY() public {
+        require(msg.sender == owner);
+
+        // Update y
+        y = 1;
+    }
+
+    function test() public {
+        // Obtain our cheat code contract reference.
+        IStdCheats cheats = IStdCheats(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D);
+
+        // Prank, update variables, and verify
+        cheats.prankHere(owner);
+        updateX();
+        updateY();
+        assert((x == 1) && (y == 1));
+
+        // Once this function returns, the `msg.sender` is reset
+    }
+}
+```
+
+## Function Signature
+
+```solidity
+function prankHere(address) external;
+```
diff --git a/docs/src/cheatcodes/roll.md b/docs/src/cheatcodes/roll.md
new file mode 100644
index 00000000..57b901e0
--- /dev/null
+++ b/docs/src/cheatcodes/roll.md
@@ -0,0 +1,24 @@
+# `roll`
+
+## Description
+
+The `roll` cheatcode sets the `block.number`
+
+## Example
+
+```solidity
+// Obtain our cheat code contract reference.
+IStdCheats cheats = IStdCheats(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D);
+
+// Change value and verify.
+cheats.roll(7);
+assert(block.number == 7);
+cheats.roll(9);
+assert(block.number == 9);
+```
+
+## Function Signature
+
+```solidity
+function roll(uint256) external;
+```
diff --git a/docs/src/cheatcodes/set_nonce.md b/docs/src/cheatcodes/set_nonce.md
new file mode 100644
index 00000000..8d949c8e
--- /dev/null
+++ b/docs/src/cheatcodes/set_nonce.md
@@ -0,0 +1,24 @@
+# `setNonce`
+
+## Description
+
+The `setNonce` cheatcode will set the nonce of `account` to `nonce`. Note that the `nonce` must be strictly greater than
+the current nonce
+
+## Example
+
+```solidity
+// Obtain our cheat code contract reference.
+IStdCheats cheats = CheatCodes(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D); + +// Set nonce and verify (assume nonce before `setNonce` was less than 7) +address acc = address(msg.sender); +cheats.setNonce(acc, 7); +assert(cheats.getNonce(acc) == 7); +``` + +## Function Signature + +```solidity +function setNonce(address account, uint64 nonce) external; +``` diff --git a/docs/src/cheatcodes/sign.md b/docs/src/cheatcodes/sign.md new file mode 100644 index 00000000..dea23c2c --- /dev/null +++ b/docs/src/cheatcodes/sign.md @@ -0,0 +1,28 @@ +# `sign` + +## Description + +The `sign` cheatcode will take in a private key `privateKey` and a hash digest `digest` to generate a `(v, r, s)` +signature + +## Example + +```solidity +// Obtain our cheat code contract reference. +IStdCheats cheats = CheatCodes(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D); + +bytes32 digest = keccak256("Data To Sign"); + +// Call cheats.sign +(uint8 v, bytes32 r, bytes32 s) = cheats.sign(0x6df21769a2082e03f7e21f6395561279e9a7feb846b2bf740798c794ad196e00, digest); +address signer = ecrecover(digest, v, r, s); +assert(signer == 0xdf8Ef652AdE0FA4790843a726164df8cf8649339); +``` + +## Function Signature + +```solidity +function sign(uint256 privateKey, bytes32 digest) +external +returns (uint8 v, bytes32 r, bytes32 s); +``` diff --git a/docs/src/cheatcodes/snapshot.md b/docs/src/cheatcodes/snapshot.md new file mode 100644 index 00000000..27298f6b --- /dev/null +++ b/docs/src/cheatcodes/snapshot.md @@ -0,0 +1,68 @@ +# `snapshot` and `revertTo` + +## Description + +The `snapshot` cheatcode will take a snapshot of the current state of the blockchain and return an identifier for the +snapshot. + +On the flipside, the `revertTo` cheatcode will revert the EVM state back based on the provided identifier. 
+ +## Example + +```solidity +interface CheatCodes { + function warp(uint256) external; + + function deal(address, uint256) external; + + function snapshot() external returns (uint256); + + function revertTo(uint256) external returns (bool); +} + +struct Storage { + uint slot0; + uint slot1; +} + +contract TestContract { + Storage store; + uint256 timestamp; + + function test() public { + // Obtain our cheat code contract reference. + CheatCodes cheats = CheatCodes( + 0x7109709ECfa91a80626fF3989D68f67F5b1DD12D + ); + + store.slot0 = 10; + store.slot1 = 20; + timestamp = block.timestamp; + cheats.deal(address(this), 5 ether); + + // Save state + uint256 snapshot = cheats.snapshot(); + + // Change state + store.slot0 = 300; + store.slot1 = 400; + cheats.deal(address(this), 500 ether); + cheats.warp(12345); + + // Assert that state has been changed + assert(store.slot0 == 300); + assert(store.slot1 == 400); + assert(address(this).balance == 500 ether); + assert(block.timestamp == 12345); + + // Revert to snapshot + cheats.revertTo(snapshot); + + // Ensure state has been reset + assert(store.slot0 == 10); + assert(store.slot1 == 20); + assert(address(this).balance == 5 ether); + assert(block.timestamp == timestamp); + } +} +``` diff --git a/docs/src/cheatcodes/store.md b/docs/src/cheatcodes/store.md new file mode 100644 index 00000000..d3fb580d --- /dev/null +++ b/docs/src/cheatcodes/store.md @@ -0,0 +1,27 @@ +# `store` + +## Description + +The `store` cheatcode will store `value` in storage slot `slot` for `account` + +## Example + +```solidity +contract TestContract { + uint x = 123; + function test() public { + // Obtain our cheat code contract reference. + IStdCheats cheats = CheatCodes(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D); + + // Store into x, verify it. 
+        cheats.store(address(this), bytes32(uint(0)), bytes32(uint(456)));
+        assert(x == 456);
+    }
+}
+```
+
+## Function Signature
+
+```solidity
+function store(address account, bytes32 slot, bytes32 value) external;
+```
diff --git a/docs/src/cheatcodes/to_string.md b/docs/src/cheatcodes/to_string.md
new file mode 100644
index 00000000..75e4182f
--- /dev/null
+++ b/docs/src/cheatcodes/to_string.md
@@ -0,0 +1,84 @@
+# `toString`
+
+## Description
+
+The `toString` cheatcodes aid in converting primitive Solidity types into strings. Similar to
+[Foundry's behavior](https://book.getfoundry.sh/cheatcodes/to-string?highlight=toStr#description), bytes are converted
+to a hex-encoded string with `0x` prefixed.
+
+## Example
+
+```solidity
+contract TestContract {
+    IStdCheats cheats;
+
+    constructor() {
+        cheats = IStdCheats(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D);
+    }
+
+    function testAddress() public {
+        address test = 0x7109709ECfa91a80626fF3989D68f67F5b1DD12D;
+        string memory expectedString = "0x7109709ECfa91a80626fF3989D68f67F5b1DD12D";
+
+        // Call cheats.toString
+        string memory result = cheats.toString(test);
+        assert(keccak256(abi.encodePacked(result)) == keccak256(abi.encodePacked(expectedString)));
+    }
+
+    function testBool() public {
+        bool test = true;
+        string memory expectedString = "true";
+
+        // Call cheats.toString
+        string memory result = cheats.toString(test);
+        assert(keccak256(abi.encodePacked(result)) == keccak256(abi.encodePacked(expectedString)));
+    }
+
+    function testUint256() public {
+        uint256 test = 12345;
+        string memory expectedString = "12345";
+
+        // Call cheats.toString
+        string memory result = cheats.toString(test);
+        assert(keccak256(abi.encodePacked(result)) == keccak256(abi.encodePacked(expectedString)));
+    }
+
+    function testInt256() public {
+        int256 test = -12345;
+        string memory expectedString = "-12345";
+
+        // Call cheats.toString
+        string memory result = cheats.toString(test);
+        assert(keccak256(abi.encodePacked(result)) == keccak256(abi.encodePacked(expectedString)));
+    }
+
+    function testBytes32() public {
+        bytes32 test = "medusa";
+        string memory expectedString = "0x6d65647573610000000000000000000000000000000000000000000000000000";
+
+        // Call cheats.toString
+        string memory result = cheats.toString(test);
+        assert(keccak256(abi.encodePacked(result)) == keccak256(abi.encodePacked(expectedString)));
+    }
+
+    function testBytes() public {
+        bytes memory test = "medusa";
+        string memory expectedString = "0x6d6564757361";
+
+        // Call cheats.toString
+        string memory result = cheats.toString(test);
+        assert(keccak256(abi.encodePacked(result)) == keccak256(abi.encodePacked(expectedString)));
+    }
+}
+```
+
+## Function Signatures
+
+```solidity
+function toString(address) external returns (string memory);
+function toString(bool) external returns (string memory);
+function toString(uint256) external returns (string memory);
+function toString(int256) external returns (string memory);
+function toString(bytes32) external returns (string memory);
+function toString(bytes) external returns (string memory);
+```
diff --git a/docs/src/cheatcodes/warp.md b/docs/src/cheatcodes/warp.md
new file mode 100644
index 00000000..22830645
--- /dev/null
+++ b/docs/src/cheatcodes/warp.md
@@ -0,0 +1,24 @@
+# `warp`
+
+## Description
+
+The `warp` cheatcode sets the `block.timestamp`
+
+## Example
+
+```solidity
+// Obtain our cheat code contract reference.
+IStdCheats cheats = IStdCheats(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D);
+
+// Change value and verify.
+cheats.warp(7);
+assert(block.timestamp == 7);
+cheats.warp(9);
+assert(block.timestamp == 9);
+```
+
+## Function Signature
+
+```solidity
+function warp(uint256) external;
+```
diff --git a/docs/src/cli/completion.md b/docs/src/cli/completion.md
new file mode 100644
index 00000000..242355db
--- /dev/null
+++ b/docs/src/cli/completion.md
@@ -0,0 +1,21 @@
+# `completion`
+
+`medusa` provides the ability to generate autocompletion scripts for a given shell.
+Once the autocompletion script is run for a given shell, `medusa`'s commands and flags can be tab-autocompleted.
+The following shells are supported:
+
+1. `bash`
+2. `zsh`
+3. `Powershell`
+
+To understand how to run the autocompletion script for a given shell, run the following command:
+
+```shell
+medusa completion --help
+```
+
+Once you know how to run the autocompletion script, retrieve the script for that given shell using the following command:
+
+```shell
+medusa completion
+```
diff --git a/docs/src/cli/fuzz.md b/docs/src/cli/fuzz.md
new file mode 100644
index 00000000..ed70d15a
--- /dev/null
+++ b/docs/src/cli/fuzz.md
@@ -0,0 +1,131 @@
+# `fuzz`
+
+The `fuzz` command will initiate a fuzzing campaign:
+
+```shell
+medusa fuzz [flags]
+```
+
+## Supported Flags
+
+### `--config`
+
+The `--config` flag allows you to specify the path for your [project configuration](../project_configuration/overview.md)
+file. If the `--config` flag is not used, `medusa` will look for a [`medusa.json`](../static/medusa.json) file in the
+current working directory.
+
+```shell
+# Set config file path
+medusa fuzz --config myConfig.json
+```
+
+### `--compilation-target`
+
+The `--compilation-target` flag allows you to specify the compilation target. If you are using `crytic-compile`, please review the
+warning [here](../project_configuration/compilation_config.md#target) about changing the compilation target.
+ +```shell +# Set compilation target +medusa fuzz --target TestMyContract.sol +``` + +### `--workers` + +The `--workers` flag allows you to update the number of threads that will perform parallelized fuzzing (equivalent to +[`fuzzing.workers`](../project_configuration/fuzzing_config.md#workers)) + +```shell +# Set workers +medusa fuzz --workers 20 +``` + +### `--timeout` + +The `--timeout` flag allows you to update the duration of the fuzzing campaign (equivalent to +[`fuzzing.timeout`](../project_configuration/fuzzing_config.md#timeout)) + +```shell +# Set timeout +medusa fuzz --timeout 100 +``` + +### `--test-limit` + +The `--test-limit` flag allows you to update the number of transactions to run before stopping the fuzzing campaign +(equivalent to [`fuzzing.testLimit`](../project_configuration/fuzzing_config.md#testlimit)) + +```shell +# Set test limit +medusa fuzz --test-limit 100000 +``` + +### `--seq-len` + +The `--seq-len` flag allows you to update the length of a call sequence (equivalent to +[`fuzzing.callSequenceLength`](../project_configuration/fuzzing_config.md#callsequencelength)) + +```shell +# Set sequence length +medusa fuzz --seq-len 50 +``` + +### `--target-contracts` + +The `--target-contracts` flag allows you to update the target contracts for fuzzing (equivalent to +[`fuzzing.targetContracts`](../project_configuration/fuzzing_config.md#targetcontracts)) + +```shell +# Set target contracts +medusa fuzz --target-contracts "TestMyContract, TestMyOtherContract" +``` + +### `--corpus-dir` + +The `--corpus-dir` flag allows you to set the path for the corpus directory (equivalent to +[`fuzzing.corpusDirectory`](../project_configuration/fuzzing_config.md#corpusdirectory)) + +```shell +# Set corpus directory +medusa fuzz --corpus-dir corpus +``` + +### `--senders` + +The `--senders` flag allows you to update `medusa`'s senders (equivalent to +[`fuzzing.senderAddresses`](../project_configuration/fuzzing_config.md#senderaddresses)) + +```shell +# Set 
sender addresses +medusa fuzz --senders "0x50000,0x60000,0x70000" +``` + +### `--deployer` + +The `--deployer` flag allows you to update `medusa`'s contract deployer (equivalent to +[`fuzzing.deployerAddress`](../project_configuration/fuzzing_config.md#deployeraddress)) + +```shell +# Set deployer address +medusa fuzz --deployer "0x40000" +``` + +### `--trace-all` + +The `--trace-all` flag allows you to retrieve an execution trace for each element of a call sequence that triggered a test +failure (equivalent to +[`testing.traceAll`](../project_configuration/testing_config.md#traceall) + +```shell +# Trace each call +medusa fuzz --trace-all +``` + +### `--no-color` + +The `--no-color` flag disables colored console output (equivalent to +[`logging.NoColor`](../project_configuration/logging_config.md#nocolor)) + +```shell +# Disable colored output +medusa fuzz --no-color +``` diff --git a/docs/src/cli/init.md b/docs/src/cli/init.md new file mode 100644 index 00000000..e6f13cac --- /dev/null +++ b/docs/src/cli/init.md @@ -0,0 +1,36 @@ +# `init` + +The `init` command will generate the project configuration file within your current working directory: + +```shell +medusa init [platform] [flags] +``` + +By default, the project configuration file will be named `medusa.json`. You can learn more about `medusa`'s project +configuration [here](../project_configuration/overview.md) and also view an [example project configuration file](../static/medusa.json). + +Invoking this command without a `platform` argument will result in `medusa` using `crytic-compile` as the default compilation platform. +Currently, the only other supported platform is `solc`. If you are using a compilation platform such as Foundry or Hardhat, +it is best to use `crytic-compile`. + +## Supported Flags + +### `--out` + +The `--out` flag allows you to specify the output path for the project configuration file. 
Thus, you can name the file +something different from `medusa.json` or have the configuration file be placed elsewhere in your filesystem. + +```shell +# Set config file path +medusa init --out myConfig.json +``` + +### `--compilation-target` + +The `--compilation-target` flag allows you to specify the compilation target. If you are using `crytic-compile`, please review the +warning [here](../project_configuration/compilation_config.md#target) about changing the compilation target. + +```shell +# Set compilation target +medusa init --compilation-target TestMyContract.sol +``` diff --git a/docs/src/cli/overview.md b/docs/src/cli/overview.md new file mode 100644 index 00000000..b35fb74d --- /dev/null +++ b/docs/src/cli/overview.md @@ -0,0 +1,10 @@ +# CLI Overview + +The `medusa` CLI is used to perform parallelized fuzz testing of smart contracts. After you have `medusa` +[installed](../getting_started/installation.md), you can run `medusa help` in your terminal to view the available commands. + +The CLI supports three main commands with each command having a variety of flags: + +- [`medusa init`](./init.md) +- [`medusa fuzz`](./fuzz.md) +- [`medusa completion`](./completion.md) diff --git a/docs/src/console_logging.md b/docs/src/console_logging.md new file mode 100644 index 00000000..63d69510 --- /dev/null +++ b/docs/src/console_logging.md @@ -0,0 +1,55 @@ +# Console Logging + +Console logging in medusa is similar to the functionality found in Foundry or Hardhat (except for string formatting, +see [below](#differences-in-consolelogformatargs)). Note that if you are not using +Foundry or Hardhat as your compilation platform, you can retrieve the necessary `console.sol` library +[here](https://github.com/foundry-rs/forge-std/blob/master/src/console.sol). + +For more information on the available function signatures and general tips on console logging, please review [Foundry's +documentation](https://book.getfoundry.sh/reference/forge-std/console-log#console-logging). 
+ +## Differences in `console.log(format[,...args])` + +The core functionality of string formatting is the same. If you want to string format an `int256`, the only supported function signature is: +`function log(string memory, int256) external;`. Otherwise, the supported argument types are `string`, `bool`, `address`, +and `uint256`. This capability is the same as in Foundry. + +The core difference in medusa's string formatting is the specifiers that are allowed for the +formatted string. The supported specifiers are as follows: + +- `%v`: The value will be printed in its default format. This will work for `uint256`, `int256`, `address`, + `bool`, and `string`. Using `%v` is the **recommended** specifier for all argument types. +- `%s`: The values will be converted into a human-readable string. This will work for `uint256`, `int256`, `address`, and + `string`. Contrary to Foundry or Hardhat, `%s` will not work for `bool`. Additionally, `uint256` and `int256` will _not_ + be provided in their hex-encoded format. This is the **recommended** specifier for projects that wish to maintain + compatibility with an existing fuzz test suite from Foundry. Special exceptions will need to be made for `bool` arguments. + For example, you could use the `console.logBool(bool)` function to separately log the `bool`. +- `%d`: This can be used for `uint256` and `int256`. +- `%i`: This specifier is not supported by medusa for `int256` and `uint256` +- `%e`: This specifier is not supported by medusa for `int256` and `uint256`. +- `%x`: This provides the hexadecimal representation of `int256` and `uint256`. +- `%o`: This specifier is not supported by medusa. `%o` in medusa will provide the base-8 representation of `int256` and + `uint256`. +- `%t`: This can be used for `bool`. +- `%%`: This will print out "%" and not consume an argument. 
+ +If a specifier does not have a corresponding argument, the following is returned: + +```solidity +console.log("My name is %s %s", "medusa"); +// Returns: "My name is medusa %!s(MISSING)" +``` + +If there are more arguments than specifiers, the following is returned: + +```solidity +console.log("My name is %s", "medusa", "fuzzer"); +// Returns: "My name is medusa%!(EXTRA string=fuzzer)" +``` + +If only a format string with no arguments is provided, the string is returned with no formatting: + +```solidity +console.log("%% %s"); +// Returns: "%% %s" +``` diff --git a/docs/src/faq.md b/docs/src/faq.md new file mode 100644 index 00000000..4d5ea1b9 --- /dev/null +++ b/docs/src/faq.md @@ -0,0 +1,16 @@ +# Frequently Asked Questions + +**Why create a new fuzzer if Echidna is already a great fuzzer?** + +With medusa, we are exploring a different EVM implementation and language for our smart contract fuzzer. While Echidna is already doing an amazing job, medusa offers the following advantages: + +- It is written in Go, easing the maintenance and allowing the creation of a native API for future integration into other projects. +- It uses geth as a base, ensuring the EVM equivalence. + +**Should I switch to medusa right away?** + +We do not recommend switching to medusa until it is extensively tested. However we encourage you to try it, and [let us know your experience](https://github.com/trailofbits/medusa/issues). In that sense, Echidna is our robust and well tested fuzzer, while medusa is our new exploratory fuzzer. [Follow us](https://twitter.com/trailofbits/) to hear updates about medusa as it grows in maturity. + +**Will all the previous available documentation from [secure-contracts.com](https://secure-contracts.com/) will apply to medusa?** + +In general, yes. All the information on testing approaches and techniques will apply for medusa. 
There are, however, different configuration options names and a few missing or different features in medusa from Echidna that we will be updating over time. diff --git a/docs/src/getting_started/first_steps.md b/docs/src/getting_started/first_steps.md new file mode 100644 index 00000000..7cd8456d --- /dev/null +++ b/docs/src/getting_started/first_steps.md @@ -0,0 +1,36 @@ +# First Steps + +After installation, you are ready to use `medusa` on your first codebase. This chapter will walk you through initializing +`medusa` for a project and then starting to fuzz. + +To initialize medusa for a project, `cd` into your project and run [`medusa init`](../cli/init.md): + +```shell +# Change working directory +cd my_project + +# Initialize medusa +medusa init +``` + +This will create a `medusa.json` file which holds a large number of [configuration options](../project_configuration/overview.md). +`medusa` will use this configuration file to determine how and what to fuzz. + +All there is left to do now is to run `medusa` on some fuzz tests: + +```shell +medusa fuzz --target-contracts "TestContract" --test-limit 10_000 +``` + +The `--target-contracts` flag tells `medusa` which contracts to run fuzz tests on. You can specify more than one +contract to fuzz test at once (e.g. `--target-contracts "TestContract, TestOtherContract"`). The `--test-limit` flag +tells `medusa` to execute `10_000` transactions before stopping the fuzzing campaign. + +> Note: The target contracts and the test limit can also be configured via the project configuration file, which is the +> **recommended** route. The `--target-contracts` flag is equivalent to the +> [`fuzzing.targetContracts`](../project_configuration/fuzzing_config.md#targetcontracts) configuration option and the +> `-test-limit` flag is equivalent to the [`fuzzing.testLimit`](../project_configuration/fuzzing_config.md#testlimit) +> configuration option. 
+ +It is recommended to review the [Configuration Overview](../project_configuration/overview.md) next and learn more about +[`medusa`'s CLI](../cli/overview.md). diff --git a/docs/src/getting_started/installation.md b/docs/src/getting_started/installation.md new file mode 100644 index 00000000..531758ec --- /dev/null +++ b/docs/src/getting_started/installation.md @@ -0,0 +1,64 @@ +# Installation + +There are three main ways to install `medusa` at the moment. The first is using Homebrew, +building from source, or installing a precompiled binary. + +If you have any difficulty with installing `medusa`, please [open an issue](https://github.com/crytic/medusa/issues) on GitHub. + +## Installing with Homebrew + +Note that using Homebrew is only viable (and recommended) for macOS and Linux users. For Windows users, you must +[build from source](#building-from-source) or [install a precompiled binary](#precompiled-binaries). + +### Prerequisites + +Installation instructions for Homebrew can be found [here](https://brew.sh/). + +### Install `medusa` + +Run the following command to install `medusa`: + +```shell +brew install medusa +``` + +## Building from source + +### Prerequisites + +Before downloading `medusa`, you will need to download Golang and `crytic-compile`. + +- Installation instructions for Golang can be found [here](https://go.dev/doc/install) +- Installation instructions for `crytic-compile` can be found [here](https://github.com/crytic/crytic-compile#installation) + - Note that `crytic-compile` requires a Python environment. Installation instructions for Python can be found + [here](https://www.python.org/downloads/). 
+ +### Build `medusa` + +Run the following commands to build `medusa` (this should work on all OSes): + +```shell +# Clone the repository +git clone https://github.com/crytic/medusa + +# Build medusa +cd medusa +go build -trimpath +``` + +You will now need to move the binary (`medusa` or `medusa.exe`) to somewhere in your `PATH` environment variable so that +it is accessible via the command line. Please review the instructions +[here](https://zwbetz.com/how-to-add-a-binary-to-your-path-on-macos-linux-windows/) (if you are a Windows user, we +recommend using the Windows GUI). + +## Precompiled binaries + +The precompiled binaries can be downloaded on `medusa`'s [GitHub releases page](https://github.com/crytic/medusa/releases). + +> **_NOTE:_** macOS may set the [quarantine extended attribute](https://superuser.com/questions/28384/what-should-i-do-about-com-apple-quarantine) +> on the downloaded zip file. To remove this attribute, run the following command: +> `sudo xattr -rd com.apple.quarantine `. + +Once installed, you will need to unzip the file and move the binary to somewhere in your `$PATH`. Please review the instructions +[here](https://zwbetz.com/how-to-add-a-binary-to-your-path-on-macos-linux-windows/) (if you are a Windows user, we +recommend using the Windows GUI). diff --git a/docs/src/project_configuration/chain_config.md b/docs/src/project_configuration/chain_config.md new file mode 100644 index 00000000..13bc0685 --- /dev/null +++ b/docs/src/project_configuration/chain_config.md @@ -0,0 +1,31 @@ +# Chain Configuration + +The chain configuration defines the parameters for setting up `medusa`'s underlying blockchain. + +### `codeSizeCheckDisabled` + +- **Type**: Boolean +- **Description**: If `true`, the maximum code size check of 24576 bytes in `go-ethereum` is disabled. +- > 🚩 Setting `codeSizeCheckDisabled` to `false` is not recommended since it complicates the fuzz testing process. 
+- **Default**: `true` + +### `skipAccountChecks` + +- **Type**: Boolean +- **Description**: If `true`, account-related checks (nonce validation, transaction origin must be an EOA) are disabled in `go-ethereum`. +- **Default**: `true` + +## Cheatcode Configuration + +### `cheatCodesEnabled` + +- **Type**: Boolean +- **Description**: Determines whether cheatcodes are enabled. +- **Default**: `true` + +### `enableFFI` + +- **Type**: Boolean +- **Description**: Determines whether the `ffi` cheatcode is enabled. + > 🚩 Enabling the `ffi` cheatcode may allow for arbitrary code execution on your machine. +- **Default**: `false` diff --git a/docs/src/project_configuration/compilation_config.md b/docs/src/project_configuration/compilation_config.md new file mode 100644 index 00000000..4e298fdf --- /dev/null +++ b/docs/src/project_configuration/compilation_config.md @@ -0,0 +1,59 @@ +# Compilation Configuration + +The compilation configuration defines the parameters to use while compiling a target file or project. + +### `platform` + +- **Type**: String +- **Description**: Refers to the type of platform to be used to compile the underlying target. Currently, + `crytic-compile` or `solc` can be used as the compilation platform. +- **Default**: `crytic-compile` + +### `platformConfig` + +- **Type**: Struct +- **Description**: This struct is a platform-dependent structure which offers parameters for compiling the underlying project. + See below for the structure of `platformConfig` for each compilation platform. +- **Default**: The `platformConfig` for `crytic-compile` is the default value for this struct. + +### `platformConfig` for `crytic-compile` + +#### `target` + +- **Type**: String +- **Description**: Refers to the target that is being compiled. + > 🚩 Note that if you are using a compilation platform, such as Foundry or Hardhat, the default value for `target`, `.`, + > should **not** be changed. 
The `.` is equivalent to telling `crytic-compile` that the entire project needs to compiled, + > including any dependencies and remappings. In fact, unless you want to compile a single file, that has no third-party + > imports from, for example, OpenZeppelin, the default value should not be changed. +- **Default**: `.` + +#### `solcVersion` + +- **Type**: String +- **Description**: Describes the version of `solc` that will be installed and then used for compilation. Note that if you + are using a compilation platform, such as Foundry or Hardhat, this option does not need to be set. +- **Default**: "" + +#### `exportDirectory` + +- **Type**: String +- **Description**: Describes the directory where all compilation artifacts should be stored after compilation. Leaving it + empty will lead to the compilation artifacts being stored in `crytic-export/`. +- **Default**: "" + +#### `args` + +- **Type**: [String] +- **Description**: Refers to any additional args that one may want to provide to `crytic-compile`. Run `crytic-compile --help` + to view all of its supported flags. For example, if you would like to specify `--compile-force-framework foundry`, the + `args` value will be `"args": ["--compile-force-framework", "foundry"]`. + > 🚩 The `--export-format` and `--export-dir` are already used during compilation with `crytic-compile`. + > Re-using these flags in `args` will cause the compilation to fail. + +### `platformConfig` for `solc` + +#### `target` + +- **Type**: String +- **Description**: Refers to the target that is being compiled. The target must be a single `.sol` file. diff --git a/docs/src/project_configuration/fuzzing_config.md b/docs/src/project_configuration/fuzzing_config.md new file mode 100644 index 00000000..5adcd067 --- /dev/null +++ b/docs/src/project_configuration/fuzzing_config.md @@ -0,0 +1,219 @@ +# Fuzzing Configuration + +The fuzzing configuration defines the parameters for the fuzzing campaign. 
+ +### `workers` + +- **Type**: Integer +- **Description**: The number of worker threads to parallelize fuzzing operations on. +- **Default**: 10 workers + +### `workerResetLimit` + +- **Type**: Integer +- **Description**: The number of call sequences a worker should process on its underlying chain before being fully reset, + freeing memory. After resetting, the worker will be re-created and continue processing of call sequences. + > 🚩 This setting, along with `workers` influence the speed and memory consumption of the fuzzer. Setting this value + > higher will result in greater memory consumption per worker. Setting it too high will result in the in-memory + > chain's database growing to a size that is slower to process. Setting it too low may result in frequent worker resets + > that are computationally expensive for complex contract deployments that need to be replayed during worker reconstruction. +- **Default**: 50 sequences + +### `timeout` + +- **Type**: Integer +- **Description**: The number of seconds before the fuzzing campaign should be terminated. If a zero value is provided, + the timeout will not be enforced. The timeout begins after compilation succeeds and the fuzzing campaign has started. +- **Default**: 0 seconds + +### `testLimit` + +- **Type**: Integer +- **Description**: The number of function calls to make before the fuzzing campaign should be terminated. If a zero value + is provided, no test limit will be enforced. +- **Default**: 0 calls + +### `callSequenceLength` + +- **Type**: Integer +- **Description**: The maximum number of function calls to generate in a single call sequence in the attempt to violate + properties. After every `callSequenceLength` function calls, the blockchain is reset for the next sequence of transactions. +- **Default**: 100 calls/sequence + +### `coverageEnabled` + +- **Type**: Boolean +- **Description**: Whether coverage-increasing call sequences should be saved for the fuzzer to mutate/re-use. 
+ Enabling coverage allows for improved code exploration. +- **Default**: `true` + +### `corpusDirectory` + +- **Type**: String +- **Description**: The file path where the corpus should be saved. The corpus collects sequences during a fuzzing campaign + that help drive fuzzer features (e.g. a call sequence that increases code coverage is stored in the corpus). These sequences + can then be re-used/mutated by the fuzzer during the next fuzzing campaign. +- **Default**: "" + +### `coverageFormats` + +- **Type**: [String] (e.g. `["lcov"]`) +- **Description**: The coverage reports to generate after the fuzzing campaign has completed. The coverage reports are saved + in the `coverage` directory within `crytic-export/` or `corpusDirectory` if configured. +- **Default**: `["lcov", "html"]` + +### `targetContracts` + +- **Type**: [String] (e.g. `[FirstContract, SecondContract, ThirdContract]`) +- **Description**: The list of contracts that will be deployed on the blockchain and then targeted for fuzzing by `medusa`. + For single-contract compilations, this value can be left as `[]`. This, however, is rare since most projects are multi-contract compilations. + > 🚩 Note that the order specified in the array is the _order_ in which the contracts are deployed to the blockchain. + > Thus, if you have a `corpusDirectory` set up, and you change the order of the contracts in the array, the corpus may no + > longer work since the contract addresses of the target contracts will change. This may render the entire corpus useless. +- **Default**: `[]` + +### `predeployedContracts` + +- **Type**: `{"contractName": "contractAddress"}` (e.g.`{"TestContract": "0x1234"}`) +- **Description**: This configuration parameter allows you to deterministically deploy contracts at predefined addresses. + > 🚩 Predeployed contracts do not accept constructor arguments. This may be added in the future. +- **Default**: `{}` + +### `targetContractBalances` + +- **Type**: [Base-16 Strings] (e.g. 
`[0x123, 0x456, 0x789]`) +- **Description**: The starting balance for each contract in `targetContracts`. If the `constructor` for a target contract + is marked `payable`, this configuration option can be used to send ether during contract deployment. Note that this array + has a one-to-one mapping to `targetContracts`. Thus, if `targetContracts` is `[A, B, C]` and `targetContractsBalances` is + `["0", "0xff", "0"]`, then `B` will have a starting balance of 255 wei and `A` and `C` will have zero wei. Note that the wei-value + has to be hex-encoded and _cannot_ have leading zeros. For an improved user-experience, the balances may be encoded as base-10 + format strings in the future. +- **Default**: `[]` + +### `constructorArgs` + +- **Type**: `{"contractName": {"variableName": _value}}` +- **Description**: If a contract in the `targetContracts` has a `constructor` that takes in variables, these can be specified here. + An example can be found [here](#using-constructorargs). +- **Default**: `{}` + +### `deployerAddress` + +- **Type**: Address +- **Description**: The address used to deploy contracts on startup, represented as a hex string. + > 🚩 Changing this address may render entries in the corpus invalid since the addresses of the target contracts will change. +- **Default**: `0x30000` + +### `senderAddresses` + +- **Type**: [Address] +- **Description**: Defines the account addresses used to send function calls to deployed contracts in the fuzzing campaign. + > 🚩 Changing these addresses may render entries in the corpus invalid since the sender(s) of corpus transactions may no + > longer be valid. +- **Default**: `[0x10000, 0x20000, 0x30000]` + +### `blockNumberDelayMax` + +- **Type**: Integer +- **Description**: Defines the maximum block number jump the fuzzer should make between test transactions. The fuzzer + will use this value to make the next block's `block.number` between `[1, blockNumberDelayMax]` more than that of the previous + block. 
Jumping `block.number` allows `medusa` to enter code paths that require a given number of blocks to pass. +- **Default**: `60_480` + +### `blockTimestampDelayMax` + +- **Type**: Integer +- **Description**: The number of the maximum block timestamp jump the fuzzer should make between test transactions. + The fuzzer will use this value to make the next block's `block.timestamp` between `[1, blockTimestampDelayMax]` more + than that of the previous block. Jumping `block.timestamp`time allows `medusa` to enter code paths that require a given amount of time to pass. +- **Default**: `604_800` + +### `blockGasLimit` + +- **Type**: Integer +- **Description**: The maximum amount of gas a block's transactions can use in total (thus defining max transactions per block). + > 🚩 It is advised not to change this naively, as a minimum must be set for the chain to operate. +- **Default**: `125_000_000` + +### `transactionGasLimit` + +- **Type**: Integer +- **Description**: Defines the amount of gas sent with each fuzzer-generated transaction. + > 🚩 It is advised not to change this naively, as a minimum must be set for the chain to operate. +- **Default**: `12_500_000` + +## Using `constructorArgs` + +There might be use cases where contracts in `targetContracts` have constructors that accept arguments. The `constructorArgs` +configuration option allows you to specify those arguments. `constructorArgs` is a nested dictionary that maps +contract name -> variable name -> variable value. Let's look at an example below: + +```solidity +// This contract is used to test deployment of contracts with constructor arguments. 
+contract TestContract { + struct Abc { + uint a; + bytes b; + } + + uint x; + bytes2 y; + Abc z; + + constructor(uint _x, bytes2 _y, Abc memory _z) { + x = _x; + y = _y; + z = _z; + } +} + +contract DependentOnTestContract { + address deployed; + + constructor(address _deployed) { + deployed = _deployed; + } +} +``` + +In the example above, we have two contracts `TestContract` and `DependentOnTestContract`. You will note that +`DependentOnTestContract` requires the deployment of `TestContract` _first_ so that it can accept the address of where +`TestContract` was deployed. On the other hand, `TestContract` requires `_x`, `_y`, and `_z`. Here is what the +`constructorArgs` value would look like for the above deployment: + +> **Note**: The example below has removed all the other project configuration options outside of `targetContracts` and +> `constructorArgs` + +```json +{ + "fuzzing": { + "targetContracts": ["TestContract", "DependentOnTestContract"], + "constructorArgs": { + "TestContract": { + "_x": "123456789", + "_y": "0x5465", + "_z": { + "a": "0x4d2", + "b": "0x54657374206465706c6f796d656e74207769746820617267756d656e7473" + } + }, + "DependentOnTestContract": { + "_deployed": "DeployedContract:TestContract" + } + } + } +} +``` + +First, let us look at `targetContracts`. As mentioned in the [documentation for `targetContracts`](#targetcontracts), +the order of the contracts in the array determine the order of deployment. This means that `TestContract` will be +deployed first, which is what we want. + +Now, let us look at `constructorArgs`. `TestContract`'s dictionary specifies the _exact name_ of the constructor argument +(e.g. `_x` or `_y`) with their associated value. Since `_z` is of type `TestContract.Abc`, `_z` is also a dictionary +that specifies each field in the `TestContract.Abc` struct. + +For `DependentOnTestContract`, the `_deployed` key has +a value of `DeployedContract:TestContract`. 
This tells `medusa` to look for a deployed contract that has the name +`TestContract` and provide its address as the value for `_deployed`. Thus, whenever you need a deployed contract's +address as an argument for another contract, you must follow the format `DeployedContract:`. diff --git a/docs/src/project_configuration/logging_config.md b/docs/src/project_configuration/logging_config.md new file mode 100644 index 00000000..fb0af649 --- /dev/null +++ b/docs/src/project_configuration/logging_config.md @@ -0,0 +1,25 @@ +# Logging Configuration + +The logging configuration defines the parameters for logging to console and/or file. + +### `level` + +- **Type**: String +- **Description**: The log level will determine which logs are emitted or discarded. If `level` is "info" then all logs + with informational level or higher will be logged. The supported values for `level` are "trace", "debug", "info", "warn", "error", + and "panic". +- **Default**: "info" + +### `logDirectory` + +- **Type**: String +- **Description**: Describes what directory log files should be outputted. Have a non-empty `logDirectory` value will + enable "file logging" which will result in logs to be output to both console and file. Note that the directory path is + _relative_ to the directory containing the project configuration file. +- **Default**: "" + +### `noColor` + +- **Type**: Boolean +- **Description**: Disables colored output to console. +- **Default**: `false` diff --git a/docs/src/project_configuration/overview.md b/docs/src/project_configuration/overview.md new file mode 100644 index 00000000..972bd0c7 --- /dev/null +++ b/docs/src/project_configuration/overview.md @@ -0,0 +1,49 @@ +# Configuration Overview + +`medusa`'s project configuration provides extensive and granular control over the execution of the fuzzer. The project +configuration is a `.json` file that is broken down into five core components. 
+ +- [Fuzzing Configuration](./fuzzing_config.md): The fuzzing configuration dictates the parameters with which the fuzzer will execute. +- [Testing Configuration](./testing_config.md): The testing configuration dictates how and what `medusa` should fuzz test. +- [Chain Configuration](./chain_config.md): The chain configuration dictates how `medusa`'s underlying blockchain should be configured. +- [Compilation Configuration](./compilation_config.md): The compilation configuration dictates how to compile the fuzzing target. +- [Logging Configuration](./logging_config.md): The logging configuration dictates when and where to log events. + +To generate a project configuration file, run [`medusa init`](../cli/init.md). + +You can also view this [example project configuration file](../static/medusa.json) for visualization. + +## Recommended Configuration + +A common issue that first-time users face is identifying which configuration options to change. `medusa` provides an +incredible level of flexibility on how the fuzzer should run but this comes with a tradeoff of understanding the nuances +of what configuration options control what feature. Outlined below is a list of configuration options that we recommend +you become familiar with and change before starting to fuzz test. + +> **Note:** Having an [example project configuration file](../static/medusa.json) open will aid in visualizing which +> configuration options to change. + +### `fuzzing.targetContracts` + +Updating this configuration option is **required**! The `targetContracts` configuration option tells `medusa` which contracts +to fuzz test. You can specify one or more contracts for this option which is why it accepts an array +of strings. Let's say you have a fuzz testing contract called `TestStakingContract` that you want to test. +Then, you would set the value of `targetContracts` to `["TestStakingContract"]`. +You can learn more about this option [here](./fuzzing_config.md#targetcontracts). 
+ +### `fuzzing.testLimit` + +Updating test limit is optional but recommended. Test limit determines how many transactions `medusa` will execute before +stopping the fuzzing campaign. By default, the `testLimit` is set to 0. This means that `medusa` will run indefinitely. +While you iterate over your fuzz tests, it is beneficial to have a non-zero value. Thus, it is recommended to update this +value to `10_000` or `100_000` depending on the use case. You can learn more about this option [here](./fuzzing_config.md#testlimit). + +### `fuzzing.corpusDirectory` + +Updating the corpus directory is optional but recommended. The corpus directory determines where corpus items should be +stored on disk. A corpus item is a sequence of transactions that increased `medusa`'s coverage of the system. Thus, these +corpus items are valuable to store so that they can be re-used for the next fuzzing campaign. Additionally, the directory +will also hold [coverage reports](../testing/coverage_reports.md) which is a valuable tool for debugging and validation. For most cases, you may set +`corpusDirectory`'s value to "corpus". This will create a `corpus/` directory in the same directory as the `medusa.json` +file. +You can learn more about this option [here](./fuzzing_config.md#corpusdirectory). diff --git a/docs/src/project_configuration/testing_config.md b/docs/src/project_configuration/testing_config.md new file mode 100644 index 00000000..a9e2f073 --- /dev/null +++ b/docs/src/project_configuration/testing_config.md @@ -0,0 +1,190 @@ +# Testing Configuration + +The testing configuration can be broken down into a few subcomponents: + +- **High-level configuration**: Configures global testing parameters, regardless of the type of testing. +- **Assertion testing configuration**: Configures what kind of EVM panics should be treated as a failing fuzz test. +- **Property testing configuration**: Configures what kind of function signatures should be treated as property tests. 
+- **Optimization testing configuration**: Configures what kind of function signatures should be treated as optimization tests. + +We will go over each subcomponent one-by-one: + +## High-level Configuration + +### `stopOnFailedTest` + +- **Type**: Boolean +- **Description**: Determines whether the fuzzer should stop execution after the first _failed_ test. If `false`, `medusa` + will continue fuzzing until either the [`testLimit`](./fuzzing_config.md#testlimit) is hit, the [`timeout`](./fuzzing_config.md#timeout) + is hit, or the user manually stops execution. +- **Default**: `true` + +### `stopOnFailedContractMatching` + +- **Type**: Boolean +- **Description**: Determines whether the fuzzer should stop execution if it is unable to match the bytecode of a dynamically + deployed contract. A dynamically deployed contract is one that is created during the fuzzing campaign + (versus one that is specified in the [`fuzzing.targetContracts`](./fuzzing_config.md#targetcontracts)). + Here is an example of a dynamically deployed contract: + +```solidity + +contract MyContract { + OtherContract otherContract; + constructor() { + // This is a dynamically deployed contract + otherContract = new otherContract(); + } +} +``` + +- **Default**: `false` + +### `stopOnNoTests` + +- **Type**: Boolean +- **Description**: Determines whether the fuzzer should stop execution if no tests are found + (property tests, assertion tests, optimization tests, or custom API-level tests). If `false` and no tests are found, + `medusa` will continue fuzzing until either the [`testLimit`](./fuzzing_config.md#testlimit) is hit, + the [`timeout`](./fuzzing_config.md#timeout) is hit, or the user manually stops execution. 
+- **Default**: `true` + +### `testAllContracts` + +- **Type**: Boolean +- **Description**: Determines whether all contracts should be tested (including dynamically deployed ones), rather than + just the contracts specified in the project configuration's [`fuzzing.targetContracts`](./fuzzing_config.md#targetcontracts). +- **Default**: `false` + +### `traceAll`: + +- **Type**: Boolean +- **Description**: Determines whether an `execution trace` should be attached to each element of a call sequence + that triggered a test failure. +- **Default**: `false` + +### `targetFunctionSignatures`: + +- **Type**: [String] +- **Description**: A list of function signatures that the fuzzer should exclusively target by omitting calls to other signatures. The signatures should specify the contract name and signature in the ABI format like `Contract.func(uint256,bytes32)`. + > **Note**: Property and optimization tests will always be called even if they are not explicitly specified in this list. +- **Default**: `[]` + +### `excludeFunctionSignatures`: + +- **Type**: [String] +- **Description**: A list of function signatures that the fuzzer should exclude from the fuzzing campaign. The signatures should specify the contract name and signature in the ABI format like `Contract.func(uint256,bytes32)`. + > **Note**: Property and optimization tests will always be called and cannot be excluded. +- **Default**: `[]` + +## Assertion Testing Configuration + +### `enabled` + +- **Type**: Boolean +- **Description**: Enable or disable assertion testing +- **Default**: `true` + +### `testViewMethods` + +- **Type**: Boolean +- **Description**: Whether `pure` / `view` functions should be tested for assertion failures. +- **Default**: `false` + +### `panicCodeConfig` + +- **Type**: Struct +- **Description**: This struct describes the various types of EVM-level panics that should be considered a "failing case". + By default, only an `assert(false)` is considered a failing case. 
However, these configuration options would allow a user + to treat arithmetic overflows or division by zero as failing cases as well. + +#### `failOnAssertion` + +- **Type**: Boolean +- **Description**: Triggering an assertion failure (e.g. `assert(false)`) should be treated as a failing case. +- **Default**: `true` + +#### `failOnCompilerInsertedPanic` + +- **Type**: Boolean +- **Description**: Triggering a compiler-inserted panic should be treated as a failing case. +- **Default**: `false` + +#### `failOnArithmeticUnderflow` + +- **Type**: Boolean +- **Description**: Arithmetic underflow or overflow should be treated as a failing case +- **Default**: `false` + +#### `failOnDivideByZero` + +- **Type**: Boolean +- **Description**: Dividing by zero should be treated as a failing case +- **Default**: `false` + +#### `failOnEnumTypeConversionOutOfBounds` + +- **Type**: Boolean +- **Description**: An out-of-bounds enum access should be treated as a failing case +- **Default**: `false` + +#### `failOnIncorrectStorageAccess` + +- **Type**: Boolean +- **Description**: An out-of-bounds storage access should be treated as a failing case +- **Default**: `false` + +#### `failOnPopEmptyArray` + +- **Type**: Boolean +- **Description**: A `pop()` operation on an empty array should be treated as a failing case +- **Default**: `false` + +#### `failOnOutOfBoundsArrayAccess` + +- **Type**: Boolean +- **Description**: An out-of-bounds array access should be treated as a failing case +- **Default**: `false` + +#### `failOnAllocateTooMuchMemory` + +- **Type**: Boolean +- **Description**: Overallocation/excessive memory usage should be treated as a failing case +- **Default**: `false` + +#### `failOnCallUninitializedVariable` + +- **Type**: Boolean +- **Description**: Calling an uninitialized variable should be treated as a failing case +- **Default**: `false` + +## Property Testing Configuration + +### `enabled` + +- **Type**: Boolean +- **Description**: Enable or disable property 
testing. +- **Default**: `true` + +### `testPrefixes` + +- **Type**: [String] +- **Description**: The list of prefixes that the fuzzer will use to determine whether a given function is a property test or not. + For example, if `property_` is a test prefix, then any function name in the form `property_*` may be a property test. + > **Note**: If you are moving over from Echidna, you can add `echidna_` as a test prefix to quickly port over the property tests from it. +- **Default**: `[property_]` + +## Optimization Testing Configuration + +### `enabled` + +- **Type**: Boolean +- **Description**: Enable or disable optimization testing. +- **Default**: `true` + +### `testPrefixes` + +- **Type**: [String] +- **Description**: The list of prefixes that the fuzzer will use to determine whether a given function is an optimization + test or not. For example, if `optimize_` is a test prefix, then any function name in the form `optimize_*` may be an optimization test. +- **Default**: `[optimize_]` diff --git a/docs/src/static/contract_deployment.png b/docs/src/static/contract_deployment.png new file mode 100644 index 00000000..a021b01c Binary files /dev/null and b/docs/src/static/contract_deployment.png differ diff --git a/docs/src/static/coverage.png b/docs/src/static/coverage.png new file mode 100644 index 00000000..f15676ea Binary files /dev/null and b/docs/src/static/coverage.png differ diff --git a/docs/src/static/custom.css b/docs/src/static/custom.css new file mode 100644 index 00000000..e4565d1a --- /dev/null +++ b/docs/src/static/custom.css @@ -0,0 +1,6 @@ +img[alt="medusa_logo"] { + width: 60%; + margin-left: auto; + margin-right: auto; + display: block; +} diff --git a/docs/src/static/function_level_testing_medusa.json b/docs/src/static/function_level_testing_medusa.json new file mode 100644 index 00000000..79f2aa21 --- /dev/null +++ b/docs/src/static/function_level_testing_medusa.json @@ -0,0 +1,72 @@ +{ + "fuzzing": { + "workers": 10, + "workerResetLimit": 50, +
"timeout": 0, + "testLimit": 1000, + "callSequenceLength": 1, + "corpusDirectory": "", + "coverageEnabled": true, + "targetContracts": ["TestDepositContract"], + "targetContractsBalances": ["0xfffffffffffffffffffffffffffffff"], + "constructorArgs": {}, + "deployerAddress": "0x30000", + "senderAddresses": ["0x10000", "0x20000", "0x30000"], + "blockNumberDelayMax": 60480, + "blockTimestampDelayMax": 604800, + "blockGasLimit": 125000000, + "transactionGasLimit": 12500000, + "testing": { + "stopOnFailedTest": true, + "stopOnFailedContractMatching": false, + "stopOnNoTests": true, + "testAllContracts": false, + "traceAll": false, + "assertionTesting": { + "enabled": true, + "testViewMethods": false, + "panicCodeConfig": { + "failOnCompilerInsertedPanic": false, + "failOnAssertion": true, + "failOnArithmeticUnderflow": false, + "failOnDivideByZero": false, + "failOnEnumTypeConversionOutOfBounds": false, + "failOnIncorrectStorageAccess": false, + "failOnPopEmptyArray": false, + "failOnOutOfBoundsArrayAccess": false, + "failOnAllocateTooMuchMemory": false, + "failOnCallUninitializedVariable": false + } + }, + "propertyTesting": { + "enabled": true, + "testPrefixes": ["property_"] + }, + "optimizationTesting": { + "enabled": true, + "testPrefixes": ["optimize_"] + } + }, + "chainConfig": { + "codeSizeCheckDisabled": true, + "cheatCodes": { + "cheatCodesEnabled": true, + "enableFFI": false + } + } + }, + "compilation": { + "platform": "crytic-compile", + "platformConfig": { + "target": "test.sol", + "solcVersion": "", + "exportDirectory": "", + "args": [] + } + }, + "logging": { + "level": "info", + "logDirectory": "", + "noColor": false + } +} diff --git a/docs/src/static/medusa.json b/docs/src/static/medusa.json new file mode 100644 index 00000000..8d08a8d0 --- /dev/null +++ b/docs/src/static/medusa.json @@ -0,0 +1,77 @@ +{ + "fuzzing": { + "workers": 10, + "workerResetLimit": 50, + "timeout": 0, + "testLimit": 0, + "shrinkLimit": 5000, + "callSequenceLength": 100, + 
"corpusDirectory": "", + "coverageEnabled": true, + "targetContracts": [], + "predeployedContracts": {}, + "targetContractsBalances": [], + "constructorArgs": {}, + "deployerAddress": "0x30000", + "senderAddresses": ["0x10000", "0x20000", "0x30000"], + "blockNumberDelayMax": 60480, + "blockTimestampDelayMax": 604800, + "blockGasLimit": 125000000, + "transactionGasLimit": 12500000, + "testing": { + "stopOnFailedTest": true, + "stopOnFailedContractMatching": false, + "stopOnNoTests": true, + "testAllContracts": false, + "traceAll": false, + "assertionTesting": { + "enabled": true, + "testViewMethods": false, + "panicCodeConfig": { + "failOnCompilerInsertedPanic": false, + "failOnAssertion": true, + "failOnArithmeticUnderflow": false, + "failOnDivideByZero": false, + "failOnEnumTypeConversionOutOfBounds": false, + "failOnIncorrectStorageAccess": false, + "failOnPopEmptyArray": false, + "failOnOutOfBoundsArrayAccess": false, + "failOnAllocateTooMuchMemory": false, + "failOnCallUninitializedVariable": false + } + }, + "propertyTesting": { + "enabled": true, + "testPrefixes": ["property_"] + }, + "optimizationTesting": { + "enabled": true, + "testPrefixes": ["optimize_"] + }, + "targetFunctionSignatures": [], + "excludeFunctionSignatures": [] + }, + "chainConfig": { + "codeSizeCheckDisabled": true, + "cheatCodes": { + "cheatCodesEnabled": true, + "enableFFI": false + }, + "skipAccountChecks": true + } + }, + "compilation": { + "platform": "crytic-compile", + "platformConfig": { + "target": ".", + "solcVersion": "", + "exportDirectory": "", + "args": [] + } + }, + "logging": { + "level": "info", + "logDirectory": "", + "noColor": false + } +} diff --git a/docs/src/static/medusa_logo.png b/docs/src/static/medusa_logo.png new file mode 100755 index 00000000..11c6061e Binary files /dev/null and b/docs/src/static/medusa_logo.png differ diff --git a/docs/src/testing/coverage_reports.md b/docs/src/testing/coverage_reports.md new file mode 100644 index 00000000..c0441625 --- 
/dev/null +++ b/docs/src/testing/coverage_reports.md @@ -0,0 +1,43 @@ +## Coverage Reports + +### Generating HTML Report from LCOV + +Enable coverage reporting by setting the `corpusDirectory` key in the configuration file and setting the `coverageReports` key to `["lcov", "html"]`. + +```json +{ + "corpusDirectory": "corpus", + "coverageReports": ["lcov", "html"] +} +``` + +### Install lcov and genhtml + +Linux: + +```bash +apt-get install lcov +``` + +MacOS: + +```bash +brew install lcov +``` + +### Generate HTML Report from the LCOV Data + +```bash +genhtml corpus/coverage/lcov.info --output-dir corpus --rc derive_function_end_line=0 +``` + +> [!WARNING] +> **The `derive_function_end_line` flag is required to prevent the `genhtml` tool from crashing when processing the Solidity source code.** + +Open the `corpus/index.html` file in your browser or follow the steps to use VSCode below. + +### View Coverage Report in VSCode with Coverage Gutters + +Install the [Coverage Gutters](https://marketplace.visualstudio.com/items?itemName=ryanluker.vscode-coverage-gutters) extension. + +Then, right click in a project file and select `Coverage Gutters: Display Coverage`. diff --git a/docs/src/testing/fuzzing_lifecycle.md b/docs/src/testing/fuzzing_lifecycle.md new file mode 100644 index 00000000..278930e1 --- /dev/null +++ b/docs/src/testing/fuzzing_lifecycle.md @@ -0,0 +1,136 @@ +# The Fuzzing Lifecycle + +Understanding what `medusa` is doing under-the-hood significantly aids in understanding how to fuzz smart contracts +and also in writing fuzz tests. This chapter will walk you through the process of deploying the target contracts, +generating and executing call sequences, using the corpus, updating coverage, and resetting the blockchain. + +## Contract deployment + +The contract deployment process will deploy each contract specified in the +[`fuzzing.targetContracts`](../project_configuration/fuzzing_config.md#targetcontracts) one-by-one.
Any contracts that +are dynamically deployed during the _construction_ of a target contract are also deployed. The deployment of these +contracts are done by the configurable +[`fuzzing.deployerAddress`](../project_configuration/fuzzing_config.md#deployeraddress) address. + +We will call the state of the blockchain after all the target contracts are deployed as the **"initial deployment state"**. +This is the state of the blockchain before any transactions have been executed by the fuzzer. Here is what the underlying +blockchain would look like after the deployment of contracts `A`, `B`, `C` (assuming no other dynamic deployments). + +![Contract Deployment Diagram](../static/contract_deployment.png) + +Now that we have our target contracts deployed, we can start executing call sequences! + +## Call sequence execution + +Call sequence execution is the crux of the fuzzing lifecycle. At a high-level, call sequence execution is the **iterative +process of executing fuzzed transactions on the initial deployment state** in hopes of violating (or validating) an invariant. +Before continuing, it is important to understand what a call sequence is. + +### Defining a call sequence + +A call sequence is an **array of individual transactions**. The length of the array is governed by the +[`fuzzing.callSequenceLength`](../project_configuration/fuzzing_config.md#callsequencelength) configuration parameter. +The fuzzer will maintain EVM state for the duration of a call sequence before [resetting the state](#resetting-the-blockchain). +Thus, if you have a call sequence length of 10, `medusa` will execute 10 transactions, maintain state throughout that +process, and then wipe the EVM state. Having a call sequence length of 1 means that `medusa` will wipe the state after +each transaction. This is useful for fuzz testing arithmetic libraries or isolated functions. + +Now that you know what a call sequence is, let's discuss how to generate a call sequence. 
+ +### Generating a call sequence + +Call sequence generation can happen in two main ways: + +1. Generate a completely random call sequence +2. Mutate an existing call sequence from the corpus + +Let's talk about each possibility. Generating a completely random call sequence is straightforward. If we have a +`fuzzing.callSequenceLength` of 50, we will generate 50 random transactions. + +> **Definition**: A random transaction is a call to a random method in one of the target contracts. Any input arguments +> to the method are fuzzed values. + +The second possibility is more nuanced. To understand how to mutate an existing call sequence from the corpus, we need +to first discuss the idea of coverage and what a corpus is. + +### Coverage and the corpus + +Tracking coverage is one of the most powerful features of `medusa`. + +> **Definition**: Coverage is a measure of what parts of the code have been executed by the fuzzer + +Coverage is tracked in a rather simple fashion. For each target contract, we maintain a byte array where the length of the +byte array is equal to the length of that contract's bytecode. If a certain transaction caused us to execute an opcode +that we had not executed before, we increased coverage of that contract. + +![Coverage Tracking Diagram](../static/coverage.png) + +As shown in the figure above, the `CALL` opcode was just executed causing the coverage array's value to be updated at that +index. The next natural question is, how do we harness this information to improve the fuzzer? + +This is where the idea of a **corpus** comes in. + +> **Definition**: The corpus is a structure that holds "interesting" or "coverage-increasing" call sequences. + +Thus, when `medusa` runs, if it finds a call sequence that increased its coverage of the system, it will add it to the corpus. +These call sequences are invaluable to `medusa` because they allowed it to explore a larger portion of the system. 
This is +what makes `medusa` a **coverage-guided fuzzer**. + +> **Definition**: A coverage-guided fuzzer is one that aims to maximize its coverage of the system. + +Tracking coverage and storing coverage-increasing sequences in the corpus also allows `medusa` to re-use these sequences. +This takes us back to the second possibility when generating call sequences: mutating an existing sequence from the corpus. + +The reason we re-use call sequences is that we know that the call sequence in question improved our coverage. So, we +might as well re-use it, **mutate** it, and then execute that mutated call sequence in hopes of further increasing our coverage. +There are a variety of mutational strategies that `medusa` employs. For example, `medusa` can take a call sequence from the corpus and append a new random +transaction at the end of it. This is called **mutational fuzzing**. + +> **Definition**: Mutational fuzzing is the practice of taking existing data samples and generating new variants of them +> (mutants). + +Now that we know what a call sequence is, how to generate them, and how to track coverage, we can finally discuss how +these call sequences are executed. + +### Executing the call sequence + +Call sequence execution happens in an _iterative_ fashion. 
Here is some pseudocode on how it happens: + +``` +# Generate a new call sequence or mutate one from the corpus +sequence = generator.NewCallSequence() + +# Iteratively execute each call in the call sequence +for i < len(sequence) { + # Retrieve the i-th element in the sequence + tx = sequence[i] + + # Run the transaction on the blockchain and retrieve the result + result = blockchain.executeTransaction(tx) + + # Update coverage + increasedCoverage = coverageTracker.updateCoverage() + + # If coverage increased, add sequence[:i+1] to the corpus + if increasedCoverage { + corpus.addCallSequence(sequence[:i+1]) + } + + # Check for invariant failures + encounteredFailure = tester.checkForInvariantFailures(result) + + # Let user know we had a failing test case + if encounteredFailure { + reportFailedTestCase() + } +} +``` + +The one portion of the above pseudocode that we did not discuss is checking for invariant failures. We will discuss +the different types of invariants and what an invariant failure means in the [next chapter](./invariants.md). + +## Resetting the blockchain + +The final step in the fuzzing lifecycle is resetting the blockchain. Resetting the blockchain is as simple as reverting +to the "initial deployment state" of the blockchain. Once we reset back to the "initial deployment state", we can now generate and execute +another call sequence! diff --git a/docs/src/testing/invariants.md b/docs/src/testing/invariants.md new file mode 100644 index 00000000..4994c81b --- /dev/null +++ b/docs/src/testing/invariants.md @@ -0,0 +1,69 @@ +# Types of Invariants + +As discussed in the [testing overview](./overview.md) chapter, invariants describe the "truths" of your system. These +are unchanging properties that arise from the design of a codebase. + +> **Note**: We will interchange the use of the word property and invariant often. For all intents and purposes, they +> mean the same thing.
+ +Defining and testing your invariants is critical to assessing the **expected system behavior**. + +We like to break down invariants into two general categories: function-level invariants and system-level invariants. +Note that there are other ways of defining and scoping invariants, but this distinction is generally sufficient to +start fuzz testing even the most complex systems. + +## Function-level invariants + +A function-level invariant can be defined as follows: + +> **Definition**: A function-level invariant is a property that arises from the execution of a specific function. + +Let's take the following function from a smart contract: + +```solidity +function deposit() public payable { + // Make sure that the total deposited amount does not exceed the limit + uint256 amount = msg.value; + require(totalDeposited + amount <= MAX_DEPOSIT_AMOUNT); + + // Update the user balance and total deposited + balances[msg.sender] += amount; + totalDeposited += amount; + + emit Deposit(msg.sender, amount, totalDeposited); +} +``` + +The `deposit` function has the following function-level invariants: + +1. The ETH balance of `msg.sender` must decrease by `amount`. +2. The ETH of `address(this)` must increase by `amount`. +3. `balances[msg.sender]` should increase by `amount`. +4. The `totalDeposited` value should increase by `amount`. + +Note that there are other properties that can also be tested for but the above should highlight what a function-level +invariant is. In general, function-level invariants can be identified by assessing what must be true _before_ the execution +of a function and what must be true _after_ the execution of that same function. In the next chapter, we will write a +fuzz test to test the `deposit` function and how to use medusa to run that test. + +Let's now look at system-level invariants.
+ +## System-level invariants + +A system-level invariant can be defined as follows: + +> **Definition**: A system-level invariant is a property that holds true across the _entire_ execution of a system + +Thus, a system-level invariant is a lot more generalized than a function-level invariant. Here are two common examples +of a system-level invariant: + +1. The `xy=k` constant product formula should always hold for Uniswap pools +2. No user's balance should ever exceed the total supply for an ERC20 token. + +In the `deposit` function above, we also see the presence of a system-level invariant: + +**The `totalDeposited` amount should always be less than or equal to the `MAX_DEPOSIT_AMOUNT`**. + +Since the `totalDeposited` value can be affected by the presence of other functions in the system +(e.g. `withdraw` or `stake`), it is best tested at the system level instead of the function level. We will look at how +to write system-level invariants in the [Writing System-Level Invariants](./writing-system-level-invariants.md) chapter. diff --git a/docs/src/testing/overview.md b/docs/src/testing/overview.md new file mode 100644 index 00000000..0ee8181c --- /dev/null +++ b/docs/src/testing/overview.md @@ -0,0 +1,26 @@ +# Testing Overview + +This chapter discusses the overarching goal of smart contract fuzzing. + +Traditional fuzz testing (e.g. with [`AFL`](https://lcamtuf.coredump.cx/afl/)) aims to generally explore a binary by providing +random inputs in an effort to identify new system states or crash the program (please note that this is a pretty crude generalization). +This model, however, does not translate to the smart contract ecosystem since you cannot cause a smart contract to "crash". +A transaction that reverts, for example, is not equivalent to a binary crashing or panicking. + +Thus, with smart contracts, we have to change the fuzzing paradigm.
When you hear of "fuzzing smart contracts", you are +not trying to crash the program but, instead, you are trying to validate the **invariants** of the program. + +> **Definition**: An invariant is a property that remains unchanged after one or more operations are applied to it. + +More generally, an invariant is a "truth" about some system. For smart contracts, this can take many faces. + +1. **Mathematical invariants**: `a + b = b + a`. The commutative property is an invariant and any Solidity math library + should uphold this property. +2. **ERC20 tokens**: The sum of all user balances should never exceed the total supply of the token. +3. **Automated market maker (e.g. Uniswap)**: `xy = k`. The constant-product formula is an invariant that maintains the + economic guarantees of AMMs such as Uniswap. + +> **Definition**: Smart contract fuzzing uses random sequences of transactions to test the invariants of the smart contract system. + +Before we explore how to identify, write, and test invariants, it is beneficial to understand how smart contract fuzzing +works under-the-hood. diff --git a/docs/src/testing/tips.md b/docs/src/testing/tips.md new file mode 100644 index 00000000..a2de2928 --- /dev/null +++ b/docs/src/testing/tips.md @@ -0,0 +1,32 @@ +## Tips for Testing with Medusa + +### General + +- **Use multiple testing modes:** Medusa supports property testing, assertion testing, and optimization testing. Use a combination of modes to thoroughly test your contracts. +- **Write clear and concise tests:** Your tests should be easy to read and understand. Avoid complex logic or unnecessary code. +- **Test edge cases:** Consider testing extreme values and unusual inputs to ensure your contracts handle them correctly. +- **Use a variety of test inputs:** Generate a diverse set of test inputs to cover a wide range of scenarios. +- **Monitor gas consumption:** Medusa can track gas consumption during testing. 
Use this information to identify areas where your contracts can be optimized. + +### Property Testing + +- **Choose meaningful properties:** The properties you test should be important invariants of your contract. + +### Assertion Testing + +- **Use assertions judiciously:** Assertions can be useful for catching errors, but they can also slow down testing. Use them only when necessary. +- **Test for both valid and invalid inputs:** Ensure your assertions check for both valid and invalid inputs to thoroughly test your contract's behavior. +- **Use pre-conditions and post-conditions to verify the state of the contract before and after a function call:** Pre-conditions and post-conditions are assertions that can be used to verify the state of the contract before and after a function call. This can help to ensure that the function is called with the correct inputs, that it produces the expected outputs, and that the state of the contract is valid. + +### Optimization Testing + +- **Choose a meaningful optimization goal:** The goal of your optimization test should be to maximize a specific metric, such as the return value of a function. + +- **Use a variety of optimization techniques:** Medusa supports multiple optimization techniques, such as genetic algorithms and simulated annealing. Consider using different techniques to find the best solution. + +### Additional Tips + +- **Use a configuration file:** A configuration file allows you to customize Medusa's behavior and specify additional testing parameters. +- **Use corpus and coverage information to improve the effectiveness of your fuzzing campaigns:** Corpus and coverage information can be used to improve the effectiveness of your fuzzing campaigns by providing feedback on the quality of the test inputs. +- **Run Medusa in parallel:** Medusa can run tests in parallel to speed up the testing process. +- **Review the test results carefully:** Medusa provides detailed test results.
Take the time to review them carefully and identify any potential issues. +- **Use Medusa as part of your development process:** Integrate Medusa into your development workflow to regularly test your contracts and identify potential bugs early on. diff --git a/docs/src/testing/writing-function-level-invariants.md b/docs/src/testing/writing-function-level-invariants.md new file mode 100644 index 00000000..ebe4e856 --- /dev/null +++ b/docs/src/testing/writing-function-level-invariants.md @@ -0,0 +1,145 @@ +## Writing Function-Level Invariants + +This chapter will walk you through writing function-level fuzz tests for the `deposit` function that we saw in the [previous chapter](./invariants.md#function-level-invariants). + +Before we write the fuzz tests, let's look into how we would write a unit test for the `deposit` function: + +```solidity +function testDeposit() public { + // The amount of tokens to deposit + uint256 amount = 10 ether; + + // Retrieve balance of user before deposit + preBalance = depositContract.balances(address(this)); + + // Call the deposit contract (let's assume this contract has 10 ether) + depositContract.deposit{value: amount}(); + + // Assert post-conditions + assert(depositContract.balances(msg.sender) == preBalance + amount); + // Add other assertions here +} +``` + +What we will notice about the test above is that it _fixes_ the value that is being sent. It is unable to test how the +`deposit` function behaves across a variety of input spaces. Thus, a function-level fuzz test can be thought of as a +"unit test on steroids". Instead of fixing the `amount`, we let the fuzzer control the `amount` value to any number between +`[0, type(uint256).max]` and see how the system behaves to that. + +> **Note**: One of the core differences between a traditional unit test versus a fuzz test is that a fuzz test accepts input arguments that the fuzzer can control. 
+ +### Writing a Fuzz Test for the `deposit` Function + +Here is what a fuzz test for the `deposit` function would look like: + +```solidity +function testDeposit(uint256 _amount) public { + // Let's bound the input to be _at most_ the ETH balance of this contract + // The amount value will now be between [0, address(this).balance] + uint256 amount = clampLte(_amount, address(this).balance); + + // Retrieve balance of user before deposit + uint256 preBalance = depositContract.balances(address(this)); + + // Call the deposit contract with a variable amount + depositContract.deposit{value: _amount}(); + + // Assert post-conditions + assert(depositContract.balances(address(this)) == preBalance + amount); + // Add other assertions here +} +``` + +Notice that we bounded the `_amount` variable to be less than or equal to the test contract's ETH balance. +This type of bounding is very common when writing fuzz tests. Bounding allows you to only test values that are reasonable. +If `address(this)` doesn't have enough ETH, it does not make sense to try and call the `deposit` function. Additionally, +although we only tested one of the function-level invariants from the [previous chapter](./invariants.md), writing the remaining +would follow a similar pattern as the one written above. + +## Running a function-level test with medusa + +Let's now run the above example with medusa.
Here is the test code: + +```solidity +contract DepositContract { + // @notice MAX_DEPOSIT_AMOUNT is the maximum amount that can be deposited into this contract + uint256 public constant MAX_DEPOSIT_AMOUNT = 1_000_000e18; + + // @notice balances holds user balances + mapping(address => uint256) public balances; + + // @notice totalDeposited represents the current deposited amount across all users + uint256 public totalDeposited; + + // @notice Deposit event is emitted after a deposit occurs + event Deposit(address depositor, uint256 amount, uint256 totalDeposited); + + // @notice deposit allows user to deposit into the system + function deposit() public payable { + // Make sure that the total deposited amount does not exceed the limit + uint256 amount = msg.value; + require(totalDeposited + amount <= MAX_DEPOSIT_AMOUNT); + + // Update the user balance and total deposited + balances[msg.sender] += amount; + totalDeposited += amount; + + emit Deposit(msg.sender, amount, totalDeposited); + } +} + +contract TestDepositContract { + + // @notice depositContract is an instance of DepositContract + DepositContract depositContract; + + constructor() payable { + // Deploy the deposit contract + depositContract = new DepositContract(); + } + + // @notice testDeposit tests the DepositContract.deposit function + function testDeposit(uint256 _amount) public { + // Let's bound the input to be _at most_ the ETH balance of this contract + // The amount value will now in between [0, address(this).balance] + uint256 amount = clampLte(_amount, address(this).balance); + + // Retrieve balance of user before deposit + uint256 preBalance = depositContract.balances(address(this)); + + // Call the deposit contract with a variable amount + depositContract.deposit{value: _amount}(); + + // Assert post-conditions + assert(depositContract.balances(address(this)) == preBalance + amount); + // Add other assertions here + } + + // @notice clampLte returns a value between [a, b] + function 
clampLte(uint256 a, uint256 b) internal returns (uint256) { + if (!(a <= b)) { + uint256 value = a % (b + 1); + return value; + } + return a; + } + +} +``` + +To run this test contract, download the project configuration file [here](../static/function_level_testing_medusa.json), +rename it to `medusa.json`, and run: + +``` +medusa fuzz --config medusa.json +``` + +The following changes were made to the default project configuration file to allow this test to run: + +- `fuzzing.targetContracts`: The `fuzzing.targetContracts` value was updated to `["TestDepositContract"]`. +- `fuzzing.targetContractsBalances`: The `fuzzing.targetContractsBalances` was updated to `["0xfffffffffffffffffffffffffffffff"]` + to allow the `TestDepositContract` contract to have an ETH balance allowing the fuzzer to correctly deposit funds into the + `DepositContract`. +- `fuzzing.testLimit`: The `fuzzing.testLimit` was set to `1_000` to shorten the duration of the fuzzing campaign. +- `fuzzing.callSequenceLength`: The `fuzzing.callSequenceLength` was set to `1` so that the `TestDepositContract` can be + reset with its full ETH balance after each transaction. diff --git a/docs/src/testing/writing-system-level-invariants.md b/docs/src/testing/writing-system-level-invariants.md new file mode 100644 index 00000000..5cf2b0e1 --- /dev/null +++ b/docs/src/testing/writing-system-level-invariants.md @@ -0,0 +1,3 @@ +## Writing System-Level Invariants with Medusa + +WIP diff --git a/docs/src/testing/writing-tests.md b/docs/src/testing/writing-tests.md new file mode 100644 index 00000000..f8aacca2 --- /dev/null +++ b/docs/src/testing/writing-tests.md @@ -0,0 +1,189 @@ +# Testing with `medusa` + +`medusa`, like Echidna, supports the following testing modes: + +1. [Property Mode](https://secure-contracts.com/program-analysis/echidna/introduction/how-to-test-a-property.html) +2. [Assertion Mode](https://secure-contracts.com/program-analysis/echidna/basic/assertion-checking.html) +3. 
[Optimization Mode](https://secure-contracts.com/program-analysis/echidna/advanced/optimization_mode.html) + +For more advanced information and documentation on how the various modes work and their pros/cons, check out [secure-contracts.com](https://secure-contracts.com/program-analysis/echidna/index.html) + +## Writing property tests + +Property tests are represented as functions within a Solidity contract whose names are prefixed with a prefix specified by the `testPrefixes` configuration option (`fuzz_` is the default test prefix). Additionally, they must take no arguments and return a `bool` indicating if the test succeeded. + +```solidity +contract TestXY { + uint x; + uint y; + + function setX(uint value) public { + x = value + 3; + } + + function setY(uint value) public { + y = value + 9; + } + + function fuzz_never_specific_values() public returns (bool) { + // ASSERTION: x should never be 10 at the same time y is 80 + return !(x == 10 && y == 80); + } +} +``` + +`medusa` deploys your contract containing property tests and generates a sequence of calls to execute against all publicly accessible methods. After each function call, it calls upon your property tests to ensure they return a `true` (success) status. + +### Testing in property-mode + +To begin a fuzzing campaign in property-mode, you can run `medusa fuzz` or `medusa fuzz --config [config_path]`. + +> **Note**: Learn more about running `medusa` with its CLI [here](../cli/overview.md). + +Invoking this fuzzing campaign, `medusa` will: + +- Compile the given targets +- Start the configured number of worker threads, each with their own local Ethereum test chain. +- Deploy all contracts to each worker's test chain. +- Begin to generate and send call sequences to update contract state. +- Check property tests all succeed after each call executed. 
+ +Upon discovery of a failed property test, `medusa` will halt, reporting the call sequence used to violate any property test(s): + +``` +[FAILED] Property Test: TestXY.fuzz_never_specific_values() +Test "TestXY.fuzz_never_specific_values()" failed after the following call sequence: +1) TestXY.setY([71]) (gas=4712388, gasprice=1, value=0, sender=0x2222222222222222222222222222222222222222) +2) TestXY.setX([7]) (gas=4712388, gasprice=1, value=0, sender=0x3333333333333333333333333333333333333333) +``` + +## Writing assertion tests + +Although both property-mode and assertion-mode try to validate / invalidate invariants of the system, they do so in different ways. In property-mode, `medusa` will look for functions with a specific test prefix (e.g. `fuzz_`) and test those. In assertion-mode, `medusa` will test to see if a given call sequence can cause the Ethereum Virtual Machine (EVM) to "panic". The EVM has a variety of panic codes for different scenarios. For example, there is a unique panic code when an `assert(x)` statement returns `false` or when a division by zero is encountered. In assertion mode, which panics should or should not be treated as "failing test cases" can be toggled by updating the [Project Configuration](../project_configuration/fuzzing_config.md#fuzzing-configuration). By default, only `FailOnAssertion` is enabled. Check out the [Example Project Configuration File](https://github.com/crytic/medusa/wiki/Example-Project-Configuration-File) for a visualization of the various panic codes that can be enabled. An explanation of the various panic codes can be found in the [Solidity documentation](https://docs.soliditylang.org/en/latest/control-structures.html#panic-via-assert-and-error-via-require). + +Please note that the behavior of assertion mode is different between `medusa` and Echidna. Echidna will only test for `assert(x)` statements while `medusa` provides additional flexibility. 
+ +```solidity +contract TestContract { + uint x; + uint y; + + function setX(uint value) public { + x = value; + + // ASSERTION: x should be an even number + assert(x % 2 == 0); + } +} +``` + +During a call sequence, if `setX` is called with a `value` that breaks the assertion (e.g. `value = 3`), `medusa` will treat this as a failing property and report it back to the user. + +### Testing in assertion-mode + +To begin a fuzzing campaign in assertion-mode, you can run `medusa fuzz --assertion-mode` or `medusa fuzz --config [config_path] --assertion-mode`. + +> **Note**: Learn more about running `medusa` with its CLI [here](../cli/overview.md). + +Invoking this fuzzing campaign, `medusa` will: + +- Compile the given targets +- Start the configured number of worker threads, each with their own local Ethereum test chain. +- Deploy all contracts to each worker's test chain. +- Begin to generate and send call sequences to update contract state. +- Check to see if there are any failing assertions after each call executed. + +Upon discovery of a failed assertion, `medusa` will halt, reporting the call sequence used to violate any assertions: + +``` +Fuzzer stopped, test results follow below ... +[FAILED] Assertion Test: TestContract.setX(uint256) +Test for method "TestContract.setX(uint256)" failed after the following call sequence resulted in an assertion: +1) TestContract.setX([102552480437485684723695021980667056378352338398148431990087576385563741034353]) (block=2, time=4, gas=12500000, gasprice=1, value=0, sender=0x1111111111111111111111111111111111111111) +``` + +## Writing optimization tests + +Optimization mode's goal is not to validate/invalidate properties but instead to maximize the return value of a function. Similar to property mode, these functions must be prefixed with a prefix specified by the `testPrefixes` configuration option (`optimize_` is the default test prefix). Additionally, they must take no arguments and return an `int256`. 
A good use case for optimization mode is to try to quantify the impact of a bug (e.g. a rounding error). + +```solidity +contract TestContract { + int256 input; + + function set(int256 _input) public { + input = _input; + } + + function optimize_opt_linear() public view returns (int256) { + if (input > -4242) return -input; + else return 0; + } +} +``` + +`medusa` deploys your contract containing optimization tests and generates a sequence of calls to execute against all publicly accessible methods. After each function call, it calls upon your optimization tests to identify whether the return value of those tests is greater than the currently stored values. + +### Testing in optimization-mode + +To begin a fuzzing campaign in optimization-mode, you can run `medusa fuzz --optimization-mode` or `medusa fuzz --config [config_path] --optimization-mode`. + +> **Note**: Learn more about running `medusa` with its CLI [here](../cli/overview.md). + +Invoking this fuzzing campaign, `medusa` will: + +- Compile the given targets +- Start the configured number of worker threads, each with their own local Ethereum test chain. +- Deploy all contracts to each worker's test chain. +- Begin to generate and send call sequences to update contract state. +- Check to see if the return value of the optimization test is greater than the cached value. + - If the value is greater, update the cached value. + +Once the test limit or timeout for the fuzzing campaign has been reached, `medusa` will halt and report the call sequence that maximized the return value of the function: + +``` +Fuzzer stopped, test results follow below ... 
+[PASSED] Optimization Test: TestContract.optimize_opt_linear() +Optimization test "TestContract.optimize_opt_linear()" resulted in the maximum value: 4241 with the following sequence: +1) TestContract.set(-4241) (block=2, time=3, gas=12500000, gasprice=1, value=0, sender=0x0000000000000000000000000000000000010000) +``` + +## Testing with multiple modes + +Note that we can run `medusa` with one, many, or no modes enabled. Running `medusa fuzz --assertion-mode --optimization-mode` will run all three modes at the same time, since property-mode is enabled by default. If a project configuration file is used, any combination of the three modes can be toggled. In fact, all three modes can be disabled and `medusa` will still run. Please review the [Project Configuration](https://github.com/crytic/medusa/wiki/Project-Configuration) wiki page and the [Project Configuration Example](https://github.com/crytic/medusa/wiki/Example-Project-Configuration-File) for more information. + +```solidity +contract TestContract { + int256 input; + + function set(int256 _input) public { + input = _input; + } + + function failing_assert_method(uint value) public { + // ASSERTION: We always fail when you call this function. + assert(false); + } + + function fuzz_failing_property() public view returns (bool) { + // ASSERTION: fail immediately. + return false; + } + + function optimize_opt_linear() public view returns (int256) { + if (input > -4242) return -input; + else return 0; + } +} +``` + +Invoking a fuzzing campaign with `medusa fuzz --assertion-mode --optimization-mode` (note all three modes are enabled), `medusa` will: + +- Compile the given targets +- Start the configured number of worker threads, each with their own local Ethereum test chain. +- Deploy all contracts to each worker's test chain. +- Begin to generate and send call sequences to update contract state. +- Check to see: + - If property tests all succeed after each call executed. 
+ - If a panic (which was enabled in the project configuration) has been triggered after each call. + - Whether the return value of the optimization test is greater than the cached value. + - Update the cached value if it is greater. diff --git a/docs/theme/favicon.png b/docs/theme/favicon.png new file mode 100755 index 00000000..a72d998f Binary files /dev/null and b/docs/theme/favicon.png differ diff --git a/docs/theme/favicon.svg b/docs/theme/favicon.svg new file mode 100755 index 00000000..78259cf9 --- /dev/null +++ b/docs/theme/favicon.svg @@ -0,0 +1,129 @@ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/theme/highlight.js b/docs/theme/highlight.js new file mode 100644 index 00000000..8d926897 --- /dev/null +++ b/docs/theme/highlight.js @@ -0,0 +1,6 @@ +/* + Highlight.js 10.1.1 (93fd0d73) + License: BSD-3-Clause + Copyright (c) 2006-2020, Ivan Sagalaev +*/ +var hljs=function(){"use strict";function e(n){Object.freeze(n);var t="function"==typeof n;return Object.getOwnPropertyNames(n).forEach((function(r){!Object.hasOwnProperty.call(n,r)||null===n[r]||"object"!=typeof n[r]&&"function"!=typeof n[r]||t&&("caller"===r||"callee"===r||"arguments"===r)||Object.isFrozen(n[r])||e(n[r])})),n}class n{constructor(e){void 0===e.data&&(e.data={}),this.data=e.data}ignoreMatch(){this.ignore=!0}}function t(e){return e.replace(/&/g,"&").replace(//g,">").replace(/"/g,""").replace(/'/g,"'")}function r(e,...n){var t={};for(const n in e)t[n]=e[n];return n.forEach((function(e){for(const n in e)t[n]=e[n]})),t}function a(e){return e.nodeName.toLowerCase()}var i=Object.freeze({__proto__:null,escapeHTML:t,inherit:r,nodeStream:function(e){var n=[];return function e(t,r){for(var i=t.firstChild;i;i=i.nextSibling)3===i.nodeType?r+=i.nodeValue.length:1===i.nodeType&&(n.push({event:"start",offset:r,node:i}),r=e(i,r),a(i).match(/br|hr|img|input/)||n.push({event:"stop",offset:r,node:i}));return r}(e,0),n},mergeStreams:function(e,n,r){var i=0,s="",o=[];function l(){return 
e.length&&n.length?e[0].offset!==n[0].offset?e[0].offset"}function u(e){s+=""}function d(e){("start"===e.event?c:u)(e.node)}for(;e.length||n.length;){var g=l();if(s+=t(r.substring(i,g[0].offset)),i=g[0].offset,g===e){o.reverse().forEach(u);do{d(g.splice(0,1)[0]),g=l()}while(g===e&&g.length&&g[0].offset===i);o.reverse().forEach(c)}else"start"===g[0].event?o.push(g[0].node):o.pop(),d(g.splice(0,1)[0])}return s+t(r.substr(i))}});const s="",o=e=>!!e.kind;class l{constructor(e,n){this.buffer="",this.classPrefix=n.classPrefix,e.walk(this)}addText(e){this.buffer+=t(e)}openNode(e){if(!o(e))return;let n=e.kind;e.sublanguage||(n=`${this.classPrefix}${n}`),this.span(n)}closeNode(e){o(e)&&(this.buffer+=s)}value(){return this.buffer}span(e){this.buffer+=``}}class c{constructor(){this.rootNode={children:[]},this.stack=[this.rootNode]}get top(){return this.stack[this.stack.length-1]}get root(){return this.rootNode}add(e){this.top.children.push(e)}openNode(e){const n={kind:e,children:[]};this.add(n),this.stack.push(n)}closeNode(){if(this.stack.length>1)return this.stack.pop()}closeAllNodes(){for(;this.closeNode(););}toJSON(){return JSON.stringify(this.rootNode,null,4)}walk(e){return this.constructor._walk(e,this.rootNode)}static _walk(e,n){return"string"==typeof n?e.addText(n):n.children&&(e.openNode(n),n.children.forEach(n=>this._walk(e,n)),e.closeNode(n)),e}static _collapse(e){"string"!=typeof e&&e.children&&(e.children.every(e=>"string"==typeof e)?e.children=[e.children.join("")]:e.children.forEach(e=>{c._collapse(e)}))}}class u extends c{constructor(e){super(),this.options=e}addKeyword(e,n){""!==e&&(this.openNode(n),this.addText(e),this.closeNode())}addText(e){""!==e&&this.add(e)}addSublanguage(e,n){const t=e.root;t.kind=n,t.sublanguage=!0,this.add(t)}toHTML(){return new l(this,this.options).value()}finalize(){return!0}}function d(e){return e?"string"==typeof e?e:e.source:null}const 
g="(-?)(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)",h={begin:"\\\\[\\s\\S]",relevance:0},f={className:"string",begin:"'",end:"'",illegal:"\\n",contains:[h]},p={className:"string",begin:'"',end:'"',illegal:"\\n",contains:[h]},b={begin:/\b(a|an|the|are|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such|will|you|your|they|like|more)\b/},m=function(e,n,t={}){var a=r({className:"comment",begin:e,end:n,contains:[]},t);return a.contains.push(b),a.contains.push({className:"doctag",begin:"(?:TODO|FIXME|NOTE|BUG|OPTIMIZE|HACK|XXX):",relevance:0}),a},v=m("//","$"),x=m("/\\*","\\*/"),E=m("#","$");var _=Object.freeze({__proto__:null,IDENT_RE:"[a-zA-Z]\\w*",UNDERSCORE_IDENT_RE:"[a-zA-Z_]\\w*",NUMBER_RE:"\\b\\d+(\\.\\d+)?",C_NUMBER_RE:g,BINARY_NUMBER_RE:"\\b(0b[01]+)",RE_STARTERS_RE:"!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~",SHEBANG:(e={})=>{const n=/^#![ ]*\//;return e.binary&&(e.begin=function(...e){return 
e.map(e=>d(e)).join("")}(n,/.*\b/,e.binary,/\b.*/)),r({className:"meta",begin:n,end:/$/,relevance:0,"on:begin":(e,n)=>{0!==e.index&&n.ignoreMatch()}},e)},BACKSLASH_ESCAPE:h,APOS_STRING_MODE:f,QUOTE_STRING_MODE:p,PHRASAL_WORDS_MODE:b,COMMENT:m,C_LINE_COMMENT_MODE:v,C_BLOCK_COMMENT_MODE:x,HASH_COMMENT_MODE:E,NUMBER_MODE:{className:"number",begin:"\\b\\d+(\\.\\d+)?",relevance:0},C_NUMBER_MODE:{className:"number",begin:g,relevance:0},BINARY_NUMBER_MODE:{className:"number",begin:"\\b(0b[01]+)",relevance:0},CSS_NUMBER_MODE:{className:"number",begin:"\\b\\d+(\\.\\d+)?(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?",relevance:0},REGEXP_MODE:{begin:/(?=\/[^/\n]*\/)/,contains:[{className:"regexp",begin:/\//,end:/\/[gimuy]*/,illegal:/\n/,contains:[h,{begin:/\[/,end:/\]/,relevance:0,contains:[h]}]}]},TITLE_MODE:{className:"title",begin:"[a-zA-Z]\\w*",relevance:0},UNDERSCORE_TITLE_MODE:{className:"title",begin:"[a-zA-Z_]\\w*",relevance:0},METHOD_GUARD:{begin:"\\.\\s*[a-zA-Z_]\\w*",relevance:0},END_SAME_AS_BEGIN:function(e){return Object.assign(e,{"on:begin":(e,n)=>{n.data._beginMatch=e[1]},"on:end":(e,n)=>{n.data._beginMatch!==e[1]&&n.ignoreMatch()}})}}),N="of and for in not or if then".split(" ");function w(e,n){return n?+n:function(e){return N.includes(e.toLowerCase())}(e)?0:1}const R=t,y=r,{nodeStream:k,mergeStreams:O}=i,M=Symbol("nomatch");return function(t){var a=[],i={},s={},o=[],l=!0,c=/(^(<[^>]+>|\t|)+|\n)/gm,g="Could not find the language '{}', did you forget to load/include a language module?";const h={disableAutodetect:!0,name:"Plain text",contains:[]};var f={noHighlightRe:/^(no-?highlight)$/i,languageDetectRe:/\blang(?:uage)?-([\w-]+)\b/i,classPrefix:"hljs-",tabReplace:null,useBR:!1,languages:null,__emitter:u};function p(e){return f.noHighlightRe.test(e)}function b(e,n,t,r){var a={code:n,language:e};S("before:highlight",a);var i=a.result?a.result:m(a.language,a.code,t,r);return 
i.code=a.code,S("after:highlight",i),i}function m(e,t,a,s){var o=t;function c(e,n){var t=E.case_insensitive?n[0].toLowerCase():n[0];return Object.prototype.hasOwnProperty.call(e.keywords,t)&&e.keywords[t]}function u(){null!=y.subLanguage?function(){if(""!==A){var e=null;if("string"==typeof y.subLanguage){if(!i[y.subLanguage])return void O.addText(A);e=m(y.subLanguage,A,!0,k[y.subLanguage]),k[y.subLanguage]=e.top}else e=v(A,y.subLanguage.length?y.subLanguage:null);y.relevance>0&&(I+=e.relevance),O.addSublanguage(e.emitter,e.language)}}():function(){if(!y.keywords)return void O.addText(A);let e=0;y.keywordPatternRe.lastIndex=0;let n=y.keywordPatternRe.exec(A),t="";for(;n;){t+=A.substring(e,n.index);const r=c(y,n);if(r){const[e,a]=r;O.addText(t),t="",I+=a,O.addKeyword(n[0],e)}else t+=n[0];e=y.keywordPatternRe.lastIndex,n=y.keywordPatternRe.exec(A)}t+=A.substr(e),O.addText(t)}(),A=""}function h(e){return e.className&&O.openNode(e.className),y=Object.create(e,{parent:{value:y}})}function p(e){return 0===y.matcher.regexIndex?(A+=e[0],1):(L=!0,0)}var b={};function x(t,r){var i=r&&r[0];if(A+=t,null==i)return u(),0;if("begin"===b.type&&"end"===r.type&&b.index===r.index&&""===i){if(A+=o.slice(r.index,r.index+1),!l){const n=Error("0 width match regex");throw n.languageName=e,n.badRule=b.rule,n}return 1}if(b=r,"begin"===r.type)return function(e){var t=e[0],r=e.rule;const a=new n(r),i=[r.__beforeBegin,r["on:begin"]];for(const n of i)if(n&&(n(e,a),a.ignore))return p(t);return r&&r.endSameAsBegin&&(r.endRe=RegExp(t.replace(/[-/\\^$*+?.()|[\]{}]/g,"\\$&"),"m")),r.skip?A+=t:(r.excludeBegin&&(A+=t),u(),r.returnBegin||r.excludeBegin||(A=t)),h(r),r.returnBegin?0:t.length}(r);if("illegal"===r.type&&!a){const e=Error('Illegal lexeme "'+i+'" for mode "'+(y.className||"")+'"');throw e.mode=y,e}if("end"===r.type){var s=function(e){var t=e[0],r=o.substr(e.index),a=function e(t,r,a){let i=function(e,n){var t=e&&e.exec(n);return t&&0===t.index}(t.endRe,a);if(i){if(t["on:end"]){const e=new 
n(t);t["on:end"](r,e),e.ignore&&(i=!1)}if(i){for(;t.endsParent&&t.parent;)t=t.parent;return t}}if(t.endsWithParent)return e(t.parent,r,a)}(y,e,r);if(!a)return M;var i=y;i.skip?A+=t:(i.returnEnd||i.excludeEnd||(A+=t),u(),i.excludeEnd&&(A=t));do{y.className&&O.closeNode(),y.skip||y.subLanguage||(I+=y.relevance),y=y.parent}while(y!==a.parent);return a.starts&&(a.endSameAsBegin&&(a.starts.endRe=a.endRe),h(a.starts)),i.returnEnd?0:t.length}(r);if(s!==M)return s}if("illegal"===r.type&&""===i)return 1;if(B>1e5&&B>3*r.index)throw Error("potential infinite loop, way more iterations than matches");return A+=i,i.length}var E=T(e);if(!E)throw console.error(g.replace("{}",e)),Error('Unknown language: "'+e+'"');var _=function(e){function n(n,t){return RegExp(d(n),"m"+(e.case_insensitive?"i":"")+(t?"g":""))}class t{constructor(){this.matchIndexes={},this.regexes=[],this.matchAt=1,this.position=0}addRule(e,n){n.position=this.position++,this.matchIndexes[this.matchAt]=n,this.regexes.push([n,e]),this.matchAt+=function(e){return RegExp(e.toString()+"|").exec("").length-1}(e)+1}compile(){0===this.regexes.length&&(this.exec=()=>null);const e=this.regexes.map(e=>e[1]);this.matcherRe=n(function(e,n="|"){for(var t=/\[(?:[^\\\]]|\\.)*\]|\(\??|\\([1-9][0-9]*)|\\./,r=0,a="",i=0;i0&&(a+=n),a+="(";o.length>0;){var l=t.exec(o);if(null==l){a+=o;break}a+=o.substring(0,l.index),o=o.substring(l.index+l[0].length),"\\"===l[0][0]&&l[1]?a+="\\"+(+l[1]+s):(a+=l[0],"("===l[0]&&r++)}a+=")"}return a}(e),!0),this.lastIndex=0}exec(e){this.matcherRe.lastIndex=this.lastIndex;const n=this.matcherRe.exec(e);if(!n)return null;const t=n.findIndex((e,n)=>n>0&&void 0!==e),r=this.matchIndexes[t];return n.splice(0,t),Object.assign(n,r)}}class a{constructor(){this.rules=[],this.multiRegexes=[],this.count=0,this.lastIndex=0,this.regexIndex=0}getMatcher(e){if(this.multiRegexes[e])return this.multiRegexes[e];const n=new t;return 
this.rules.slice(e).forEach(([e,t])=>n.addRule(e,t)),n.compile(),this.multiRegexes[e]=n,n}considerAll(){this.regexIndex=0}addRule(e,n){this.rules.push([e,n]),"begin"===n.type&&this.count++}exec(e){const n=this.getMatcher(this.regexIndex);n.lastIndex=this.lastIndex;const t=n.exec(e);return t&&(this.regexIndex+=t.position+1,this.regexIndex===this.count&&(this.regexIndex=0)),t}}function i(e,n){const t=e.input[e.index-1],r=e.input[e.index+e[0].length];"."!==t&&"."!==r||n.ignoreMatch()}if(e.contains&&e.contains.includes("self"))throw Error("ERR: contains `self` is not supported at the top-level of a language. See documentation.");return function t(s,o){const l=s;if(s.compiled)return l;s.compiled=!0,s.__beforeBegin=null,s.keywords=s.keywords||s.beginKeywords;let c=null;if("object"==typeof s.keywords&&(c=s.keywords.$pattern,delete s.keywords.$pattern),s.keywords&&(s.keywords=function(e,n){var t={};return"string"==typeof e?r("keyword",e):Object.keys(e).forEach((function(n){r(n,e[n])})),t;function r(e,r){n&&(r=r.toLowerCase()),r.split(" ").forEach((function(n){var r=n.split("|");t[r[0]]=[e,w(r[0],r[1])]}))}}(s.keywords,e.case_insensitive)),s.lexemes&&c)throw Error("ERR: Prefer `keywords.$pattern` to `mode.lexemes`, BOTH are not allowed. 
(see mode reference) ");return l.keywordPatternRe=n(s.lexemes||c||/\w+/,!0),o&&(s.beginKeywords&&(s.begin="\\b("+s.beginKeywords.split(" ").join("|")+")(?=\\b|\\s)",s.__beforeBegin=i),s.begin||(s.begin=/\B|\b/),l.beginRe=n(s.begin),s.endSameAsBegin&&(s.end=s.begin),s.end||s.endsWithParent||(s.end=/\B|\b/),s.end&&(l.endRe=n(s.end)),l.terminator_end=d(s.end)||"",s.endsWithParent&&o.terminator_end&&(l.terminator_end+=(s.end?"|":"")+o.terminator_end)),s.illegal&&(l.illegalRe=n(s.illegal)),void 0===s.relevance&&(s.relevance=1),s.contains||(s.contains=[]),s.contains=[].concat(...s.contains.map((function(e){return function(e){return e.variants&&!e.cached_variants&&(e.cached_variants=e.variants.map((function(n){return r(e,{variants:null},n)}))),e.cached_variants?e.cached_variants:function e(n){return!!n&&(n.endsWithParent||e(n.starts))}(e)?r(e,{starts:e.starts?r(e.starts):null}):Object.isFrozen(e)?r(e):e}("self"===e?s:e)}))),s.contains.forEach((function(e){t(e,l)})),s.starts&&t(s.starts,o),l.matcher=function(e){const n=new a;return e.contains.forEach(e=>n.addRule(e.begin,{rule:e,type:"begin"})),e.terminator_end&&n.addRule(e.terminator_end,{type:"end"}),e.illegal&&n.addRule(e.illegal,{type:"illegal"}),n}(l),l}(e)}(E),N="",y=s||_,k={},O=new f.__emitter(f);!function(){for(var e=[],n=y;n!==E;n=n.parent)n.className&&e.unshift(n.className);e.forEach(e=>O.openNode(e))}();var A="",I=0,S=0,B=0,L=!1;try{for(y.matcher.considerAll();;){B++,L?L=!1:(y.matcher.lastIndex=S,y.matcher.considerAll());const e=y.matcher.exec(o);if(!e)break;const n=x(o.substring(S,e.index),e);S=e.index+n}return x(o.substr(S)),O.closeAllNodes(),O.finalize(),N=O.toHTML(),{relevance:I,value:N,language:e,illegal:!1,emitter:O,top:y}}catch(n){if(n.message&&n.message.includes("Illegal"))return{illegal:!0,illegalBy:{msg:n.message,context:o.slice(S-100,S+100),mode:n.mode},sofar:N,relevance:0,value:R(o),emitter:O};if(l)return{illegal:!1,relevance:0,value:R(o),emitter:O,language:e,top:y,errorRaised:n};throw n}}function 
v(e,n){n=n||f.languages||Object.keys(i);var t=function(e){const n={relevance:0,emitter:new f.__emitter(f),value:R(e),illegal:!1,top:h};return n.emitter.addText(e),n}(e),r=t;return n.filter(T).filter(I).forEach((function(n){var a=m(n,e,!1);a.language=n,a.relevance>r.relevance&&(r=a),a.relevance>t.relevance&&(r=t,t=a)})),r.language&&(t.second_best=r),t}function x(e){return f.tabReplace||f.useBR?e.replace(c,e=>"\n"===e?f.useBR?"
":e:f.tabReplace?e.replace(/\t/g,f.tabReplace):e):e}function E(e){let n=null;const t=function(e){var n=e.className+" ";n+=e.parentNode?e.parentNode.className:"";const t=f.languageDetectRe.exec(n);if(t){var r=T(t[1]);return r||(console.warn(g.replace("{}",t[1])),console.warn("Falling back to no-highlight mode for this block.",e)),r?t[1]:"no-highlight"}return n.split(/\s+/).find(e=>p(e)||T(e))}(e);if(p(t))return;S("before:highlightBlock",{block:e,language:t}),f.useBR?(n=document.createElement("div")).innerHTML=e.innerHTML.replace(/\n/g,"").replace(//g,"\n"):n=e;const r=n.textContent,a=t?b(t,r,!0):v(r),i=k(n);if(i.length){const e=document.createElement("div");e.innerHTML=a.value,a.value=O(i,k(e),r)}a.value=x(a.value),S("after:highlightBlock",{block:e,result:a}),e.innerHTML=a.value,e.className=function(e,n,t){var r=n?s[n]:t,a=[e.trim()];return e.match(/\bhljs\b/)||a.push("hljs"),e.includes(r)||a.push(r),a.join(" ").trim()}(e.className,t,a.language),e.result={language:a.language,re:a.relevance,relavance:a.relevance},a.second_best&&(e.second_best={language:a.second_best.language,re:a.second_best.relevance,relavance:a.second_best.relevance})}const N=()=>{if(!N.called){N.called=!0;var e=document.querySelectorAll("pre code");a.forEach.call(e,E)}};function T(e){return e=(e||"").toLowerCase(),i[e]||i[s[e]]}function A(e,{languageName:n}){"string"==typeof e&&(e=[e]),e.forEach(e=>{s[e]=n})}function I(e){var n=T(e);return n&&!n.disableAutodetect}function S(e,n){var t=e;o.forEach((function(e){e[t]&&e[t](n)}))}Object.assign(t,{highlight:b,highlightAuto:v,fixMarkup:x,highlightBlock:E,configure:function(e){f=y(f,e)},initHighlighting:N,initHighlightingOnLoad:function(){window.addEventListener("DOMContentLoaded",N,!1)},registerLanguage:function(e,n){var r=null;try{r=n(t)}catch(n){if(console.error("Language definition for '{}' could not be registered.".replace("{}",e)),!l)throw 
n;console.error(n),r=h}r.name||(r.name=e),i[e]=r,r.rawDefinition=n.bind(null,t),r.aliases&&A(r.aliases,{languageName:e})},listLanguages:function(){return Object.keys(i)},getLanguage:T,registerAliases:A,requireLanguage:function(e){var n=T(e);if(n)return n;throw Error("The '{}' language is required, but not loaded.".replace("{}",e))},autoDetection:I,inherit:y,addPlugin:function(e){o.push(e)}}),t.debugMode=function(){l=!1},t.safeMode=function(){l=!0},t.versionString="10.1.1";for(const n in _)"object"==typeof _[n]&&e(_[n]);return Object.assign(t,_),t}({})}();"object"==typeof exports&&"undefined"!=typeof module&&(module.exports=hljs);hljs.registerLanguage("php",function(){"use strict";return function(e){var r={begin:"\\$+[a-zA-Z_-ÿ][a-zA-Z0-9_-ÿ]*"},t={className:"meta",variants:[{begin:/<\?php/,relevance:10},{begin:/<\?[=]?/},{begin:/\?>/}]},a={className:"string",contains:[e.BACKSLASH_ESCAPE,t],variants:[{begin:'b"',end:'"'},{begin:"b'",end:"'"},e.inherit(e.APOS_STRING_MODE,{illegal:null}),e.inherit(e.QUOTE_STRING_MODE,{illegal:null})]},n={variants:[e.BINARY_NUMBER_MODE,e.C_NUMBER_MODE]},i={keyword:"__CLASS__ __DIR__ __FILE__ __FUNCTION__ __LINE__ __METHOD__ __NAMESPACE__ __TRAIT__ die echo exit include include_once print require require_once array abstract and as binary bool boolean break callable case catch class clone const continue declare default do double else elseif empty enddeclare endfor endforeach endif endswitch endwhile eval extends final finally float for foreach from global goto if implements instanceof insteadof int integer interface isset iterable list new object or private protected public real return string switch throw trait try unset use var void while xor yield",literal:"false null true",built_in:"Error|0 AppendIterator ArgumentCountError ArithmeticError ArrayIterator ArrayObject AssertionError BadFunctionCallException BadMethodCallException CachingIterator CallbackFilterIterator CompileError Countable DirectoryIterator DivisionByZeroError 
DomainException EmptyIterator ErrorException Exception FilesystemIterator FilterIterator GlobIterator InfiniteIterator InvalidArgumentException IteratorIterator LengthException LimitIterator LogicException MultipleIterator NoRewindIterator OutOfBoundsException OutOfRangeException OuterIterator OverflowException ParentIterator ParseError RangeException RecursiveArrayIterator RecursiveCachingIterator RecursiveCallbackFilterIterator RecursiveDirectoryIterator RecursiveFilterIterator RecursiveIterator RecursiveIteratorIterator RecursiveRegexIterator RecursiveTreeIterator RegexIterator RuntimeException SeekableIterator SplDoublyLinkedList SplFileInfo SplFileObject SplFixedArray SplHeap SplMaxHeap SplMinHeap SplObjectStorage SplObserver SplObserver SplPriorityQueue SplQueue SplStack SplSubject SplSubject SplTempFileObject TypeError UnderflowException UnexpectedValueException ArrayAccess Closure Generator Iterator IteratorAggregate Serializable Throwable Traversable WeakReference Directory __PHP_Incomplete_Class parent php_user_filter self static stdClass"};return{aliases:["php","php3","php4","php5","php6","php7"],case_insensitive:!0,keywords:i,contains:[e.HASH_COMMENT_MODE,e.COMMENT("//","$",{contains:[t]}),e.COMMENT("/\\*","\\*/",{contains:[{className:"doctag",begin:"@[A-Za-z]+"}]}),e.COMMENT("__halt_compiler.+?;",!1,{endsWithParent:!0,keywords:"__halt_compiler"}),{className:"string",begin:/<<<['"]?\w+['"]?$/,end:/^\w+;?$/,contains:[e.BACKSLASH_ESCAPE,{className:"subst",variants:[{begin:/\$\w+/},{begin:/\{\$/,end:/\}/}]}]},t,{className:"keyword",begin:/\$this\b/},r,{begin:/(::|->)+[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*/},{className:"function",beginKeywords:"fn function",end:/[;{]/,excludeEnd:!0,illegal:"[$%\\[]",contains:[e.UNDERSCORE_TITLE_MODE,{className:"params",begin:"\\(",end:"\\)",excludeBegin:!0,excludeEnd:!0,keywords:i,contains:["self",r,e.C_BLOCK_COMMENT_MODE,a,n]}]},{className:"class",beginKeywords:"class 
interface",end:"{",excludeEnd:!0,illegal:/[:\(\$"]/,contains:[{beginKeywords:"extends implements"},e.UNDERSCORE_TITLE_MODE]},{beginKeywords:"namespace",end:";",illegal:/[\.']/,contains:[e.UNDERSCORE_TITLE_MODE]},{beginKeywords:"use",end:";",contains:[e.UNDERSCORE_TITLE_MODE]},{begin:"=>"},a,n]}}}());hljs.registerLanguage("nginx",function(){"use strict";return function(e){var n={className:"variable",variants:[{begin:/\$\d+/},{begin:/\$\{/,end:/}/},{begin:"[\\$\\@]"+e.UNDERSCORE_IDENT_RE}]},a={endsWithParent:!0,keywords:{$pattern:"[a-z/_]+",literal:"on off yes no true false none blocked debug info notice warn error crit select break last permanent redirect kqueue rtsig epoll poll /dev/poll"},relevance:0,illegal:"=>",contains:[e.HASH_COMMENT_MODE,{className:"string",contains:[e.BACKSLASH_ESCAPE,n],variants:[{begin:/"/,end:/"/},{begin:/'/,end:/'/}]},{begin:"([a-z]+):/",end:"\\s",endsWithParent:!0,excludeEnd:!0,contains:[n]},{className:"regexp",contains:[e.BACKSLASH_ESCAPE,n],variants:[{begin:"\\s\\^",end:"\\s|{|;",returnEnd:!0},{begin:"~\\*?\\s+",end:"\\s|{|;",returnEnd:!0},{begin:"\\*(\\.[a-z\\-]+)+"},{begin:"([a-z\\-]+\\.)+\\*"}]},{className:"number",begin:"\\b\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}(:\\d{1,5})?\\b"},{className:"number",begin:"\\b\\d+[kKmMgGdshdwy]*\\b",relevance:0},n]};return{name:"Nginx config",aliases:["nginxconf"],contains:[e.HASH_COMMENT_MODE,{begin:e.UNDERSCORE_IDENT_RE+"\\s+{",returnBegin:!0,end:"{",contains:[{className:"section",begin:e.UNDERSCORE_IDENT_RE}],relevance:0},{begin:e.UNDERSCORE_IDENT_RE+"\\s",end:";|{",returnBegin:!0,contains:[{className:"attribute",begin:e.UNDERSCORE_IDENT_RE,starts:a}],relevance:0}],illegal:"[^\\s\\}]"}}}());hljs.registerLanguage("csharp",function(){"use strict";return function(e){var n={keyword:"abstract as base bool break byte case catch char checked const continue decimal default delegate do double enum event explicit extern finally fixed float for foreach goto if implicit in int interface internal is lock 
long object operator out override params private protected public readonly ref sbyte sealed short sizeof stackalloc static string struct switch this try typeof uint ulong unchecked unsafe ushort using virtual void volatile while add alias ascending async await by descending dynamic equals from get global group into join let nameof on orderby partial remove select set value var when where yield",literal:"null false true"},i=e.inherit(e.TITLE_MODE,{begin:"[a-zA-Z](\\.?\\w)*"}),a={className:"number",variants:[{begin:"\\b(0b[01']+)"},{begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)(u|U|l|L|ul|UL|f|F|b|B)"},{begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)"}],relevance:0},s={className:"string",begin:'@"',end:'"',contains:[{begin:'""'}]},t=e.inherit(s,{illegal:/\n/}),l={className:"subst",begin:"{",end:"}",keywords:n},r=e.inherit(l,{illegal:/\n/}),c={className:"string",begin:/\$"/,end:'"',illegal:/\n/,contains:[{begin:"{{"},{begin:"}}"},e.BACKSLASH_ESCAPE,r]},o={className:"string",begin:/\$@"/,end:'"',contains:[{begin:"{{"},{begin:"}}"},{begin:'""'},l]},g=e.inherit(o,{illegal:/\n/,contains:[{begin:"{{"},{begin:"}}"},{begin:'""'},r]});l.contains=[o,c,s,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,a,e.C_BLOCK_COMMENT_MODE],r.contains=[g,c,t,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,a,e.inherit(e.C_BLOCK_COMMENT_MODE,{illegal:/\n/})];var d={variants:[o,c,s,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]},E={begin:"<",end:">",contains:[{beginKeywords:"in out"},i]},_=e.IDENT_RE+"(<"+e.IDENT_RE+"(\\s*,\\s*"+e.IDENT_RE+")*>)?(\\[\\])?",b={begin:"@"+e.IDENT_RE,relevance:0};return{name:"C#",aliases:["cs","c#"],keywords:n,illegal:/::/,contains:[e.COMMENT("///","$",{returnBegin:!0,contains:[{className:"doctag",variants:[{begin:"///",relevance:0},{begin:"\x3c!--|--\x3e"},{begin:""}]}]}),e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,{className:"meta",begin:"#",end:"$",keywords:{"meta-keyword":"if else elif endif define undef warning error line region endregion 
pragma checksum"}},d,a,{beginKeywords:"class interface",end:/[{;=]/,illegal:/[^\s:,]/,contains:[{beginKeywords:"where class"},i,E,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{beginKeywords:"namespace",end:/[{;=]/,illegal:/[^\s:]/,contains:[i,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{className:"meta",begin:"^\\s*\\[",excludeBegin:!0,end:"\\]",excludeEnd:!0,contains:[{className:"meta-string",begin:/"/,end:/"/}]},{beginKeywords:"new return throw await else",relevance:0},{className:"function",begin:"("+_+"\\s+)+"+e.IDENT_RE+"\\s*(\\<.+\\>)?\\s*\\(",returnBegin:!0,end:/\s*[{;=]/,excludeEnd:!0,keywords:n,contains:[{begin:e.IDENT_RE+"\\s*(\\<.+\\>)?\\s*\\(",returnBegin:!0,contains:[e.TITLE_MODE,E],relevance:0},{className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:n,relevance:0,contains:[d,a,e.C_BLOCK_COMMENT_MODE]},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},b]}}}());hljs.registerLanguage("perl",function(){"use strict";return function(e){var n={$pattern:/[\w.]+/,keyword:"getpwent getservent quotemeta msgrcv scalar kill dbmclose undef lc ma syswrite tr send umask sysopen shmwrite vec qx utime local oct semctl localtime readpipe do return format read sprintf dbmopen pop getpgrp not getpwnam rewinddir qq fileno qw endprotoent wait sethostent bless s|0 opendir continue each sleep endgrent shutdown dump chomp connect getsockname die socketpair close flock exists index shmget sub for endpwent redo lstat msgctl setpgrp abs exit select print ref gethostbyaddr unshift fcntl syscall goto getnetbyaddr join gmtime symlink semget splice x|0 getpeername recv log setsockopt cos last reverse gethostbyname getgrnam study formline endhostent times chop length gethostent getnetent pack getprotoent getservbyname rand mkdir pos chmod y|0 substr endnetent printf next open msgsnd readdir use unlink getsockopt getpriority rindex wantarray hex system getservbyport endservent int chr untie rmdir prototype tell listen fork shmread ucfirst setprotoent else 
sysseek link getgrgid shmctl waitpid unpack getnetbyname reset chdir grep split require caller lcfirst until warn while values shift telldir getpwuid my getprotobynumber delete and sort uc defined srand accept package seekdir getprotobyname semop our rename seek if q|0 chroot sysread setpwent no crypt getc chown sqrt write setnetent setpriority foreach tie sin msgget map stat getlogin unless elsif truncate exec keys glob tied closedir ioctl socket readlink eval xor readline binmode setservent eof ord bind alarm pipe atan2 getgrent exp time push setgrent gt lt or ne m|0 break given say state when"},t={className:"subst",begin:"[$@]\\{",end:"\\}",keywords:n},s={begin:"->{",end:"}"},r={variants:[{begin:/\$\d/},{begin:/[\$%@](\^\w\b|#\w+(::\w+)*|{\w+}|\w+(::\w*)*)/},{begin:/[\$%@][^\s\w{]/,relevance:0}]},i=[e.BACKSLASH_ESCAPE,t,r],a=[r,e.HASH_COMMENT_MODE,e.COMMENT("^\\=\\w","\\=cut",{endsWithParent:!0}),s,{className:"string",contains:i,variants:[{begin:"q[qwxr]?\\s*\\(",end:"\\)",relevance:5},{begin:"q[qwxr]?\\s*\\[",end:"\\]",relevance:5},{begin:"q[qwxr]?\\s*\\{",end:"\\}",relevance:5},{begin:"q[qwxr]?\\s*\\|",end:"\\|",relevance:5},{begin:"q[qwxr]?\\s*\\<",end:"\\>",relevance:5},{begin:"qw\\s+q",end:"q",relevance:5},{begin:"'",end:"'",contains:[e.BACKSLASH_ESCAPE]},{begin:'"',end:'"'},{begin:"`",end:"`",contains:[e.BACKSLASH_ESCAPE]},{begin:"{\\w+}",contains:[],relevance:0},{begin:"-?\\w+\\s*\\=\\>",contains:[],relevance:0}]},{className:"number",begin:"(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b",relevance:0},{begin:"(\\/\\/|"+e.RE_STARTERS_RE+"|\\b(split|return|print|reverse|grep)\\b)\\s*",keywords:"split return print reverse 
grep",relevance:0,contains:[e.HASH_COMMENT_MODE,{className:"regexp",begin:"(s|tr|y)/(\\\\.|[^/])*/(\\\\.|[^/])*/[a-z]*",relevance:10},{className:"regexp",begin:"(m|qr)?/",end:"/[a-z]*",contains:[e.BACKSLASH_ESCAPE],relevance:0}]},{className:"function",beginKeywords:"sub",end:"(\\s*\\(.*?\\))?[;{]",excludeEnd:!0,relevance:5,contains:[e.TITLE_MODE]},{begin:"-\\w\\b",relevance:0},{begin:"^__DATA__$",end:"^__END__$",subLanguage:"mojolicious",contains:[{begin:"^@@.*",end:"$",className:"comment"}]}];return t.contains=a,s.contains=a,{name:"Perl",aliases:["pl","pm"],keywords:n,contains:a}}}());hljs.registerLanguage("swift",function(){"use strict";return function(e){var i={keyword:"#available #colorLiteral #column #else #elseif #endif #file #fileLiteral #function #if #imageLiteral #line #selector #sourceLocation _ __COLUMN__ __FILE__ __FUNCTION__ __LINE__ Any as as! as? associatedtype associativity break case catch class continue convenience default defer deinit didSet do dynamic dynamicType else enum extension fallthrough false fileprivate final for func get guard if import in indirect infix init inout internal is lazy left let mutating nil none nonmutating open operator optional override postfix precedence prefix private protocol Protocol public repeat required rethrows return right self Self set static struct subscript super switch throw throws true try try! try? 
Type typealias unowned var weak where while willSet",literal:"true false nil",built_in:"abs advance alignof alignofValue anyGenerator assert assertionFailure bridgeFromObjectiveC bridgeFromObjectiveCUnconditional bridgeToObjectiveC bridgeToObjectiveCUnconditional c compactMap contains count countElements countLeadingZeros debugPrint debugPrintln distance dropFirst dropLast dump encodeBitsAsWords enumerate equal fatalError filter find getBridgedObjectiveCType getVaList indices insertionSort isBridgedToObjectiveC isBridgedVerbatimToObjectiveC isUniquelyReferenced isUniquelyReferencedNonObjC join lazy lexicographicalCompare map max maxElement min minElement numericCast overlaps partition posix precondition preconditionFailure print println quickSort readLine reduce reflect reinterpretCast reverse roundUpToAlignment sizeof sizeofValue sort split startsWith stride strideof strideofValue swap toString transcode underestimateCount unsafeAddressOf unsafeBitCast unsafeDowncast unsafeUnwrap unsafeReflect withExtendedLifetime withObjectAtPlusZero withUnsafePointer withUnsafePointerToObject withUnsafeMutablePointer withUnsafeMutablePointers withUnsafePointer withUnsafePointers withVaList zip"},n=e.COMMENT("/\\*","\\*/",{contains:["self"]}),t={className:"subst",begin:/\\\(/,end:"\\)",keywords:i,contains:[]},a={className:"string",contains:[e.BACKSLASH_ESCAPE,t],variants:[{begin:/"""/,end:/"""/},{begin:/"/,end:/"/}]},r={className:"number",begin:"\\b([\\d_]+(\\.[\\deE_]+)?|0x[a-fA-F0-9_]+(\\.[a-fA-F0-9p_]+)?|0b[01_]+|0o[0-7_]+)\\b",relevance:0};return 
t.contains=[r],{name:"Swift",keywords:i,contains:[a,e.C_LINE_COMMENT_MODE,n,{className:"type",begin:"\\b[A-Z][\\wÀ-ʸ']*[!?]"},{className:"type",begin:"\\b[A-Z][\\wÀ-ʸ']*",relevance:0},r,{className:"function",beginKeywords:"func",end:"{",excludeEnd:!0,contains:[e.inherit(e.TITLE_MODE,{begin:/[A-Za-z$_][0-9A-Za-z$_]*/}),{begin://},{className:"params",begin:/\(/,end:/\)/,endsParent:!0,keywords:i,contains:["self",r,a,e.C_BLOCK_COMMENT_MODE,{begin:":"}],illegal:/["']/}],illegal:/\[|%/},{className:"class",beginKeywords:"struct protocol class extension enum",keywords:i,end:"\\{",excludeEnd:!0,contains:[e.inherit(e.TITLE_MODE,{begin:/[A-Za-z$_][\u00C0-\u02B80-9A-Za-z$_]*/})]},{className:"meta",begin:"(@discardableResult|@warn_unused_result|@exported|@lazy|@noescape|@NSCopying|@NSManaged|@objc|@objcMembers|@convention|@required|@noreturn|@IBAction|@IBDesignable|@IBInspectable|@IBOutlet|@infix|@prefix|@postfix|@autoclosure|@testable|@available|@nonobjc|@NSApplicationMain|@UIApplicationMain|@dynamicMemberLookup|@propertyWrapper)\\b"},{beginKeywords:"import",end:/$/,contains:[e.C_LINE_COMMENT_MODE,n]}]}}}());hljs.registerLanguage("makefile",function(){"use strict";return function(e){var i={className:"variable",variants:[{begin:"\\$\\("+e.UNDERSCORE_IDENT_RE+"\\)",contains:[e.BACKSLASH_ESCAPE]},{begin:/\$[@%`]+/}]}]}]};return{name:"HTML, 
XML",aliases:["html","xhtml","rss","atom","xjb","xsd","xsl","plist","wsf","svg"],case_insensitive:!0,contains:[{className:"meta",begin:"",relevance:10,contains:[a,i,t,s,{begin:"\\[",end:"\\]",contains:[{className:"meta",begin:"",contains:[a,s,i,t]}]}]},e.COMMENT("\x3c!--","--\x3e",{relevance:10}),{begin:"<\\!\\[CDATA\\[",end:"\\]\\]>",relevance:10},n,{className:"meta",begin:/<\?xml/,end:/\?>/,relevance:10},{className:"tag",begin:")",end:">",keywords:{name:"style"},contains:[c],starts:{end:"",returnEnd:!0,subLanguage:["css","xml"]}},{className:"tag",begin:")",end:">",keywords:{name:"script"},contains:[c],starts:{end:"<\/script>",returnEnd:!0,subLanguage:["javascript","handlebars","xml"]}},{className:"tag",begin:"",contains:[{className:"name",begin:/[^\/><\s]+/,relevance:0},c]}]}}}());hljs.registerLanguage("bash",function(){"use strict";return function(e){const s={};Object.assign(s,{className:"variable",variants:[{begin:/\$[\w\d#@][\w\d_]*/},{begin:/\$\{/,end:/\}/,contains:[{begin:/:-/,contains:[s]}]}]});const t={className:"subst",begin:/\$\(/,end:/\)/,contains:[e.BACKSLASH_ESCAPE]},n={className:"string",begin:/"/,end:/"/,contains:[e.BACKSLASH_ESCAPE,s,t]};t.contains.push(n);const a={begin:/\$\(\(/,end:/\)\)/,contains:[{begin:/\d+#[0-9a-f]+/,className:"number"},e.NUMBER_MODE,s]},i=e.SHEBANG({binary:"(fish|bash|zsh|sh|csh|ksh|tcsh|dash|scsh)",relevance:10}),c={className:"function",begin:/\w[\w\d_]*\s*\(\s*\)\s*\{/,returnBegin:!0,contains:[e.inherit(e.TITLE_MODE,{begin:/\w[\w\d_]*/})],relevance:0};return{name:"Bash",aliases:["sh","zsh"],keywords:{$pattern:/\b-?[a-z\._]+\b/,keyword:"if then else elif fi for while in do done case esac function",literal:"true false",built_in:"break cd continue eval exec exit export getopts hash pwd readonly return shift test times trap umask unset alias bind builtin caller command declare echo enable help let local logout mapfile printf read readarray source type typeset ulimit unalias set shopt autoload bg bindkey bye cap chdir clone 
comparguments compcall compctl compdescribe compfiles compgroups compquote comptags comptry compvalues dirs disable disown echotc echoti emulate fc fg float functions getcap getln history integer jobs kill limit log noglob popd print pushd pushln rehash sched setcap setopt stat suspend ttyctl unfunction unhash unlimit unsetopt vared wait whence where which zcompile zformat zftp zle zmodload zparseopts zprof zpty zregexparse zsocket zstyle ztcp",_:"-ne -eq -lt -gt -f -d -e -s -l -a"},contains:[i,e.SHEBANG(),c,a,e.HASH_COMMENT_MODE,n,{className:"",begin:/\\"/},{className:"string",begin:/'/,end:/'/},s]}}}());hljs.registerLanguage("c-like",function(){"use strict";return function(e){function t(e){return"(?:"+e+")?"}var n="(decltype\\(auto\\)|"+t("[a-zA-Z_]\\w*::")+"[a-zA-Z_]\\w*"+t("<.*?>")+")",r={className:"keyword",begin:"\\b[a-z\\d_]*_t\\b"},a={className:"string",variants:[{begin:'(u8?|U|L)?"',end:'"',illegal:"\\n",contains:[e.BACKSLASH_ESCAPE]},{begin:"(u8?|U|L)?'(\\\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4,8}|[0-7]{3}|\\S)|.)",end:"'",illegal:"."},e.END_SAME_AS_BEGIN({begin:/(?:u8?|U|L)?R"([^()\\ ]{0,16})\(/,end:/\)([^()\\ ]{0,16})"/})]},i={className:"number",variants:[{begin:"\\b(0b[01']+)"},{begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)(u|U|l|L|ul|UL|f|F|b|B)"},{begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)"}],relevance:0},s={className:"meta",begin:/#\s*[a-z]+\b/,end:/$/,keywords:{"meta-keyword":"if else elif endif define undef warning error line pragma _Pragma ifdef ifndef include"},contains:[{begin:/\\\n/,relevance:0},e.inherit(a,{className:"meta-string"}),{className:"meta-string",begin:/<.*?>/,end:/$/,illegal:"\\n"},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},o={className:"title",begin:t("[a-zA-Z_]\\w*::")+e.IDENT_RE,relevance:0},c=t("[a-zA-Z_]\\w*::")+e.IDENT_RE+"\\s*\\(",l={keyword:"int float while private char char8_t char16_t char32_t catch import module export virtual operator sizeof dynamic_cast|10 typedef 
const_cast|10 const for static_cast|10 union namespace unsigned long volatile static protected bool template mutable if public friend do goto auto void enum else break extern using asm case typeid wchar_t short reinterpret_cast|10 default double register explicit signed typename try this switch continue inline delete alignas alignof constexpr consteval constinit decltype concept co_await co_return co_yield requires noexcept static_assert thread_local restrict final override atomic_bool atomic_char atomic_schar atomic_uchar atomic_short atomic_ushort atomic_int atomic_uint atomic_long atomic_ulong atomic_llong atomic_ullong new throw return and and_eq bitand bitor compl not not_eq or or_eq xor xor_eq",built_in:"std string wstring cin cout cerr clog stdin stdout stderr stringstream istringstream ostringstream auto_ptr deque list queue stack vector map set pair bitset multiset multimap unordered_set unordered_map unordered_multiset unordered_multimap priority_queue make_pair array shared_ptr abort terminate abs acos asin atan2 atan calloc ceil cosh cos exit exp fabs floor fmod fprintf fputs free frexp fscanf future isalnum isalpha iscntrl isdigit isgraph islower isprint ispunct isspace isupper isxdigit tolower toupper labs ldexp log10 log malloc realloc memchr memcmp memcpy memset modf pow printf putchar puts scanf sinh sin snprintf sprintf sqrt sscanf strcat strchr strcmp strcpy strcspn strlen strncat strncmp strncpy strpbrk strrchr strspn strstr tanh tan vfprintf vprintf vsprintf endl initializer_list unique_ptr _Bool complex _Complex imaginary _Imaginary",literal:"true false nullptr NULL"},d=[r,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,i,a],_={variants:[{begin:/=/,end:/;/},{begin:/\(/,end:/\)/},{beginKeywords:"new throw return 
else",end:/;/}],keywords:l,contains:d.concat([{begin:/\(/,end:/\)/,keywords:l,contains:d.concat(["self"]),relevance:0}]),relevance:0},u={className:"function",begin:"("+n+"[\\*&\\s]+)+"+c,returnBegin:!0,end:/[{;=]/,excludeEnd:!0,keywords:l,illegal:/[^\w\s\*&:<>]/,contains:[{begin:"decltype\\(auto\\)",keywords:l,relevance:0},{begin:c,returnBegin:!0,contains:[o],relevance:0},{className:"params",begin:/\(/,end:/\)/,keywords:l,relevance:0,contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,a,i,r,{begin:/\(/,end:/\)/,keywords:l,relevance:0,contains:["self",e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,a,i,r]}]},r,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,s]};return{aliases:["c","cc","h","c++","h++","hpp","hh","hxx","cxx"],keywords:l,disableAutodetect:!0,illegal:"",keywords:l,contains:["self",r]},{begin:e.IDENT_RE+"::",keywords:l},{className:"class",beginKeywords:"class struct",end:/[{;:]/,contains:[{begin://,contains:["self"]},e.TITLE_MODE]}]),exports:{preprocessor:s,strings:a,keywords:l}}}}());hljs.registerLanguage("coffeescript",function(){"use strict";const 
e=["as","in","of","if","for","while","finally","var","new","function","do","return","void","else","break","catch","instanceof","with","throw","case","default","try","switch","continue","typeof","delete","let","yield","const","class","debugger","async","await","static","import","from","export","extends"],n=["true","false","null","undefined","NaN","Infinity"],a=[].concat(["setInterval","setTimeout","clearInterval","clearTimeout","require","exports","eval","isFinite","isNaN","parseFloat","parseInt","decodeURI","decodeURIComponent","encodeURI","encodeURIComponent","escape","unescape"],["arguments","this","super","console","window","document","localStorage","module","global"],["Intl","DataView","Number","Math","Date","String","RegExp","Object","Function","Boolean","Error","Symbol","Set","Map","WeakSet","WeakMap","Proxy","Reflect","JSON","Promise","Float64Array","Int16Array","Int32Array","Int8Array","Uint16Array","Uint32Array","Float32Array","Array","Uint8Array","Uint8ClampedArray","ArrayBuffer"],["EvalError","InternalError","RangeError","ReferenceError","SyntaxError","TypeError","URIError"]);return function(r){var t={keyword:e.concat(["then","unless","until","loop","by","when","and","or","is","isnt","not"]).filter((e=>n=>!e.includes(n))(["var","const","let","function","static"])).join(" "),literal:n.concat(["yes","no","on","off"]).join(" "),built_in:a.concat(["npm","print"]).join(" ")},i="[A-Za-z$_][0-9A-Za-z$_]*",s={className:"subst",begin:/#\{/,end:/}/,keywords:t},o=[r.BINARY_NUMBER_MODE,r.inherit(r.C_NUMBER_MODE,{starts:{end:"(\\s*/)?",relevance:0}}),{className:"string",variants:[{begin:/'''/,end:/'''/,contains:[r.BACKSLASH_ESCAPE]},{begin:/'/,end:/'/,contains:[r.BACKSLASH_ESCAPE]},{begin:/"""/,end:/"""/,contains:[r.BACKSLASH_ESCAPE,s]},{begin:/"/,end:/"/,contains:[r.BACKSLASH_ESCAPE,s]}]},{className:"regexp",variants:[{begin:"///",end:"///",contains:[s,r.HASH_COMMENT_MODE]},{begin:"//[gim]{0,3}(?=\\W)",relevance:0},{begin:/\/(?![ 
*]).*?(?![\\]).\/[gim]{0,3}(?=\W)/}]},{begin:"@"+i},{subLanguage:"javascript",excludeBegin:!0,excludeEnd:!0,variants:[{begin:"```",end:"```"},{begin:"`",end:"`"}]}];s.contains=o;var c=r.inherit(r.TITLE_MODE,{begin:i}),l={className:"params",begin:"\\([^\\(]",returnBegin:!0,contains:[{begin:/\(/,end:/\)/,keywords:t,contains:["self"].concat(o)}]};return{name:"CoffeeScript",aliases:["coffee","cson","iced"],keywords:t,illegal:/\/\*/,contains:o.concat([r.COMMENT("###","###"),r.HASH_COMMENT_MODE,{className:"function",begin:"^\\s*"+i+"\\s*=\\s*(\\(.*\\))?\\s*\\B[-=]>",end:"[-=]>",returnBegin:!0,contains:[c,l]},{begin:/[:\(,=]\s*/,relevance:0,contains:[{className:"function",begin:"(\\(.*\\))?\\s*\\B[-=]>",end:"[-=]>",returnBegin:!0,contains:[l]}]},{className:"class",beginKeywords:"class",end:"$",illegal:/[:="\[\]]/,contains:[{beginKeywords:"extends",endsWithParent:!0,illegal:/[:="\[\]]/,contains:[c]},c]},{begin:i+":",end:":",returnBegin:!0,returnEnd:!0,relevance:0}])}}}());hljs.registerLanguage("ruby",function(){"use strict";return function(e){var n="[a-zA-Z_]\\w*[!?=]?|[-+~]\\@|<<|>>|=~|===?|<=>|[<>]=?|\\*\\*|[-/+%^&*~`|]|\\[\\]=?",a={keyword:"and then defined module in return redo if BEGIN retry end for self when next until do begin unless END rescue else break undef not super class case require yield alias while ensure elsif or include attr_reader attr_writer attr_accessor",literal:"true false 
nil"},s={className:"doctag",begin:"@[A-Za-z]+"},i={begin:"#<",end:">"},r=[e.COMMENT("#","$",{contains:[s]}),e.COMMENT("^\\=begin","^\\=end",{contains:[s],relevance:10}),e.COMMENT("^__END__","\\n$")],c={className:"subst",begin:"#\\{",end:"}",keywords:a},t={className:"string",contains:[e.BACKSLASH_ESCAPE,c],variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/`/,end:/`/},{begin:"%[qQwWx]?\\(",end:"\\)"},{begin:"%[qQwWx]?\\[",end:"\\]"},{begin:"%[qQwWx]?{",end:"}"},{begin:"%[qQwWx]?<",end:">"},{begin:"%[qQwWx]?/",end:"/"},{begin:"%[qQwWx]?%",end:"%"},{begin:"%[qQwWx]?-",end:"-"},{begin:"%[qQwWx]?\\|",end:"\\|"},{begin:/\B\?(\\\d{1,3}|\\x[A-Fa-f0-9]{1,2}|\\u[A-Fa-f0-9]{4}|\\?\S)\b/},{begin:/<<[-~]?'?(\w+)(?:.|\n)*?\n\s*\1\b/,returnBegin:!0,contains:[{begin:/<<[-~]?'?/},e.END_SAME_AS_BEGIN({begin:/(\w+)/,end:/(\w+)/,contains:[e.BACKSLASH_ESCAPE,c]})]}]},b={className:"params",begin:"\\(",end:"\\)",endsParent:!0,keywords:a},d=[t,i,{className:"class",beginKeywords:"class module",end:"$|;",illegal:/=/,contains:[e.inherit(e.TITLE_MODE,{begin:"[A-Za-z_]\\w*(::\\w+)*(\\?|\\!)?"}),{begin:"<\\s*",contains:[{begin:"("+e.IDENT_RE+"::)?"+e.IDENT_RE}]}].concat(r)},{className:"function",beginKeywords:"def",end:"$|;",contains:[e.inherit(e.TITLE_MODE,{begin:n}),b].concat(r)},{begin:e.IDENT_RE+"::"},{className:"symbol",begin:e.UNDERSCORE_IDENT_RE+"(\\!|\\?)?:",relevance:0},{className:"symbol",begin:":(?!\\s)",contains:[t,{begin:n}],relevance:0},{className:"number",begin:"(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b",relevance:0},{begin:"(\\$\\W)|((\\$|\\@\\@?)(\\w+))"},{className:"params",begin:/\|/,end:/\|/,keywords:a},{begin:"("+e.RE_STARTERS_RE+"|unless)\\s*",keywords:"unless",contains:[i,{className:"regexp",contains:[e.BACKSLASH_ESCAPE,c],illegal:/\n/,variants:[{begin:"/",end:"/[a-z]*"},{begin:"%r{",end:"}[a-z]*"},{begin:"%r\\(",end:"\\)[a-z]*"},{begin:"%r!",end:"![a-z]*"},{begin:"%r\\[",end:"\\][a-z]*"}]}].concat(r),relevance:0}].concat(r);c.contain
s=d,b.contains=d;var g=[{begin:/^\s*=>/,starts:{end:"$",contains:d}},{className:"meta",begin:"^([>?]>|[\\w#]+\\(\\w+\\):\\d+:\\d+>|(\\w+-)?\\d+\\.\\d+\\.\\d(p\\d+)?[^>]+>)",starts:{end:"$",contains:d}}];return{name:"Ruby",aliases:["rb","gemspec","podspec","thor","irb"],keywords:a,illegal:/\/\*/,contains:r.concat(g).concat(d)}}}());hljs.registerLanguage("yaml",function(){"use strict";return function(e){var n="true false yes no null",a="[\\w#;/?:@&=+$,.~*\\'()[\\]]+",s={className:"string",relevance:0,variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/\S+/}],contains:[e.BACKSLASH_ESCAPE,{className:"template-variable",variants:[{begin:"{{",end:"}}"},{begin:"%{",end:"}"}]}]},i=e.inherit(s,{variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/[^\s,{}[\]]+/}]}),l={end:",",endsWithParent:!0,excludeEnd:!0,contains:[],keywords:n,relevance:0},t={begin:"{",end:"}",contains:[l],illegal:"\\n",relevance:0},g={begin:"\\[",end:"\\]",contains:[l],illegal:"\\n",relevance:0},b=[{className:"attr",variants:[{begin:"\\w[\\w :\\/.-]*:(?=[ \t]|$)"},{begin:'"\\w[\\w :\\/.-]*":(?=[ \t]|$)'},{begin:"'\\w[\\w :\\/.-]*':(?=[ \t]|$)"}]},{className:"meta",begin:"^---s*$",relevance:10},{className:"string",begin:"[\\|>]([0-9]?[+-])?[ ]*\\n( *)[\\S ]+\\n(\\2[\\S ]+\\n?)*"},{begin:"<%[%=-]?",end:"[%-]?%>",subLanguage:"ruby",excludeBegin:!0,excludeEnd:!0,relevance:0},{className:"type",begin:"!\\w+!"+a},{className:"type",begin:"!<"+a+">"},{className:"type",begin:"!"+a},{className:"type",begin:"!!"+a},{className:"meta",begin:"&"+e.UNDERSCORE_IDENT_RE+"$"},{className:"meta",begin:"\\*"+e.UNDERSCORE_IDENT_RE+"$"},{className:"bullet",begin:"\\-(?=[ ]|$)",relevance:0},e.HASH_COMMENT_MODE,{beginKeywords:n,keywords:{literal:n}},{className:"number",begin:"\\b[0-9]{4}(-[0-9][0-9]){0,2}([Tt \\t][0-9][0-9]?(:[0-9][0-9]){2})?(\\.[0-9]*)?([ \\t])*(Z|[-+][0-9][0-9]?(:[0-9][0-9])?)?\\b"},{className:"number",begin:e.C_NUMBER_RE+"\\b"},t,g,s],c=[...b];return 
c.pop(),c.push(i),l.contains=c,{name:"YAML",case_insensitive:!0,aliases:["yml","YAML"],contains:b}}}());hljs.registerLanguage("d",function(){"use strict";return function(e){var a={$pattern:e.UNDERSCORE_IDENT_RE,keyword:"abstract alias align asm assert auto body break byte case cast catch class const continue debug default delete deprecated do else enum export extern final finally for foreach foreach_reverse|10 goto if immutable import in inout int interface invariant is lazy macro mixin module new nothrow out override package pragma private protected public pure ref return scope shared static struct super switch synchronized template this throw try typedef typeid typeof union unittest version void volatile while with __FILE__ __LINE__ __gshared|10 __thread __traits __DATE__ __EOF__ __TIME__ __TIMESTAMP__ __VENDOR__ __VERSION__",built_in:"bool cdouble cent cfloat char creal dchar delegate double dstring float function idouble ifloat ireal long real short string ubyte ucent uint ulong ushort wchar wstring",literal:"false null 
true"},d="((0|[1-9][\\d_]*)|0[bB][01_]+|0[xX]([\\da-fA-F][\\da-fA-F_]*|_[\\da-fA-F][\\da-fA-F_]*))",n="\\\\(['\"\\?\\\\abfnrtv]|u[\\dA-Fa-f]{4}|[0-7]{1,3}|x[\\dA-Fa-f]{2}|U[\\dA-Fa-f]{8})|&[a-zA-Z\\d]{2,};",t={className:"number",begin:"\\b"+d+"(L|u|U|Lu|LU|uL|UL)?",relevance:0},_={className:"number",begin:"\\b(((0[xX](([\\da-fA-F][\\da-fA-F_]*|_[\\da-fA-F][\\da-fA-F_]*)\\.([\\da-fA-F][\\da-fA-F_]*|_[\\da-fA-F][\\da-fA-F_]*)|\\.?([\\da-fA-F][\\da-fA-F_]*|_[\\da-fA-F][\\da-fA-F_]*))[pP][+-]?(0|[1-9][\\d_]*|\\d[\\d_]*|[\\d_]+?\\d))|((0|[1-9][\\d_]*|\\d[\\d_]*|[\\d_]+?\\d)(\\.\\d*|([eE][+-]?(0|[1-9][\\d_]*|\\d[\\d_]*|[\\d_]+?\\d)))|\\d+\\.(0|[1-9][\\d_]*|\\d[\\d_]*|[\\d_]+?\\d)(0|[1-9][\\d_]*|\\d[\\d_]*|[\\d_]+?\\d)|\\.(0|[1-9][\\d_]*)([eE][+-]?(0|[1-9][\\d_]*|\\d[\\d_]*|[\\d_]+?\\d))?))([fF]|L|i|[fF]i|Li)?|"+d+"(i|[fF]i|Li))",relevance:0},r={className:"string",begin:"'("+n+"|.)",end:"'",illegal:"."},i={className:"string",begin:'"',contains:[{begin:n,relevance:0}],end:'"[cwd]?'},s=e.COMMENT("\\/\\+","\\+\\/",{contains:["self"],relevance:10});return{name:"D",keywords:a,contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,s,{className:"string",begin:'x"[\\da-fA-F\\s\\n\\r]*"[cwd]?',relevance:10},i,{className:"string",begin:'[rq]"',end:'"[cwd]?',relevance:5},{className:"string",begin:"`",end:"`[cwd]?"},{className:"string",begin:'q"\\{',end:'\\}"'},_,t,r,{className:"meta",begin:"^#!",end:"$",relevance:5},{className:"meta",begin:"#(line)",end:"$",relevance:5},{className:"keyword",begin:"@[a-zA-Z_][a-zA-Z_\\d]*"}]}}}());hljs.registerLanguage("properties",function(){"use strict";return function(e){var n="[ \\t\\f]*",t="("+n+"[:=]"+n+"|[ \\t\\f]+)",a="([^\\\\:= \\t\\f\\n]|\\\\.)+",s={end:t,relevance:0,starts:{className:"string",end:/$/,relevance:0,contains:[{begin:"\\\\\\n"}]}};return{name:".properties",case_insensitive:!0,illegal:/\S/,contains:[e.COMMENT("^\\s*[!#]","$"),{begin:"([^\\\\\\W:= 
\\t\\f\\n]|\\\\.)+"+t,returnBegin:!0,contains:[{className:"attr",begin:"([^\\\\\\W:= \\t\\f\\n]|\\\\.)+",endsParent:!0,relevance:0}],starts:s},{begin:a+t,returnBegin:!0,relevance:0,contains:[{className:"meta",begin:a,endsParent:!0,relevance:0}],starts:s},{className:"attr",relevance:0,begin:a+n+"$"}]}}}());hljs.registerLanguage("http",function(){"use strict";return function(e){var n="HTTP/[0-9\\.]+";return{name:"HTTP",aliases:["https"],illegal:"\\S",contains:[{begin:"^"+n,end:"$",contains:[{className:"number",begin:"\\b\\d{3}\\b"}]},{begin:"^[A-Z]+ (.*?) "+n+"$",returnBegin:!0,end:"$",contains:[{className:"string",begin:" ",end:" ",excludeBegin:!0,excludeEnd:!0},{begin:n},{className:"keyword",begin:"[A-Z]+"}]},{className:"attribute",begin:"^\\w",end:": ",excludeEnd:!0,illegal:"\\n|\\s|=",starts:{end:"$",relevance:0}},{begin:"\\n\\n",starts:{subLanguage:[],endsWithParent:!0}}]}}}());hljs.registerLanguage("haskell",function(){"use strict";return function(e){var n={variants:[e.COMMENT("--","$"),e.COMMENT("{-","-}",{contains:["self"]})]},i={className:"meta",begin:"{-#",end:"#-}"},a={className:"meta",begin:"^#",end:"$"},s={className:"type",begin:"\\b[A-Z][\\w']*",relevance:0},l={begin:"\\(",end:"\\)",illegal:'"',contains:[i,a,{className:"type",begin:"\\b[A-Z][\\w]*(\\((\\.\\.|,|\\w+)\\))?"},e.inherit(e.TITLE_MODE,{begin:"[_a-z][\\w']*"}),n]};return{name:"Haskell",aliases:["hs"],keywords:"let in if then else case of where do module import hiding qualified type data newtype deriving class instance as default infix infixl infixr foreign export ccall stdcall cplusplus jvm dotnet safe unsafe family forall mdo proc rec",contains:[{beginKeywords:"module",end:"where",keywords:"module where",contains:[l,n],illegal:"\\W\\.|;"},{begin:"\\bimport\\b",end:"$",keywords:"import qualified as hiding",contains:[l,n],illegal:"\\W\\.|;"},{className:"class",begin:"^(\\s*)?(class|instance)\\b",end:"where",keywords:"class family instance 
where",contains:[s,l,n]},{className:"class",begin:"\\b(data|(new)?type)\\b",end:"$",keywords:"data family type newtype deriving",contains:[i,s,l,{begin:"{",end:"}",contains:l.contains},n]},{beginKeywords:"default",end:"$",contains:[s,l,n]},{beginKeywords:"infix infixl infixr",end:"$",contains:[e.C_NUMBER_MODE,n]},{begin:"\\bforeign\\b",end:"$",keywords:"foreign import export ccall stdcall cplusplus jvm dotnet safe unsafe",contains:[s,e.QUOTE_STRING_MODE,n]},{className:"meta",begin:"#!\\/usr\\/bin\\/env runhaskell",end:"$"},i,a,e.QUOTE_STRING_MODE,e.C_NUMBER_MODE,s,e.inherit(e.TITLE_MODE,{begin:"^[_a-z][\\w']*"}),n,{begin:"->|<-"}]}}}());hljs.registerLanguage("handlebars",function(){"use strict";function e(...e){return e.map(e=>(function(e){return e?"string"==typeof e?e:e.source:null})(e)).join("")}return function(n){const a={"builtin-name":"action bindattr collection component concat debugger each each-in get hash if in input link-to loc log lookup mut outlet partial query-params render template textarea unbound unless view with yield"},t=/\[.*?\]/,s=/[^\s!"#%&'()*+,.\/;<=>@\[\\\]^`{|}~]+/,i=e("(",/'.*?'/,"|",/".*?"/,"|",t,"|",s,"|",/\.|\//,")+"),r=e("(",t,"|",s,")(?==)"),l={begin:i,lexemes:/[\w.\/]+/},c=n.inherit(l,{keywords:{literal:"true false undefined null"}}),o={begin:/\(/,end:/\)/},m={className:"attr",begin:r,relevance:0,starts:{begin:/=/,end:/=/,starts:{contains:[n.NUMBER_MODE,n.QUOTE_STRING_MODE,n.APOS_STRING_MODE,c,o]}}},d={contains:[n.NUMBER_MODE,n.QUOTE_STRING_MODE,n.APOS_STRING_MODE,{begin:/as\s+\|/,keywords:{keyword:"as"},end:/\|/,contains:[{begin:/\w+/}]},m,c,o],returnEnd:!0},g=n.inherit(l,{className:"name",keywords:a,starts:n.inherit(d,{end:/\)/})});o.contains=[g];const 
u=n.inherit(l,{keywords:a,className:"name",starts:n.inherit(d,{end:/}}/})}),b=n.inherit(l,{keywords:a,className:"name"}),h=n.inherit(l,{className:"name",keywords:a,starts:n.inherit(d,{end:/}}/})});return{name:"Handlebars",aliases:["hbs","html.hbs","html.handlebars","htmlbars"],case_insensitive:!0,subLanguage:"xml",contains:[{begin:/\\\{\{/,skip:!0},{begin:/\\\\(?=\{\{)/,skip:!0},n.COMMENT(/\{\{!--/,/--\}\}/),n.COMMENT(/\{\{!/,/\}\}/),{className:"template-tag",begin:/\{\{\{\{(?!\/)/,end:/\}\}\}\}/,contains:[u],starts:{end:/\{\{\{\{\//,returnEnd:!0,subLanguage:"xml"}},{className:"template-tag",begin:/\{\{\{\{\//,end:/\}\}\}\}/,contains:[b]},{className:"template-tag",begin:/\{\{#/,end:/\}\}/,contains:[u]},{className:"template-tag",begin:/\{\{(?=else\}\})/,end:/\}\}/,keywords:"else"},{className:"template-tag",begin:/\{\{\//,end:/\}\}/,contains:[b]},{className:"template-variable",begin:/\{\{\{/,end:/\}\}\}/,contains:[h]},{className:"template-variable",begin:/\{\{/,end:/\}\}/,contains:[h]}]}}}());hljs.registerLanguage("rust",function(){"use strict";return function(e){var n="([ui](8|16|32|64|128|size)|f(32|64))?",t="drop i8 i16 i32 i64 i128 isize u8 u16 u32 u64 u128 usize f32 f64 str char bool Box Option Result String Vec Copy Send Sized Sync Drop Fn FnMut FnOnce ToOwned Clone Debug PartialEq PartialOrd Eq Ord AsRef AsMut Into From Default Iterator Extend IntoIterator DoubleEndedIterator ExactSizeIterator SliceConcatExt ToString assert! assert_eq! bitflags! bytes! cfg! col! concat! concat_idents! debug_assert! debug_assert_eq! env! panic! file! format! format_args! include_bin! include_str! line! local_data_key! module_path! option_env! print! println! select! stringify! try! unimplemented! unreachable! vec! write! writeln! macro_rules! assert_ne! 
debug_assert_ne!";return{name:"Rust",aliases:["rs"],keywords:{$pattern:e.IDENT_RE+"!?",keyword:"abstract as async await become box break const continue crate do dyn else enum extern false final fn for if impl in let loop macro match mod move mut override priv pub ref return self Self static struct super trait true try type typeof unsafe unsized use virtual where while yield",literal:"true false Some None Ok Err",built_in:t},illegal:""}]}}}());hljs.registerLanguage("cpp",function(){"use strict";return function(e){var t=e.getLanguage("c-like").rawDefinition();return t.disableAutodetect=!1,t.name="C++",t.aliases=["cc","c++","h++","hpp","hh","hxx","cxx"],t}}());hljs.registerLanguage("ini",function(){"use strict";function e(e){return e?"string"==typeof e?e:e.source:null}function n(...n){return n.map(n=>e(n)).join("")}return function(a){var s={className:"number",relevance:0,variants:[{begin:/([\+\-]+)?[\d]+_[\d_]+/},{begin:a.NUMBER_RE}]},i=a.COMMENT();i.variants=[{begin:/;/,end:/$/},{begin:/#/,end:/$/}];var t={className:"variable",variants:[{begin:/\$[\w\d"][\w\d_]*/},{begin:/\$\{(.*?)}/}]},r={className:"literal",begin:/\bon|off|true|false|yes|no\b/},l={className:"string",contains:[a.BACKSLASH_ESCAPE],variants:[{begin:"'''",end:"'''",relevance:10},{begin:'"""',end:'"""',relevance:10},{begin:'"',end:'"'},{begin:"'",end:"'"}]},c={begin:/\[/,end:/\]/,contains:[i,r,t,l,s,"self"],relevance:0},g="("+[/[A-Za-z0-9_-]+/,/"(\\"|[^"])*"/,/'[^']*'/].map(n=>e(n)).join("|")+")";return{name:"TOML, also INI",aliases:["toml"],case_insensitive:!0,illegal:/\S/,contains:[i,{className:"section",begin:/\[+/,end:/\]+/},{begin:n(g,"(\\s*\\.\\s*",g,")*",n("(?=",/\s*=\s*[^#\s]/,")")),className:"attr",starts:{end:/$/,contains:[i,c,r,t,l,s]}}]}}}());hljs.registerLanguage("objectivec",function(){"use strict";return function(e){var n=/[a-zA-Z@][a-zA-Z0-9_]*/,_={$pattern:n,keyword:"@interface @class @protocol 
@implementation"};return{name:"Objective-C",aliases:["mm","objc","obj-c"],keywords:{$pattern:n,keyword:"int float while char export sizeof typedef const struct for union unsigned long volatile static bool mutable if do return goto void enum else break extern asm case short default double register explicit signed typename this switch continue wchar_t inline readonly assign readwrite self @synchronized id typeof nonatomic super unichar IBOutlet IBAction strong weak copy in out inout bycopy byref oneway __strong __weak __block __autoreleasing @private @protected @public @try @property @end @throw @catch @finally @autoreleasepool @synthesize @dynamic @selector @optional @required @encode @package @import @defs @compatibility_alias __bridge __bridge_transfer __bridge_retained __bridge_retain __covariant __contravariant __kindof _Nonnull _Nullable _Null_unspecified __FUNCTION__ __PRETTY_FUNCTION__ __attribute__ getter setter retain unsafe_unretained nonnull nullable null_unspecified null_resettable class instancetype NS_DESIGNATED_INITIALIZER NS_UNAVAILABLE NS_REQUIRES_SUPER NS_RETURNS_INNER_POINTER NS_INLINE NS_AVAILABLE NS_DEPRECATED NS_ENUM NS_OPTIONS NS_SWIFT_UNAVAILABLE NS_ASSUME_NONNULL_BEGIN NS_ASSUME_NONNULL_END NS_REFINED_FOR_SWIFT NS_SWIFT_NAME NS_SWIFT_NOTHROW NS_DURING NS_HANDLER NS_ENDHANDLER NS_VALUERETURN NS_VOIDRETURN",literal:"false true FALSE TRUE nil YES NO NULL",built_in:"BOOL dispatch_once_t dispatch_queue_t dispatch_sync dispatch_async dispatch_once"},illegal:"/,end:/$/,illegal:"\\n"},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{className:"class",begin:"("+_.keyword.split(" ").join("|")+")\\b",end:"({|$)",excludeEnd:!0,keywords:_,contains:[e.UNDERSCORE_TITLE_MODE]},{begin:"\\."+e.UNDERSCORE_IDENT_RE,relevance:0}]}}}());hljs.registerLanguage("apache",function(){"use strict";return function(e){var n={className:"number",begin:"\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}(:\\d{1,5})?"};return{name:"Apache 
config",aliases:["apacheconf"],case_insensitive:!0,contains:[e.HASH_COMMENT_MODE,{className:"section",begin:"",contains:[n,{className:"number",begin:":\\d{1,5}"},e.inherit(e.QUOTE_STRING_MODE,{relevance:0})]},{className:"attribute",begin:/\w+/,relevance:0,keywords:{nomarkup:"order deny allow setenv rewriterule rewriteengine rewritecond documentroot sethandler errordocument loadmodule options header listen serverroot servername"},starts:{end:/$/,relevance:0,keywords:{literal:"on off all deny allow"},contains:[{className:"meta",begin:"\\s\\[",end:"\\]$"},{className:"variable",begin:"[\\$%]\\{",end:"\\}",contains:["self",{className:"number",begin:"[\\$%]\\d+"}]},n,{className:"number",begin:"\\d+"},e.QUOTE_STRING_MODE]}}],illegal:/\S/}}}());hljs.registerLanguage("java",function(){"use strict";function e(e){return e?"string"==typeof e?e:e.source:null}function n(e){return a("(",e,")?")}function a(...n){return n.map(n=>e(n)).join("")}function s(...n){return"("+n.map(n=>e(n)).join("|")+")"}return function(e){var t="false synchronized int abstract float private char boolean var static null if const for true while long strictfp finally protected import native final void enum else break transient catch instanceof byte super volatile case assert short package default double public try this switch continue throws protected public private module requires exports 
do",i={className:"meta",begin:"@[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*",contains:[{begin:/\(/,end:/\)/,contains:["self"]}]},r=e=>a("[",e,"]+([",e,"_]*[",e,"]+)?"),c={className:"number",variants:[{begin:`\\b(0[bB]${r("01")})[lL]?`},{begin:`\\b(0${r("0-7")})[dDfFlL]?`},{begin:a(/\b0[xX]/,s(a(r("a-fA-F0-9"),/\./,r("a-fA-F0-9")),a(r("a-fA-F0-9"),/\.?/),a(/\./,r("a-fA-F0-9"))),/([pP][+-]?(\d+))?/,/[fFdDlL]?/)},{begin:a(/\b/,s(a(/\d*\./,r("\\d")),r("\\d")),/[eE][+-]?[\d]+[dDfF]?/)},{begin:a(/\b/,r(/\d/),n(/\.?/),n(r(/\d/)),/[dDfFlL]?/)}],relevance:0};return{name:"Java",aliases:["jsp"],keywords:t,illegal:/<\/|#/,contains:[e.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{begin:/\w+@/,relevance:0},{className:"doctag",begin:"@[A-Za-z]+"}]}),e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,{className:"class",beginKeywords:"class interface",end:/[{;=]/,excludeEnd:!0,keywords:"class interface",illegal:/[:"\[\]]/,contains:[{beginKeywords:"extends implements"},e.UNDERSCORE_TITLE_MODE]},{beginKeywords:"new throw return else",relevance:0},{className:"function",begin:"([À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*(<[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*(\\s*,\\s*[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*)*>)?\\s+)+"+e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,end:/[{;=]/,excludeEnd:!0,keywords:t,contains:[{begin:e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,relevance:0,contains:[e.UNDERSCORE_TITLE_MODE]},{className:"params",begin:/\(/,end:/\)/,keywords:t,relevance:0,contains:[i,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.C_NUMBER_MODE,e.C_BLOCK_COMMENT_MODE]},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},c,i]}}}());hljs.registerLanguage("x86asm",function(){"use strict";return function(s){return{name:"Intel x86 Assembly",case_insensitive:!0,keywords:{$pattern:"[.%]?"+s.IDENT_RE,keyword:"lock rep repe repz repne repnz xaquire xrelease bnd nobnd aaa aad aam aas adc add and arpl bb0_reset bb1_reset bound bsf bsr bswap bt btc btr bts call cbw cdq cdqe clc cld cli clts cmc cmp cmpsb cmpsd 
cmpsq cmpsw cmpxchg cmpxchg486 cmpxchg8b cmpxchg16b cpuid cpu_read cpu_write cqo cwd cwde daa das dec div dmint emms enter equ f2xm1 fabs fadd faddp fbld fbstp fchs fclex fcmovb fcmovbe fcmove fcmovnb fcmovnbe fcmovne fcmovnu fcmovu fcom fcomi fcomip fcomp fcompp fcos fdecstp fdisi fdiv fdivp fdivr fdivrp femms feni ffree ffreep fiadd ficom ficomp fidiv fidivr fild fimul fincstp finit fist fistp fisttp fisub fisubr fld fld1 fldcw fldenv fldl2e fldl2t fldlg2 fldln2 fldpi fldz fmul fmulp fnclex fndisi fneni fninit fnop fnsave fnstcw fnstenv fnstsw fpatan fprem fprem1 fptan frndint frstor fsave fscale fsetpm fsin fsincos fsqrt fst fstcw fstenv fstp fstsw fsub fsubp fsubr fsubrp ftst fucom fucomi fucomip fucomp fucompp fxam fxch fxtract fyl2x fyl2xp1 hlt ibts icebp idiv imul in inc incbin insb insd insw int int01 int1 int03 int3 into invd invpcid invlpg invlpga iret iretd iretq iretw jcxz jecxz jrcxz jmp jmpe lahf lar lds lea leave les lfence lfs lgdt lgs lidt lldt lmsw loadall loadall286 lodsb lodsd lodsq lodsw loop loope loopne loopnz loopz lsl lss ltr mfence monitor mov movd movq movsb movsd movsq movsw movsx movsxd movzx mul mwait neg nop not or out outsb outsd outsw packssdw packsswb packuswb paddb paddd paddsb paddsiw paddsw paddusb paddusw paddw pand pandn pause paveb pavgusb pcmpeqb pcmpeqd pcmpeqw pcmpgtb pcmpgtd pcmpgtw pdistib pf2id pfacc pfadd pfcmpeq pfcmpge pfcmpgt pfmax pfmin pfmul pfrcp pfrcpit1 pfrcpit2 pfrsqit1 pfrsqrt pfsub pfsubr pi2fd pmachriw pmaddwd pmagw pmulhriw pmulhrwa pmulhrwc pmulhw pmullw pmvgezb pmvlzb pmvnzb pmvzb pop popa popad popaw popf popfd popfq popfw por prefetch prefetchw pslld psllq psllw psrad psraw psrld psrlq psrlw psubb psubd psubsb psubsiw psubsw psubusb psubusw psubw punpckhbw punpckhdq punpckhwd punpcklbw punpckldq punpcklwd push pusha pushad pushaw pushf pushfd pushfq pushfw pxor rcl rcr rdshr rdmsr rdpmc rdtsc rdtscp ret retf retn rol ror rdm rsdc rsldt rsm rsts sahf sal salc sar sbb scasb scasd scasq scasw sfence sgdt 
shl shld shr shrd sidt sldt skinit smi smint smintold smsw stc std sti stosb stosd stosq stosw str sub svdc svldt svts swapgs syscall sysenter sysexit sysret test ud0 ud1 ud2b ud2 ud2a umov verr verw fwait wbinvd wrshr wrmsr xadd xbts xchg xlatb xlat xor cmove cmovz cmovne cmovnz cmova cmovnbe cmovae cmovnb cmovb cmovnae cmovbe cmovna cmovg cmovnle cmovge cmovnl cmovl cmovnge cmovle cmovng cmovc cmovnc cmovo cmovno cmovs cmovns cmovp cmovpe cmovnp cmovpo je jz jne jnz ja jnbe jae jnb jb jnae jbe jna jg jnle jge jnl jl jnge jle jng jc jnc jo jno js jns jpo jnp jpe jp sete setz setne setnz seta setnbe setae setnb setnc setb setnae setcset setbe setna setg setnle setge setnl setl setnge setle setng sets setns seto setno setpe setp setpo setnp addps addss andnps andps cmpeqps cmpeqss cmpleps cmpless cmpltps cmpltss cmpneqps cmpneqss cmpnleps cmpnless cmpnltps cmpnltss cmpordps cmpordss cmpunordps cmpunordss cmpps cmpss comiss cvtpi2ps cvtps2pi cvtsi2ss cvtss2si cvttps2pi cvttss2si divps divss ldmxcsr maxps maxss minps minss movaps movhps movlhps movlps movhlps movmskps movntps movss movups mulps mulss orps rcpps rcpss rsqrtps rsqrtss shufps sqrtps sqrtss stmxcsr subps subss ucomiss unpckhps unpcklps xorps fxrstor fxrstor64 fxsave fxsave64 xgetbv xsetbv xsave xsave64 xsaveopt xsaveopt64 xrstor xrstor64 prefetchnta prefetcht0 prefetcht1 prefetcht2 maskmovq movntq pavgb pavgw pextrw pinsrw pmaxsw pmaxub pminsw pminub pmovmskb pmulhuw psadbw pshufw pf2iw pfnacc pfpnacc pi2fw pswapd maskmovdqu clflush movntdq movnti movntpd movdqa movdqu movdq2q movq2dq paddq pmuludq pshufd pshufhw pshuflw pslldq psrldq psubq punpckhqdq punpcklqdq addpd addsd andnpd andpd cmpeqpd cmpeqsd cmplepd cmplesd cmpltpd cmpltsd cmpneqpd cmpneqsd cmpnlepd cmpnlesd cmpnltpd cmpnltsd cmpordpd cmpordsd cmpunordpd cmpunordsd cmppd comisd cvtdq2pd cvtdq2ps cvtpd2dq cvtpd2pi cvtpd2ps cvtpi2pd cvtps2dq cvtps2pd cvtsd2si cvtsd2ss cvtsi2sd cvtss2sd cvttpd2pi cvttpd2dq cvttps2dq cvttsd2si divpd divsd maxpd 
maxsd minpd minsd movapd movhpd movlpd movmskpd movupd mulpd mulsd orpd shufpd sqrtpd sqrtsd subpd subsd ucomisd unpckhpd unpcklpd xorpd addsubpd addsubps haddpd haddps hsubpd hsubps lddqu movddup movshdup movsldup clgi stgi vmcall vmclear vmfunc vmlaunch vmload vmmcall vmptrld vmptrst vmread vmresume vmrun vmsave vmwrite vmxoff vmxon invept invvpid pabsb pabsw pabsd palignr phaddw phaddd phaddsw phsubw phsubd phsubsw pmaddubsw pmulhrsw pshufb psignb psignw psignd extrq insertq movntsd movntss lzcnt blendpd blendps blendvpd blendvps dppd dpps extractps insertps movntdqa mpsadbw packusdw pblendvb pblendw pcmpeqq pextrb pextrd pextrq phminposuw pinsrb pinsrd pinsrq pmaxsb pmaxsd pmaxud pmaxuw pminsb pminsd pminud pminuw pmovsxbw pmovsxbd pmovsxbq pmovsxwd pmovsxwq pmovsxdq pmovzxbw pmovzxbd pmovzxbq pmovzxwd pmovzxwq pmovzxdq pmuldq pmulld ptest roundpd roundps roundsd roundss crc32 pcmpestri pcmpestrm pcmpistri pcmpistrm pcmpgtq popcnt getsec pfrcpv pfrsqrtv movbe aesenc aesenclast aesdec aesdeclast aesimc aeskeygenassist vaesenc vaesenclast vaesdec vaesdeclast vaesimc vaeskeygenassist vaddpd vaddps vaddsd vaddss vaddsubpd vaddsubps vandpd vandps vandnpd vandnps vblendpd vblendps vblendvpd vblendvps vbroadcastss vbroadcastsd vbroadcastf128 vcmpeq_ospd vcmpeqpd vcmplt_ospd vcmpltpd vcmple_ospd vcmplepd vcmpunord_qpd vcmpunordpd vcmpneq_uqpd vcmpneqpd vcmpnlt_uspd vcmpnltpd vcmpnle_uspd vcmpnlepd vcmpord_qpd vcmpordpd vcmpeq_uqpd vcmpnge_uspd vcmpngepd vcmpngt_uspd vcmpngtpd vcmpfalse_oqpd vcmpfalsepd vcmpneq_oqpd vcmpge_ospd vcmpgepd vcmpgt_ospd vcmpgtpd vcmptrue_uqpd vcmptruepd vcmplt_oqpd vcmple_oqpd vcmpunord_spd vcmpneq_uspd vcmpnlt_uqpd vcmpnle_uqpd vcmpord_spd vcmpeq_uspd vcmpnge_uqpd vcmpngt_uqpd vcmpfalse_ospd vcmpneq_ospd vcmpge_oqpd vcmpgt_oqpd vcmptrue_uspd vcmppd vcmpeq_osps vcmpeqps vcmplt_osps vcmpltps vcmple_osps vcmpleps vcmpunord_qps vcmpunordps vcmpneq_uqps vcmpneqps vcmpnlt_usps vcmpnltps vcmpnle_usps vcmpnleps vcmpord_qps vcmpordps vcmpeq_uqps 
vcmpnge_usps vcmpngeps vcmpngt_usps vcmpngtps vcmpfalse_oqps vcmpfalseps vcmpneq_oqps vcmpge_osps vcmpgeps vcmpgt_osps vcmpgtps vcmptrue_uqps vcmptrueps vcmplt_oqps vcmple_oqps vcmpunord_sps vcmpneq_usps vcmpnlt_uqps vcmpnle_uqps vcmpord_sps vcmpeq_usps vcmpnge_uqps vcmpngt_uqps vcmpfalse_osps vcmpneq_osps vcmpge_oqps vcmpgt_oqps vcmptrue_usps vcmpps vcmpeq_ossd vcmpeqsd vcmplt_ossd vcmpltsd vcmple_ossd vcmplesd vcmpunord_qsd vcmpunordsd vcmpneq_uqsd vcmpneqsd vcmpnlt_ussd vcmpnltsd vcmpnle_ussd vcmpnlesd vcmpord_qsd vcmpordsd vcmpeq_uqsd vcmpnge_ussd vcmpngesd vcmpngt_ussd vcmpngtsd vcmpfalse_oqsd vcmpfalsesd vcmpneq_oqsd vcmpge_ossd vcmpgesd vcmpgt_ossd vcmpgtsd vcmptrue_uqsd vcmptruesd vcmplt_oqsd vcmple_oqsd vcmpunord_ssd vcmpneq_ussd vcmpnlt_uqsd vcmpnle_uqsd vcmpord_ssd vcmpeq_ussd vcmpnge_uqsd vcmpngt_uqsd vcmpfalse_ossd vcmpneq_ossd vcmpge_oqsd vcmpgt_oqsd vcmptrue_ussd vcmpsd vcmpeq_osss vcmpeqss vcmplt_osss vcmpltss vcmple_osss vcmpless vcmpunord_qss vcmpunordss vcmpneq_uqss vcmpneqss vcmpnlt_usss vcmpnltss vcmpnle_usss vcmpnless vcmpord_qss vcmpordss vcmpeq_uqss vcmpnge_usss vcmpngess vcmpngt_usss vcmpngtss vcmpfalse_oqss vcmpfalsess vcmpneq_oqss vcmpge_osss vcmpgess vcmpgt_osss vcmpgtss vcmptrue_uqss vcmptruess vcmplt_oqss vcmple_oqss vcmpunord_sss vcmpneq_usss vcmpnlt_uqss vcmpnle_uqss vcmpord_sss vcmpeq_usss vcmpnge_uqss vcmpngt_uqss vcmpfalse_osss vcmpneq_osss vcmpge_oqss vcmpgt_oqss vcmptrue_usss vcmpss vcomisd vcomiss vcvtdq2pd vcvtdq2ps vcvtpd2dq vcvtpd2ps vcvtps2dq vcvtps2pd vcvtsd2si vcvtsd2ss vcvtsi2sd vcvtsi2ss vcvtss2sd vcvtss2si vcvttpd2dq vcvttps2dq vcvttsd2si vcvttss2si vdivpd vdivps vdivsd vdivss vdppd vdpps vextractf128 vextractps vhaddpd vhaddps vhsubpd vhsubps vinsertf128 vinsertps vlddqu vldqqu vldmxcsr vmaskmovdqu vmaskmovps vmaskmovpd vmaxpd vmaxps vmaxsd vmaxss vminpd vminps vminsd vminss vmovapd vmovaps vmovd vmovq vmovddup vmovdqa vmovqqa vmovdqu vmovqqu vmovhlps vmovhpd vmovhps vmovlhps vmovlpd vmovlps vmovmskpd vmovmskps 
vmovntdq vmovntqq vmovntdqa vmovntpd vmovntps vmovsd vmovshdup vmovsldup vmovss vmovupd vmovups vmpsadbw vmulpd vmulps vmulsd vmulss vorpd vorps vpabsb vpabsw vpabsd vpacksswb vpackssdw vpackuswb vpackusdw vpaddb vpaddw vpaddd vpaddq vpaddsb vpaddsw vpaddusb vpaddusw vpalignr vpand vpandn vpavgb vpavgw vpblendvb vpblendw vpcmpestri vpcmpestrm vpcmpistri vpcmpistrm vpcmpeqb vpcmpeqw vpcmpeqd vpcmpeqq vpcmpgtb vpcmpgtw vpcmpgtd vpcmpgtq vpermilpd vpermilps vperm2f128 vpextrb vpextrw vpextrd vpextrq vphaddw vphaddd vphaddsw vphminposuw vphsubw vphsubd vphsubsw vpinsrb vpinsrw vpinsrd vpinsrq vpmaddwd vpmaddubsw vpmaxsb vpmaxsw vpmaxsd vpmaxub vpmaxuw vpmaxud vpminsb vpminsw vpminsd vpminub vpminuw vpminud vpmovmskb vpmovsxbw vpmovsxbd vpmovsxbq vpmovsxwd vpmovsxwq vpmovsxdq vpmovzxbw vpmovzxbd vpmovzxbq vpmovzxwd vpmovzxwq vpmovzxdq vpmulhuw vpmulhrsw vpmulhw vpmullw vpmulld vpmuludq vpmuldq vpor vpsadbw vpshufb vpshufd vpshufhw vpshuflw vpsignb vpsignw vpsignd vpslldq vpsrldq vpsllw vpslld vpsllq vpsraw vpsrad vpsrlw vpsrld vpsrlq vptest vpsubb vpsubw vpsubd vpsubq vpsubsb vpsubsw vpsubusb vpsubusw vpunpckhbw vpunpckhwd vpunpckhdq vpunpckhqdq vpunpcklbw vpunpcklwd vpunpckldq vpunpcklqdq vpxor vrcpps vrcpss vrsqrtps vrsqrtss vroundpd vroundps vroundsd vroundss vshufpd vshufps vsqrtpd vsqrtps vsqrtsd vsqrtss vstmxcsr vsubpd vsubps vsubsd vsubss vtestps vtestpd vucomisd vucomiss vunpckhpd vunpckhps vunpcklpd vunpcklps vxorpd vxorps vzeroall vzeroupper pclmullqlqdq pclmulhqlqdq pclmullqhqdq pclmulhqhqdq pclmulqdq vpclmullqlqdq vpclmulhqlqdq vpclmullqhqdq vpclmulhqhqdq vpclmulqdq vfmadd132ps vfmadd132pd vfmadd312ps vfmadd312pd vfmadd213ps vfmadd213pd vfmadd123ps vfmadd123pd vfmadd231ps vfmadd231pd vfmadd321ps vfmadd321pd vfmaddsub132ps vfmaddsub132pd vfmaddsub312ps vfmaddsub312pd vfmaddsub213ps vfmaddsub213pd vfmaddsub123ps vfmaddsub123pd vfmaddsub231ps vfmaddsub231pd vfmaddsub321ps vfmaddsub321pd vfmsub132ps vfmsub132pd vfmsub312ps vfmsub312pd vfmsub213ps vfmsub213pd 
vfmsub123ps vfmsub123pd vfmsub231ps vfmsub231pd vfmsub321ps vfmsub321pd vfmsubadd132ps vfmsubadd132pd vfmsubadd312ps vfmsubadd312pd vfmsubadd213ps vfmsubadd213pd vfmsubadd123ps vfmsubadd123pd vfmsubadd231ps vfmsubadd231pd vfmsubadd321ps vfmsubadd321pd vfnmadd132ps vfnmadd132pd vfnmadd312ps vfnmadd312pd vfnmadd213ps vfnmadd213pd vfnmadd123ps vfnmadd123pd vfnmadd231ps vfnmadd231pd vfnmadd321ps vfnmadd321pd vfnmsub132ps vfnmsub132pd vfnmsub312ps vfnmsub312pd vfnmsub213ps vfnmsub213pd vfnmsub123ps vfnmsub123pd vfnmsub231ps vfnmsub231pd vfnmsub321ps vfnmsub321pd vfmadd132ss vfmadd132sd vfmadd312ss vfmadd312sd vfmadd213ss vfmadd213sd vfmadd123ss vfmadd123sd vfmadd231ss vfmadd231sd vfmadd321ss vfmadd321sd vfmsub132ss vfmsub132sd vfmsub312ss vfmsub312sd vfmsub213ss vfmsub213sd vfmsub123ss vfmsub123sd vfmsub231ss vfmsub231sd vfmsub321ss vfmsub321sd vfnmadd132ss vfnmadd132sd vfnmadd312ss vfnmadd312sd vfnmadd213ss vfnmadd213sd vfnmadd123ss vfnmadd123sd vfnmadd231ss vfnmadd231sd vfnmadd321ss vfnmadd321sd vfnmsub132ss vfnmsub132sd vfnmsub312ss vfnmsub312sd vfnmsub213ss vfnmsub213sd vfnmsub123ss vfnmsub123sd vfnmsub231ss vfnmsub231sd vfnmsub321ss vfnmsub321sd rdfsbase rdgsbase rdrand wrfsbase wrgsbase vcvtph2ps vcvtps2ph adcx adox rdseed clac stac xstore xcryptecb xcryptcbc xcryptctr xcryptcfb xcryptofb montmul xsha1 xsha256 llwpcb slwpcb lwpval lwpins vfmaddpd vfmaddps vfmaddsd vfmaddss vfmaddsubpd vfmaddsubps vfmsubaddpd vfmsubaddps vfmsubpd vfmsubps vfmsubsd vfmsubss vfnmaddpd vfnmaddps vfnmaddsd vfnmaddss vfnmsubpd vfnmsubps vfnmsubsd vfnmsubss vfrczpd vfrczps vfrczsd vfrczss vpcmov vpcomb vpcomd vpcomq vpcomub vpcomud vpcomuq vpcomuw vpcomw vphaddbd vphaddbq vphaddbw vphadddq vphaddubd vphaddubq vphaddubw vphaddudq vphadduwd vphadduwq vphaddwd vphaddwq vphsubbw vphsubdq vphsubwd vpmacsdd vpmacsdqh vpmacsdql vpmacssdd vpmacssdqh vpmacssdql vpmacsswd vpmacssww vpmacswd vpmacsww vpmadcsswd vpmadcswd vpperm vprotb vprotd vprotq vprotw vpshab vpshad vpshaq vpshaw vpshlb vpshld 
vpshlq vpshlw vbroadcasti128 vpblendd vpbroadcastb vpbroadcastw vpbroadcastd vpbroadcastq vpermd vpermpd vpermps vpermq vperm2i128 vextracti128 vinserti128 vpmaskmovd vpmaskmovq vpsllvd vpsllvq vpsravd vpsrlvd vpsrlvq vgatherdpd vgatherqpd vgatherdps vgatherqps vpgatherdd vpgatherqd vpgatherdq vpgatherqq xabort xbegin xend xtest andn bextr blci blcic blsi blsic blcfill blsfill blcmsk blsmsk blsr blcs bzhi mulx pdep pext rorx sarx shlx shrx tzcnt tzmsk t1mskc valignd valignq vblendmpd vblendmps vbroadcastf32x4 vbroadcastf64x4 vbroadcasti32x4 vbroadcasti64x4 vcompresspd vcompressps vcvtpd2udq vcvtps2udq vcvtsd2usi vcvtss2usi vcvttpd2udq vcvttps2udq vcvttsd2usi vcvttss2usi vcvtudq2pd vcvtudq2ps vcvtusi2sd vcvtusi2ss vexpandpd vexpandps vextractf32x4 vextractf64x4 vextracti32x4 vextracti64x4 vfixupimmpd vfixupimmps vfixupimmsd vfixupimmss vgetexppd vgetexpps vgetexpsd vgetexpss vgetmantpd vgetmantps vgetmantsd vgetmantss vinsertf32x4 vinsertf64x4 vinserti32x4 vinserti64x4 vmovdqa32 vmovdqa64 vmovdqu32 vmovdqu64 vpabsq vpandd vpandnd vpandnq vpandq vpblendmd vpblendmq vpcmpltd vpcmpled vpcmpneqd vpcmpnltd vpcmpnled vpcmpd vpcmpltq vpcmpleq vpcmpneqq vpcmpnltq vpcmpnleq vpcmpq vpcmpequd vpcmpltud vpcmpleud vpcmpnequd vpcmpnltud vpcmpnleud vpcmpud vpcmpequq vpcmpltuq vpcmpleuq vpcmpnequq vpcmpnltuq vpcmpnleuq vpcmpuq vpcompressd vpcompressq vpermi2d vpermi2pd vpermi2ps vpermi2q vpermt2d vpermt2pd vpermt2ps vpermt2q vpexpandd vpexpandq vpmaxsq vpmaxuq vpminsq vpminuq vpmovdb vpmovdw vpmovqb vpmovqd vpmovqw vpmovsdb vpmovsdw vpmovsqb vpmovsqd vpmovsqw vpmovusdb vpmovusdw vpmovusqb vpmovusqd vpmovusqw vpord vporq vprold vprolq vprolvd vprolvq vprord vprorq vprorvd vprorvq vpscatterdd vpscatterdq vpscatterqd vpscatterqq vpsraq vpsravq vpternlogd vpternlogq vptestmd vptestmq vptestnmd vptestnmq vpxord vpxorq vrcp14pd vrcp14ps vrcp14sd vrcp14ss vrndscalepd vrndscaleps vrndscalesd vrndscaless vrsqrt14pd vrsqrt14ps vrsqrt14sd vrsqrt14ss vscalefpd vscalefps vscalefsd vscalefss 
vscatterdpd vscatterdps vscatterqpd vscatterqps vshuff32x4 vshuff64x2 vshufi32x4 vshufi64x2 kandnw kandw kmovw knotw kortestw korw kshiftlw kshiftrw kunpckbw kxnorw kxorw vpbroadcastmb2q vpbroadcastmw2d vpconflictd vpconflictq vplzcntd vplzcntq vexp2pd vexp2ps vrcp28pd vrcp28ps vrcp28sd vrcp28ss vrsqrt28pd vrsqrt28ps vrsqrt28sd vrsqrt28ss vgatherpf0dpd vgatherpf0dps vgatherpf0qpd vgatherpf0qps vgatherpf1dpd vgatherpf1dps vgatherpf1qpd vgatherpf1qps vscatterpf0dpd vscatterpf0dps vscatterpf0qpd vscatterpf0qps vscatterpf1dpd vscatterpf1dps vscatterpf1qpd vscatterpf1qps prefetchwt1 bndmk bndcl bndcu bndcn bndmov bndldx bndstx sha1rnds4 sha1nexte sha1msg1 sha1msg2 sha256rnds2 sha256msg1 sha256msg2 hint_nop0 hint_nop1 hint_nop2 hint_nop3 hint_nop4 hint_nop5 hint_nop6 hint_nop7 hint_nop8 hint_nop9 hint_nop10 hint_nop11 hint_nop12 hint_nop13 hint_nop14 hint_nop15 hint_nop16 hint_nop17 hint_nop18 hint_nop19 hint_nop20 hint_nop21 hint_nop22 hint_nop23 hint_nop24 hint_nop25 hint_nop26 hint_nop27 hint_nop28 hint_nop29 hint_nop30 hint_nop31 hint_nop32 hint_nop33 hint_nop34 hint_nop35 hint_nop36 hint_nop37 hint_nop38 hint_nop39 hint_nop40 hint_nop41 hint_nop42 hint_nop43 hint_nop44 hint_nop45 hint_nop46 hint_nop47 hint_nop48 hint_nop49 hint_nop50 hint_nop51 hint_nop52 hint_nop53 hint_nop54 hint_nop55 hint_nop56 hint_nop57 hint_nop58 hint_nop59 hint_nop60 hint_nop61 hint_nop62 hint_nop63",built_in:"ip eip rip al ah bl bh cl ch dl dh sil dil bpl spl r8b r9b r10b r11b r12b r13b r14b r15b ax bx cx dx si di bp sp r8w r9w r10w r11w r12w r13w r14w r15w eax ebx ecx edx esi edi ebp esp eip r8d r9d r10d r11d r12d r13d r14d r15d rax rbx rcx rdx rsi rdi rbp rsp r8 r9 r10 r11 r12 r13 r14 r15 cs ds es fs gs ss st st0 st1 st2 st3 st4 st5 st6 st7 mm0 mm1 mm2 mm3 mm4 mm5 mm6 mm7 xmm0 xmm1 xmm2 xmm3 xmm4 xmm5 xmm6 xmm7 xmm8 xmm9 xmm10 xmm11 xmm12 xmm13 xmm14 xmm15 xmm16 xmm17 xmm18 xmm19 xmm20 xmm21 xmm22 xmm23 xmm24 xmm25 xmm26 xmm27 xmm28 xmm29 xmm30 xmm31 ymm0 ymm1 ymm2 ymm3 ymm4 ymm5 ymm6 
ymm7 ymm8 ymm9 ymm10 ymm11 ymm12 ymm13 ymm14 ymm15 ymm16 ymm17 ymm18 ymm19 ymm20 ymm21 ymm22 ymm23 ymm24 ymm25 ymm26 ymm27 ymm28 ymm29 ymm30 ymm31 zmm0 zmm1 zmm2 zmm3 zmm4 zmm5 zmm6 zmm7 zmm8 zmm9 zmm10 zmm11 zmm12 zmm13 zmm14 zmm15 zmm16 zmm17 zmm18 zmm19 zmm20 zmm21 zmm22 zmm23 zmm24 zmm25 zmm26 zmm27 zmm28 zmm29 zmm30 zmm31 k0 k1 k2 k3 k4 k5 k6 k7 bnd0 bnd1 bnd2 bnd3 cr0 cr1 cr2 cr3 cr4 cr8 dr0 dr1 dr2 dr3 dr8 tr3 tr4 tr5 tr6 tr7 r0 r1 r2 r3 r4 r5 r6 r7 r0b r1b r2b r3b r4b r5b r6b r7b r0w r1w r2w r3w r4w r5w r6w r7w r0d r1d r2d r3d r4d r5d r6d r7d r0h r1h r2h r3h r0l r1l r2l r3l r4l r5l r6l r7l r8l r9l r10l r11l r12l r13l r14l r15l db dw dd dq dt ddq do dy dz resb resw resd resq rest resdq reso resy resz incbin equ times byte word dword qword nosplit rel abs seg wrt strict near far a32 ptr",meta:"%define %xdefine %+ %undef %defstr %deftok %assign %strcat %strlen %substr %rotate %elif %else %endif %if %ifmacro %ifctx %ifidn %ifidni %ifid %ifnum %ifstr %iftoken %ifempty %ifenv %error %warning %fatal %rep %endrep %include %push %pop %repl %pathsearch %depend %use %arg %stacksize %local %line %comment %endcomment .nolist __FILE__ __LINE__ __SECT__ __BITS__ __OUTPUT_FORMAT__ __DATE__ __TIME__ __DATE_NUM__ __TIME_NUM__ __UTC_DATE__ __UTC_TIME__ __UTC_DATE_NUM__ __UTC_TIME_NUM__ __PASS__ struc endstruc istruc at iend align alignb sectalign daz nodaz up down zero default option assume public bits use16 use32 use64 default section segment absolute extern global common cpu float __utf16__ __utf16le__ __utf16be__ __utf32__ __utf32le__ __utf32be__ __float8__ __float16__ __float32__ __float64__ __float80m__ __float80e__ __float128l__ __float128h__ __Infinity__ __QNaN__ __SNaN__ Inf NaN QNaN SNaN float8 float16 float32 float64 float80m float80e float128l float128h __FLOAT_DAZ__ __FLOAT_ROUND__ 
__FLOAT__"},contains:[s.COMMENT(";","$",{relevance:0}),{className:"number",variants:[{begin:"\\b(?:([0-9][0-9_]*)?\\.[0-9_]*(?:[eE][+-]?[0-9_]+)?|(0[Xx])?[0-9][0-9_]*\\.?[0-9_]*(?:[pP](?:[+-]?[0-9_]+)?)?)\\b",relevance:0},{begin:"\\$[0-9][0-9A-Fa-f]*",relevance:0},{begin:"\\b(?:[0-9A-Fa-f][0-9A-Fa-f_]*[Hh]|[0-9][0-9_]*[DdTt]?|[0-7][0-7_]*[QqOo]|[0-1][0-1_]*[BbYy])\\b"},{begin:"\\b(?:0[Xx][0-9A-Fa-f_]+|0[DdTt][0-9_]+|0[QqOo][0-7_]+|0[BbYy][0-1_]+)\\b"}]},s.QUOTE_STRING_MODE,{className:"string",variants:[{begin:"'",end:"[^\\\\]'"},{begin:"`",end:"[^\\\\]`"}],relevance:0},{className:"symbol",variants:[{begin:"^\\s*[A-Za-z._?][A-Za-z0-9_$#@~.?]*(:|\\s+label)"},{begin:"^\\s*%%[A-Za-z0-9_$#@~.?]*:"}],relevance:0},{className:"subst",begin:"%[0-9]+",relevance:0},{className:"subst",begin:"%!S+",relevance:0},{className:"meta",begin:/^\s*\.[\w_-]+/}]}}}());hljs.registerLanguage("kotlin",function(){"use strict";return function(e){var n={keyword:"abstract as val var vararg get set class object open private protected public noinline crossinline dynamic final enum if else do while for when throw try catch finally import package is in fun override companion reified inline lateinit init interface annotation data sealed internal infix operator out by constructor super tailrec where const inner suspend typealias external expect actual trait volatile transient native default",built_in:"Byte Short Char Int Long Boolean Float Double Void Unit Nothing",literal:"true false null"},a={className:"symbol",begin:e.UNDERSCORE_IDENT_RE+"@"},i={className:"subst",begin:"\\${",end:"}",contains:[e.C_NUMBER_MODE]},s={className:"variable",begin:"\\$"+e.UNDERSCORE_IDENT_RE},t={className:"string",variants:[{begin:'"""',end:'"""(?=[^"])',contains:[s,i]},{begin:"'",end:"'",illegal:/\n/,contains:[e.BACKSLASH_ESCAPE]},{begin:'"',end:'"',illegal:/\n/,contains:[e.BACKSLASH_ESCAPE,s,i]}]};i.contains.push(t);var 
r={className:"meta",begin:"@(?:file|property|field|get|set|receiver|param|setparam|delegate)\\s*:(?:\\s*"+e.UNDERSCORE_IDENT_RE+")?"},l={className:"meta",begin:"@"+e.UNDERSCORE_IDENT_RE,contains:[{begin:/\(/,end:/\)/,contains:[e.inherit(t,{className:"meta-string"})]}]},c=e.COMMENT("/\\*","\\*/",{contains:[e.C_BLOCK_COMMENT_MODE]}),o={variants:[{className:"type",begin:e.UNDERSCORE_IDENT_RE},{begin:/\(/,end:/\)/,contains:[]}]},d=o;return d.variants[1].contains=[o],o.variants[1].contains=[d],{name:"Kotlin",aliases:["kt"],keywords:n,contains:[e.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{className:"doctag",begin:"@[A-Za-z]+"}]}),e.C_LINE_COMMENT_MODE,c,{className:"keyword",begin:/\b(break|continue|return|this)\b/,starts:{contains:[{className:"symbol",begin:/@\w+/}]}},a,r,l,{className:"function",beginKeywords:"fun",end:"[(]|$",returnBegin:!0,excludeEnd:!0,keywords:n,illegal:/fun\s+(<.*>)?[^\s\(]+(\s+[^\s\(]+)\s*=/,relevance:5,contains:[{begin:e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,relevance:0,contains:[e.UNDERSCORE_TITLE_MODE]},{className:"type",begin://,keywords:"reified",relevance:0},{className:"params",begin:/\(/,end:/\)/,endsParent:!0,keywords:n,relevance:0,contains:[{begin:/:/,end:/[=,\/]/,endsWithParent:!0,contains:[o,e.C_LINE_COMMENT_MODE,c],relevance:0},e.C_LINE_COMMENT_MODE,c,r,l,t,e.C_NUMBER_MODE]},c]},{className:"class",beginKeywords:"class interface trait",end:/[:\{(]|$/,excludeEnd:!0,illegal:"extends implements",contains:[{beginKeywords:"public protected internal private 
constructor"},e.UNDERSCORE_TITLE_MODE,{className:"type",begin://,excludeBegin:!0,excludeEnd:!0,relevance:0},{className:"type",begin:/[,:]\s*/,end:/[<\(,]|$/,excludeBegin:!0,returnEnd:!0},r,l]},t,{className:"meta",begin:"^#!/usr/bin/env",end:"$",illegal:"\n"},{className:"number",begin:"\\b(0[bB]([01]+[01_]+[01]+|[01]+)|0[xX]([a-fA-F0-9]+[a-fA-F0-9_]+[a-fA-F0-9]+|[a-fA-F0-9]+)|(([\\d]+[\\d_]+[\\d]+|[\\d]+)(\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))?|\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))([eE][-+]?\\d+)?)[lLfF]?",relevance:0}]}}}());hljs.registerLanguage("armasm",function(){"use strict";return function(s){const e={variants:[s.COMMENT("^[ \\t]*(?=#)","$",{relevance:0,excludeBegin:!0}),s.COMMENT("[;@]","$",{relevance:0}),s.C_LINE_COMMENT_MODE,s.C_BLOCK_COMMENT_MODE]};return{name:"ARM Assembly",case_insensitive:!0,aliases:["arm"],keywords:{$pattern:"\\.?"+s.IDENT_RE,meta:".2byte .4byte .align .ascii .asciz .balign .byte .code .data .else .end .endif .endm .endr .equ .err .exitm .extern .global .hword .if .ifdef .ifndef .include .irp .long .macro .rept .req .section .set .skip .space .text .word .arm .thumb .code16 .code32 .force_thumb .thumb_func .ltorg ALIAS ALIGN ARM AREA ASSERT ATTR CN CODE CODE16 CODE32 COMMON CP DATA DCB DCD DCDU DCDO DCFD DCFDU DCI DCQ DCQU DCW DCWU DN ELIF ELSE END ENDFUNC ENDIF ENDP ENTRY EQU EXPORT EXPORTAS EXTERN FIELD FILL FUNCTION GBLA GBLL GBLS GET GLOBAL IF IMPORT INCBIN INCLUDE INFO KEEP LCLA LCLL LCLS LTORG MACRO MAP MEND MEXIT NOFP OPT PRESERVE8 PROC QN READONLY RELOC REQUIRE REQUIRE8 RLIST FN ROUT SETA SETL SETS SN SPACE SUBT THUMB THUMBX TTL WHILE WEND ",built_in:"r0 r1 r2 r3 r4 r5 r6 r7 r8 r9 r10 r11 r12 r13 r14 r15 pc lr sp ip sl sb fp a1 a2 a3 a4 v1 v2 v3 v4 v5 v6 v7 v8 f0 f1 f2 f3 f4 f5 f6 f7 p0 p1 p2 p3 p4 p5 p6 p7 p8 p9 p10 p11 p12 p13 p14 p15 c0 c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 q0 q1 q2 q3 q4 q5 q6 q7 q8 q9 q10 q11 q12 q13 q14 q15 cpsr_c cpsr_x cpsr_s cpsr_f cpsr_cx cpsr_cxs cpsr_xs cpsr_xsf cpsr_sf cpsr_cxsf spsr_c spsr_x 
spsr_s spsr_f spsr_cx spsr_cxs spsr_xs spsr_xsf spsr_sf spsr_cxsf s0 s1 s2 s3 s4 s5 s6 s7 s8 s9 s10 s11 s12 s13 s14 s15 s16 s17 s18 s19 s20 s21 s22 s23 s24 s25 s26 s27 s28 s29 s30 s31 d0 d1 d2 d3 d4 d5 d6 d7 d8 d9 d10 d11 d12 d13 d14 d15 d16 d17 d18 d19 d20 d21 d22 d23 d24 d25 d26 d27 d28 d29 d30 d31 {PC} {VAR} {TRUE} {FALSE} {OPT} {CONFIG} {ENDIAN} {CODESIZE} {CPU} {FPU} {ARCHITECTURE} {PCSTOREOFFSET} {ARMASM_VERSION} {INTER} {ROPI} {RWPI} {SWST} {NOSWST} . @"},contains:[{className:"keyword",begin:"\\b(adc|(qd?|sh?|u[qh]?)?add(8|16)?|usada?8|(q|sh?|u[qh]?)?(as|sa)x|and|adrl?|sbc|rs[bc]|asr|b[lx]?|blx|bxj|cbn?z|tb[bh]|bic|bfc|bfi|[su]bfx|bkpt|cdp2?|clz|clrex|cmp|cmn|cpsi[ed]|cps|setend|dbg|dmb|dsb|eor|isb|it[te]{0,3}|lsl|lsr|ror|rrx|ldm(([id][ab])|f[ds])?|ldr((s|ex)?[bhd])?|movt?|mvn|mra|mar|mul|[us]mull|smul[bwt][bt]|smu[as]d|smmul|smmla|mla|umlaal|smlal?([wbt][bt]|d)|mls|smlsl?[ds]|smc|svc|sev|mia([bt]{2}|ph)?|mrr?c2?|mcrr2?|mrs|msr|orr|orn|pkh(tb|bt)|rbit|rev(16|sh)?|sel|[su]sat(16)?|nop|pop|push|rfe([id][ab])?|stm([id][ab])?|str(ex)?[bhd]?|(qd?)?sub|(sh?|q|u[qh]?)?sub(8|16)|[su]xt(a?h|a?b(16)?)|srs([id][ab])?|swpb?|swi|smi|tst|teq|wfe|wfi|yield)(eq|ne|cs|cc|mi|pl|vs|vc|hi|ls|ge|lt|gt|le|al|hs|lo)?[sptrx]?(?=\\s)"},e,s.QUOTE_STRING_MODE,{className:"string",begin:"'",end:"[^\\\\]'",relevance:0},{className:"title",begin:"\\|",end:"\\|",illegal:"\\n",relevance:0},{className:"number",variants:[{begin:"[#$=]?0x[0-9a-f]+"},{begin:"[#$=]?0b[01]+"},{begin:"[#$=]\\d+"},{begin:"\\b\\d+"}],relevance:0},{className:"symbol",variants:[{begin:"^[ \\t]*[a-z_\\.\\$][a-z0-9_\\.\\$]+:"},{begin:"^[a-z_\\.\\$][a-z0-9_\\.\\$]+"},{begin:"[=#]\\w+"}],relevance:0}]}}}());hljs.registerLanguage("go",function(){"use strict";return function(e){var n={keyword:"break default func interface select case map struct chan else goto package switch const fallthrough if range type continue for import return var go defer bool byte complex64 complex128 float32 float64 int8 int16 int32 int64 string 
uint8 uint16 uint32 uint64 int uint uintptr rune",literal:"true false iota nil",built_in:"append cap close complex copy imag len make new panic print println real recover delete"};return{name:"Go",aliases:["golang"],keywords:n,illegal:">>|\.\.\.) /},i={className:"subst",begin:/\{/,end:/\}/,keywords:n,illegal:/#/},s={begin:/\{\{/,relevance:0},r={className:"string",contains:[e.BACKSLASH_ESCAPE],variants:[{begin:/(u|b)?r?'''/,end:/'''/,contains:[e.BACKSLASH_ESCAPE,a],relevance:10},{begin:/(u|b)?r?"""/,end:/"""/,contains:[e.BACKSLASH_ESCAPE,a],relevance:10},{begin:/(fr|rf|f)'''/,end:/'''/,contains:[e.BACKSLASH_ESCAPE,a,s,i]},{begin:/(fr|rf|f)"""/,end:/"""/,contains:[e.BACKSLASH_ESCAPE,a,s,i]},{begin:/(u|r|ur)'/,end:/'/,relevance:10},{begin:/(u|r|ur)"/,end:/"/,relevance:10},{begin:/(b|br)'/,end:/'/},{begin:/(b|br)"/,end:/"/},{begin:/(fr|rf|f)'/,end:/'/,contains:[e.BACKSLASH_ESCAPE,s,i]},{begin:/(fr|rf|f)"/,end:/"/,contains:[e.BACKSLASH_ESCAPE,s,i]},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]},l={className:"number",relevance:0,variants:[{begin:e.BINARY_NUMBER_RE+"[lLjJ]?"},{begin:"\\b(0o[0-7]+)[lLjJ]?"},{begin:e.C_NUMBER_RE+"[lLjJ]?"}]},t={className:"params",variants:[{begin:/\(\s*\)/,skip:!0,className:null},{begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,contains:["self",a,l,r,e.HASH_COMMENT_MODE]}]};return i.contains=[r,l,a],{name:"Python",aliases:["py","gyp","ipython"],keywords:n,illegal:/(<\/|->|\?)|=>/,contains:[a,l,{beginKeywords:"if",relevance:0},r,e.HASH_COMMENT_MODE,{variants:[{className:"function",beginKeywords:"def"},{className:"class",beginKeywords:"class"}],end:/:/,illegal:/[${=;\n,]/,contains:[e.UNDERSCORE_TITLE_MODE,t,{begin:/->/,endsWithParent:!0,keywords:"None"}]},{className:"meta",begin:/^[\t ]*@/,end:/$/},{begin:/\b(print|exec)\(/}]}}}());hljs.registerLanguage("shell",function(){"use strict";return function(s){return{name:"Shell 
Session",aliases:["console"],contains:[{className:"meta",begin:"^\\s{0,3}[/\\w\\d\\[\\]()@-]*[>%$#]",starts:{end:"$",subLanguage:"bash"}}]}}}());hljs.registerLanguage("scala",function(){"use strict";return function(e){var n={className:"subst",variants:[{begin:"\\$[A-Za-z0-9_]+"},{begin:"\\${",end:"}"}]},a={className:"string",variants:[{begin:'"',end:'"',illegal:"\\n",contains:[e.BACKSLASH_ESCAPE]},{begin:'"""',end:'"""',relevance:10},{begin:'[a-z]+"',end:'"',illegal:"\\n",contains:[e.BACKSLASH_ESCAPE,n]},{className:"string",begin:'[a-z]+"""',end:'"""',contains:[n],relevance:10}]},s={className:"type",begin:"\\b[A-Z][A-Za-z0-9_]*",relevance:0},t={className:"title",begin:/[^0-9\n\t "'(),.`{}\[\]:;][^\n\t "'(),.`{}\[\]:;]+|[^0-9\n\t "'(),.`{}\[\]:;=]/,relevance:0},i={className:"class",beginKeywords:"class object trait type",end:/[:={\[\n;]/,excludeEnd:!0,contains:[{beginKeywords:"extends with",relevance:10},{begin:/\[/,end:/\]/,excludeBegin:!0,excludeEnd:!0,relevance:0,contains:[s]},{className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,relevance:0,contains:[s]},t]},l={className:"function",beginKeywords:"def",end:/[:={\[(\n;]/,excludeEnd:!0,contains:[t]};return{name:"Scala",keywords:{literal:"true false null",keyword:"type yield lazy override def with val var sealed abstract private trait object if forSome for while throw finally protected extends import final return else break new catch super class case package default try this match continue throws implicit"},contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,a,{className:"symbol",begin:"'\\w[\\w\\d_]*(?!')"},s,l,i,e.C_NUMBER_MODE,{className:"meta",begin:"@[A-Za-z]+"}]}}}());hljs.registerLanguage("julia",function(){"use strict";return function(e){var r="[A-Za-z_\\u00A1-\\uFFFF][A-Za-z_0-9\\u00A1-\\uFFFF]*",t={$pattern:r,keyword:"in isa where baremodule begin break catch ccall const continue do else elseif end export false finally for function global if import importall let local macro module quote 
return true try using while type immutable abstract bitstype typealias ",literal:"true false ARGS C_NULL DevNull ENDIAN_BOM ENV I Inf Inf16 Inf32 Inf64 InsertionSort JULIA_HOME LOAD_PATH MergeSort NaN NaN16 NaN32 NaN64 PROGRAM_FILE QuickSort RoundDown RoundFromZero RoundNearest RoundNearestTiesAway RoundNearestTiesUp RoundToZero RoundUp STDERR STDIN STDOUT VERSION catalan e|0 eu|0 eulergamma golden im nothing pi γ π φ ",built_in:"ANY AbstractArray AbstractChannel AbstractFloat AbstractMatrix AbstractRNG AbstractSerializer AbstractSet AbstractSparseArray AbstractSparseMatrix AbstractSparseVector AbstractString AbstractUnitRange AbstractVecOrMat AbstractVector Any ArgumentError Array AssertionError Associative Base64DecodePipe Base64EncodePipe Bidiagonal BigFloat BigInt BitArray BitMatrix BitVector Bool BoundsError BufferStream CachingPool CapturedException CartesianIndex CartesianRange Cchar Cdouble Cfloat Channel Char Cint Cintmax_t Clong Clonglong ClusterManager Cmd CodeInfo Colon Complex Complex128 Complex32 Complex64 CompositeException Condition ConjArray ConjMatrix ConjVector Cptrdiff_t Cshort Csize_t Cssize_t Cstring Cuchar Cuint Cuintmax_t Culong Culonglong Cushort Cwchar_t Cwstring DataType Date DateFormat DateTime DenseArray DenseMatrix DenseVecOrMat DenseVector Diagonal Dict DimensionMismatch Dims DirectIndexString Display DivideError DomainError EOFError EachLine Enum Enumerate ErrorException Exception ExponentialBackOff Expr Factorization FileMonitor Float16 Float32 Float64 Function Future GlobalRef GotoNode HTML Hermitian IO IOBuffer IOContext IOStream IPAddr IPv4 IPv6 IndexCartesian IndexLinear IndexStyle InexactError InitError Int Int128 Int16 Int32 Int64 Int8 IntSet Integer InterruptException InvalidStateException Irrational KeyError LabelNode LinSpace LineNumberNode LoadError LowerTriangular MIME Matrix MersenneTwister Method MethodError MethodTable Module NTuple NewvarNode NullException Nullable Number ObjectIdDict OrdinalRange OutOfMemoryError 
OverflowError Pair ParseError PartialQuickSort PermutedDimsArray Pipe PollingFileWatcher ProcessExitedException Ptr QuoteNode RandomDevice Range RangeIndex Rational RawFD ReadOnlyMemoryError Real ReentrantLock Ref Regex RegexMatch RemoteChannel RemoteException RevString RoundingMode RowVector SSAValue SegmentationFault SerializationState Set SharedArray SharedMatrix SharedVector Signed SimpleVector Slot SlotNumber SparseMatrixCSC SparseVector StackFrame StackOverflowError StackTrace StepRange StepRangeLen StridedArray StridedMatrix StridedVecOrMat StridedVector String SubArray SubString SymTridiagonal Symbol Symmetric SystemError TCPSocket Task Text TextDisplay Timer Tridiagonal Tuple Type TypeError TypeMapEntry TypeMapLevel TypeName TypeVar TypedSlot UDPSocket UInt UInt128 UInt16 UInt32 UInt64 UInt8 UndefRefError UndefVarError UnicodeError UniformScaling Union UnionAll UnitRange Unsigned UpperTriangular Val Vararg VecElement VecOrMat Vector VersionNumber Void WeakKeyDict WeakRef WorkerConfig WorkerPool "},a={keywords:t,illegal:/<\//},n={className:"subst",begin:/\$\(/,end:/\)/,keywords:t},o={className:"variable",begin:"\\$"+r},i={className:"string",contains:[e.BACKSLASH_ESCAPE,n,o],variants:[{begin:/\w*"""/,end:/"""\w*/,relevance:10},{begin:/\w*"/,end:/"\w*/}]},l={className:"string",contains:[e.BACKSLASH_ESCAPE,n,o],begin:"`",end:"`"},s={className:"meta",begin:"@"+r};return a.name="Julia",a.contains=[{className:"number",begin:/(\b0x[\d_]*(\.[\d_]*)?|0x\.\d[\d_]*)p[-+]?\d+|\b0[box][a-fA-F0-9][a-fA-F0-9_]*|(\b\d[\d_]*(\.[\d_]*)?|\.\d[\d_]*)([eEfF][-+]?\d+)?/,relevance:0},{className:"string",begin:/'(.|\\[xXuU][a-zA-Z0-9]+)'/},i,l,s,{className:"comment",variants:[{begin:"#=",end:"=#",relevance:10},{begin:"#",end:"$"}]},e.HASH_COMMENT_MODE,{className:"keyword",begin:"\\b(((abstract|primitive)\\s+)type|(mutable\\s+)?struct)\\b"},{begin:/<:/}],n.contains=a.contains,a}}());hljs.registerLanguage("php-template",function(){"use strict";return function(n){return{name:"PHP 
template",subLanguage:"xml",contains:[{begin:/<\?(php|=)?/,end:/\?>/,subLanguage:"php",contains:[{begin:"/\\*",end:"\\*/",skip:!0},{begin:'b"',end:'"',skip:!0},{begin:"b'",end:"'",skip:!0},n.inherit(n.APOS_STRING_MODE,{illegal:null,className:null,contains:null,skip:!0}),n.inherit(n.QUOTE_STRING_MODE,{illegal:null,className:null,contains:null,skip:!0})]}]}}}());hljs.registerLanguage("scss",function(){"use strict";return function(e){var t={className:"variable",begin:"(\\$[a-zA-Z-][a-zA-Z0-9_-]*)\\b"},i={className:"number",begin:"#[0-9A-Fa-f]+"};return e.CSS_NUMBER_MODE,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,e.C_BLOCK_COMMENT_MODE,{name:"SCSS",case_insensitive:!0,illegal:"[=/|']",contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,{className:"selector-id",begin:"\\#[A-Za-z0-9_-]+",relevance:0},{className:"selector-class",begin:"\\.[A-Za-z0-9_-]+",relevance:0},{className:"selector-attr",begin:"\\[",end:"\\]",illegal:"$"},{className:"selector-tag",begin:"\\b(a|abbr|acronym|address|area|article|aside|audio|b|base|big|blockquote|body|br|button|canvas|caption|cite|code|col|colgroup|command|datalist|dd|del|details|dfn|div|dl|dt|em|embed|fieldset|figcaption|figure|footer|form|frame|frameset|(h[1-6])|head|header|hgroup|hr|html|i|iframe|img|input|ins|kbd|keygen|label|legend|li|link|map|mark|meta|meter|nav|noframes|noscript|object|ol|optgroup|option|output|p|param|pre|progress|q|rp|rt|ruby|samp|script|section|select|small|span|strike|strong|style|sub|sup|table|tbody|td|textarea|tfoot|th|thead|time|title|tr|tt|ul|var|video)\\b",relevance:0},{className:"selector-pseudo",begin:":(visited|valid|root|right|required|read-write|read-only|out-range|optional|only-of-type|only-child|nth-of-type|nth-last-of-type|nth-last-child|nth-child|not|link|left|last-of-type|last-child|lang|invalid|indeterminate|in-range|hover|focus|first-of-type|first-line|first-letter|first-child|first|enabled|empty|disabled|default|checked|before|after|active)"},{className:"selector-pseudo",begin:"::(after|befor
e|choices|first-letter|first-line|repeat-index|repeat-item|selection|value)"},t,{className:"attribute",begin:"\\b(src|z-index|word-wrap|word-spacing|word-break|width|widows|white-space|visibility|vertical-align|unicode-bidi|transition-timing-function|transition-property|transition-duration|transition-delay|transition|transform-style|transform-origin|transform|top|text-underline-position|text-transform|text-shadow|text-rendering|text-overflow|text-indent|text-decoration-style|text-decoration-line|text-decoration-color|text-decoration|text-align-last|text-align|tab-size|table-layout|right|resize|quotes|position|pointer-events|perspective-origin|perspective|page-break-inside|page-break-before|page-break-after|padding-top|padding-right|padding-left|padding-bottom|padding|overflow-y|overflow-x|overflow-wrap|overflow|outline-width|outline-style|outline-offset|outline-color|outline|orphans|order|opacity|object-position|object-fit|normal|none|nav-up|nav-right|nav-left|nav-index|nav-down|min-width|min-height|max-width|max-height|mask|marks|margin-top|margin-right|margin-left|margin-bottom|margin|list-style-type|list-style-position|list-style-image|list-style|line-height|letter-spacing|left|justify-content|initial|inherit|ime-mode|image-orientation|image-resolution|image-rendering|icon|hyphens|height|font-weight|font-variant-ligatures|font-variant|font-style|font-stretch|font-size-adjust|font-size|font-language-override|font-kerning|font-feature-settings|font-family|font|float|flex-wrap|flex-shrink|flex-grow|flex-flow|flex-direction|flex-basis|flex|filter|empty-cells|display|direction|cursor|counter-reset|counter-increment|content|column-width|column-span|column-rule-width|column-rule-style|column-rule-color|column-rule|column-gap|column-fill|column-count|columns|color|clip-path|clip|clear|caption-side|break-inside|break-before|break-after|box-sizing|box-shadow|box-decoration-break|bottom|border-width|border-top-width|border-top-style|border-top-right-radius|border-top-left-r
adius|border-top-color|border-top|border-style|border-spacing|border-right-width|border-right-style|border-right-color|border-right|border-radius|border-left-width|border-left-style|border-left-color|border-left|border-image-width|border-image-source|border-image-slice|border-image-repeat|border-image-outset|border-image|border-color|border-collapse|border-bottom-width|border-bottom-style|border-bottom-right-radius|border-bottom-left-radius|border-bottom-color|border-bottom|border|background-size|background-repeat|background-position|background-origin|background-image|background-color|background-clip|background-attachment|background-blend-mode|background|backface-visibility|auto|animation-timing-function|animation-play-state|animation-name|animation-iteration-count|animation-fill-mode|animation-duration|animation-direction|animation-delay|animation|align-self|align-items|align-content)\\b",illegal:"[^\\s]"},{begin:"\\b(whitespace|wait|w-resize|visible|vertical-text|vertical-ideographic|uppercase|upper-roman|upper-alpha|underline|transparent|top|thin|thick|text|text-top|text-bottom|tb-rl|table-header-group|table-footer-group|sw-resize|super|strict|static|square|solid|small-caps|separate|se-resize|scroll|s-resize|rtl|row-resize|ridge|right|repeat|repeat-y|repeat-x|relative|progress|pointer|overline|outside|outset|oblique|nowrap|not-allowed|normal|none|nw-resize|no-repeat|no-drop|newspaper|ne-resize|n-resize|move|middle|medium|ltr|lr-tb|lowercase|lower-roman|lower-alpha|loose|list-item|line|line-through|line-edge|lighter|left|keep-all|justify|italic|inter-word|inter-ideograph|inside|inset|inline|inline-block|inherit|inactive|ideograph-space|ideograph-parenthesis|ideograph-numeric|ideograph-alpha|horizontal|hidden|help|hand|groove|fixed|ellipsis|e-resize|double|dotted|distribute|distribute-space|distribute-letter|distribute-all-lines|disc|disabled|default|decimal|dashed|crosshair|collapse|col-resize|circle|char|center|capitalize|break-word|break-all|bottom|both|bolder|b
old|block|bidi-override|below|baseline|auto|always|all-scroll|absolute|table|table-cell)\\b"},{begin:":",end:";",contains:[t,i,e.CSS_NUMBER_MODE,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,{className:"meta",begin:"!important"}]},{begin:"@(page|font-face)",lexemes:"@[a-z-]+",keywords:"@page @font-face"},{begin:"@",end:"[{;]",returnBegin:!0,keywords:"and or not only",contains:[{begin:"@[a-z-]+",className:"keyword"},t,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,i,e.CSS_NUMBER_MODE]}]}}}());hljs.registerLanguage("r",function(){"use strict";return function(e){var n="([a-zA-Z]|\\.[a-zA-Z.])[a-zA-Z0-9._]*";return{name:"R",contains:[e.HASH_COMMENT_MODE,{begin:n,keywords:{$pattern:n,keyword:"function if in break next repeat else for return switch while try tryCatch stop warning require library attach detach source setMethod setGeneric setGroupGeneric setClass ...",literal:"NULL NA TRUE FALSE T F Inf NaN NA_integer_|10 NA_real_|10 NA_character_|10 NA_complex_|10"},relevance:0},{className:"number",begin:"0[xX][0-9a-fA-F]+[Li]?\\b",relevance:0},{className:"number",begin:"\\d+(?:[eE][+\\-]?\\d*)?L\\b",relevance:0},{className:"number",begin:"\\d+\\.(?!\\d)(?:i\\b)?",relevance:0},{className:"number",begin:"\\d+(?:\\.\\d*)?(?:[eE][+\\-]?\\d*)?i?\\b",relevance:0},{className:"number",begin:"\\.\\d+(?:[eE][+\\-]?\\d*)?i?\\b",relevance:0},{begin:"`",end:"`",relevance:0},{className:"string",contains:[e.BACKSLASH_ESCAPE],variants:[{begin:'"',end:'"'},{begin:"'",end:"'"}]}]}}}());hljs.registerLanguage("sql",function(){"use strict";return function(e){var t=e.COMMENT("--","$");return{name:"SQL",case_insensitive:!0,illegal:/[<>{}*]/,contains:[{beginKeywords:"begin end start commit rollback savepoint lock alter create drop rename call delete do handler insert load replace select truncate update set show pragma grant merge describe use explain help declare prepare execute deallocate release unlock purge reset change stop analyze cache flush optimize repair kill install uninstall checksum restore check 
backup revoke comment values with",end:/;/,endsWithParent:!0,keywords:{$pattern:/[\w\.]+/,keyword:"as abort abs absolute acc acce accep accept access accessed accessible account acos action activate add addtime admin administer advanced advise aes_decrypt aes_encrypt after agent aggregate ali alia alias all allocate allow alter always analyze ancillary and anti any anydata anydataset anyschema anytype apply archive archived archivelog are as asc ascii asin assembly assertion associate asynchronous at atan atn2 attr attri attrib attribu attribut attribute attributes audit authenticated authentication authid authors auto autoallocate autodblink autoextend automatic availability avg backup badfile basicfile before begin beginning benchmark between bfile bfile_base big bigfile bin binary_double binary_float binlog bit_and bit_count bit_length bit_or bit_xor bitmap blob_base block blocksize body both bound bucket buffer_cache buffer_pool build bulk by byte byteordermark bytes cache caching call calling cancel capacity cascade cascaded case cast catalog category ceil ceiling chain change changed char_base char_length character_length characters characterset charindex charset charsetform charsetid check checksum checksum_agg child choose chr chunk class cleanup clear client clob clob_base clone close cluster_id cluster_probability cluster_set clustering coalesce coercibility col collate collation collect colu colum column column_value columns columns_updated comment commit compact compatibility compiled complete composite_limit compound compress compute concat concat_ws concurrent confirm conn connec connect connect_by_iscycle connect_by_isleaf connect_by_root connect_time connection consider consistent constant constraint constraints constructor container content contents context contributors controlfile conv convert convert_tz corr corr_k corr_s corresponding corruption cos cost count count_big counted covar_pop covar_samp cpu_per_call cpu_per_session crc32 create 
creation critical cross cube cume_dist curdate current current_date current_time current_timestamp current_user cursor curtime customdatum cycle data database databases datafile datafiles datalength date_add date_cache date_format date_sub dateadd datediff datefromparts datename datepart datetime2fromparts day day_to_second dayname dayofmonth dayofweek dayofyear days db_role_change dbtimezone ddl deallocate declare decode decompose decrement decrypt deduplicate def defa defau defaul default defaults deferred defi defin define degrees delayed delegate delete delete_all delimited demand dense_rank depth dequeue des_decrypt des_encrypt des_key_file desc descr descri describ describe descriptor deterministic diagnostics difference dimension direct_load directory disable disable_all disallow disassociate discardfile disconnect diskgroup distinct distinctrow distribute distributed div do document domain dotnet double downgrade drop dumpfile duplicate duration each edition editionable editions element ellipsis else elsif elt empty enable enable_all enclosed encode encoding encrypt end end-exec endian enforced engine engines enqueue enterprise entityescaping eomonth error errors escaped evalname evaluate event eventdata events except exception exceptions exchange exclude excluding execu execut execute exempt exists exit exp expire explain explode export export_set extended extent external external_1 external_2 externally extract failed failed_login_attempts failover failure far fast feature_set feature_value fetch field fields file file_name_convert filesystem_like_logging final finish first first_value fixed flash_cache flashback floor flush following follows for forall force foreign form forma format found found_rows freelist freelists freepools fresh from from_base64 from_days ftp full function general generated get get_format get_lock getdate getutcdate global global_name globally go goto grant grants greatest group group_concat group_id grouping grouping_id groups 
gtid_subtract guarantee guard handler hash hashkeys having hea head headi headin heading heap help hex hierarchy high high_priority hosts hour hours http id ident_current ident_incr ident_seed identified identity idle_time if ifnull ignore iif ilike ilm immediate import in include including increment index indexes indexing indextype indicator indices inet6_aton inet6_ntoa inet_aton inet_ntoa infile initial initialized initially initrans inmemory inner innodb input insert install instance instantiable instr interface interleaved intersect into invalidate invisible is is_free_lock is_ipv4 is_ipv4_compat is_not is_not_null is_used_lock isdate isnull isolation iterate java join json json_exists keep keep_duplicates key keys kill language large last last_day last_insert_id last_value lateral lax lcase lead leading least leaves left len lenght length less level levels library like like2 like4 likec limit lines link list listagg little ln load load_file lob lobs local localtime localtimestamp locate locator lock locked log log10 log2 logfile logfiles logging logical logical_reads_per_call logoff logon logs long loop low low_priority lower lpad lrtrim ltrim main make_set makedate maketime managed management manual map mapping mask master master_pos_wait match matched materialized max maxextents maximize maxinstances maxlen maxlogfiles maxloghistory maxlogmembers maxsize maxtrans md5 measures median medium member memcompress memory merge microsecond mid migration min minextents minimum mining minus minute minutes minvalue missing mod mode model modification modify module monitoring month months mount move movement multiset mutex name name_const names nan national native natural nav nchar nclob nested never new newline next nextval no no_write_to_binlog noarchivelog noaudit nobadfile nocheck nocompress nocopy nocycle nodelay nodiscardfile noentityescaping noguarantee nokeep nologfile nomapping nomaxvalue nominimize nominvalue nomonitoring none noneditionable nonschema 
noorder nopr nopro noprom nopromp noprompt norely noresetlogs noreverse normal norowdependencies noschemacheck noswitch not nothing notice notnull notrim novalidate now nowait nth_value nullif nulls num numb numbe nvarchar nvarchar2 object ocicoll ocidate ocidatetime ociduration ociinterval ociloblocator ocinumber ociref ocirefcursor ocirowid ocistring ocitype oct octet_length of off offline offset oid oidindex old on online only opaque open operations operator optimal optimize option optionally or oracle oracle_date oradata ord ordaudio orddicom orddoc order ordimage ordinality ordvideo organization orlany orlvary out outer outfile outline output over overflow overriding package pad parallel parallel_enable parameters parent parse partial partition partitions pascal passing password password_grace_time password_lock_time password_reuse_max password_reuse_time password_verify_function patch path patindex pctincrease pctthreshold pctused pctversion percent percent_rank percentile_cont percentile_disc performance period period_add period_diff permanent physical pi pipe pipelined pivot pluggable plugin policy position post_transaction pow power pragma prebuilt precedes preceding precision prediction prediction_cost prediction_details prediction_probability prediction_set prepare present preserve prior priority private private_sga privileges procedural procedure procedure_analyze processlist profiles project prompt protection public publishingservername purge quarter query quick quiesce quota quotename radians raise rand range rank raw read reads readsize rebuild record records recover recovery recursive recycle redo reduced ref reference referenced references referencing refresh regexp_like register regr_avgx regr_avgy regr_count regr_intercept regr_r2 regr_slope regr_sxx regr_sxy reject rekey relational relative relaylog release release_lock relies_on relocate rely rem remainder rename repair repeat replace replicate replication required reset resetlogs resize 
resource respect restore restricted result result_cache resumable resume retention return returning returns reuse reverse revoke right rlike role roles rollback rolling rollup round row row_count rowdependencies rowid rownum rows rtrim rules safe salt sample save savepoint sb1 sb2 sb4 scan schema schemacheck scn scope scroll sdo_georaster sdo_topo_geometry search sec_to_time second seconds section securefile security seed segment select self semi sequence sequential serializable server servererror session session_user sessions_per_user set sets settings sha sha1 sha2 share shared shared_pool short show shrink shutdown si_averagecolor si_colorhistogram si_featurelist si_positionalcolor si_stillimage si_texture siblings sid sign sin size size_t sizes skip slave sleep smalldatetimefromparts smallfile snapshot some soname sort soundex source space sparse spfile split sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows sql_small_result sql_variant_property sqlcode sqldata sqlerror sqlname sqlstate sqrt square standalone standby start starting startup statement static statistics stats_binomial_test stats_crosstab stats_ks_test stats_mode stats_mw_test stats_one_way_anova stats_t_test_ stats_t_test_indep stats_t_test_one stats_t_test_paired stats_wsr_test status std stddev stddev_pop stddev_samp stdev stop storage store stored str str_to_date straight_join strcmp strict string struct stuff style subdate subpartition subpartitions substitutable substr substring subtime subtring_index subtype success sum suspend switch switchoffset switchover sync synchronous synonym sys sys_xmlagg sysasm sysaux sysdate sysdatetimeoffset sysdba sysoper system system_user sysutcdatetime table tables tablespace tablesample tan tdo template temporary terminated tertiary_weights test than then thread through tier ties time time_format time_zone timediff timefromparts timeout timestamp timestampadd timestampdiff timezone_abbr timezone_minute timezone_region to to_base64 to_date 
to_days to_seconds todatetimeoffset trace tracking transaction transactional translate translation treat trigger trigger_nestlevel triggers trim truncate try_cast try_convert try_parse type ub1 ub2 ub4 ucase unarchived unbounded uncompress under undo unhex unicode uniform uninstall union unique unix_timestamp unknown unlimited unlock unnest unpivot unrecoverable unsafe unsigned until untrusted unusable unused update updated upgrade upped upper upsert url urowid usable usage use use_stored_outlines user user_data user_resources users using utc_date utc_timestamp uuid uuid_short validate validate_password_strength validation valist value values var var_samp varcharc vari varia variab variabl variable variables variance varp varraw varrawc varray verify version versions view virtual visible void wait wallet warning warnings week weekday weekofyear wellformed when whene whenev wheneve whenever where while whitespace window with within without work wrapped xdb xml xmlagg xmlattributes xmlcast xmlcolattval xmlelement xmlexists xmlforest xmlindex xmlnamespaces xmlpi xmlquery xmlroot xmlschema xmlserialize xmltable xmltype xor year year_to_month years yearweek",literal:"true false null unknown",built_in:"array bigint binary bit blob bool boolean char character date dec decimal float int int8 integer interval number numeric real record serial serial8 smallint text time timestamp tinyint varchar varchar2 varying void"},contains:[{className:"string",begin:"'",end:"'",contains:[{begin:"''"}]},{className:"string",begin:'"',end:'"',contains:[{begin:'""'}]},{className:"string",begin:"`",end:"`"},e.C_NUMBER_MODE,e.C_BLOCK_COMMENT_MODE,t,e.HASH_COMMENT_MODE]},e.C_BLOCK_COMMENT_MODE,t,e.HASH_COMMENT_MODE]}}}());hljs.registerLanguage("c",function(){"use strict";return function(e){var n=e.getLanguage("c-like").rawDefinition();return n.name="C",n.aliases=["c","h"],n}}());hljs.registerLanguage("json",function(){"use strict";return function(n){var e={literal:"true false 
null"},i=[n.C_LINE_COMMENT_MODE,n.C_BLOCK_COMMENT_MODE],t=[n.QUOTE_STRING_MODE,n.C_NUMBER_MODE],a={end:",",endsWithParent:!0,excludeEnd:!0,contains:t,keywords:e},l={begin:"{",end:"}",contains:[{className:"attr",begin:/"/,end:/"/,contains:[n.BACKSLASH_ESCAPE],illegal:"\\n"},n.inherit(a,{begin:/:/})].concat(i),illegal:"\\S"},s={begin:"\\[",end:"\\]",contains:[n.inherit(a)],illegal:"\\S"};return t.push(l,s),i.forEach((function(n){t.push(n)})),{name:"JSON",contains:t,keywords:e,illegal:"\\S"}}}());hljs.registerLanguage("python-repl",function(){"use strict";return function(n){return{aliases:["pycon"],contains:[{className:"meta",starts:{end:/ |$/,starts:{end:"$",subLanguage:"python"}},variants:[{begin:/^>>>(?=[ ]|$)/},{begin:/^\.\.\.(?=[ ]|$)/}]}]}}}());hljs.registerLanguage("markdown",function(){"use strict";return function(n){const e={begin:"<",end:">",subLanguage:"xml",relevance:0},a={begin:"\\[.+?\\][\\(\\[].*?[\\)\\]]",returnBegin:!0,contains:[{className:"string",begin:"\\[",end:"\\]",excludeBegin:!0,returnEnd:!0,relevance:0},{className:"link",begin:"\\]\\(",end:"\\)",excludeBegin:!0,excludeEnd:!0},{className:"symbol",begin:"\\]\\[",end:"\\]",excludeBegin:!0,excludeEnd:!0}],relevance:10},i={className:"strong",contains:[],variants:[{begin:/_{2}/,end:/_{2}/},{begin:/\*{2}/,end:/\*{2}/}]},s={className:"emphasis",contains:[],variants:[{begin:/\*(?!\*)/,end:/\*/},{begin:/_(?!_)/,end:/_/,relevance:0}]};i.contains.push(s),s.contains.push(i);var c=[e,a];return i.contains=i.contains.concat(c),s.contains=s.contains.concat(c),{name:"Markdown",aliases:["md","mkdown","mkd"],contains:[{className:"section",variants:[{begin:"^#{1,6}",end:"$",contains:c=c.concat(i,s)},{begin:"(?=^.+?\\n[=-]{2,}$)",contains:[{begin:"^[=-]*$"},{begin:"^",end:"\\n",contains:c}]}]},e,{className:"bullet",begin:"^[ \t]*([*+-]|(\\d+\\.))(?=\\s+)",end:"\\s+",excludeEnd:!0},i,s,{className:"quote",begin:"^>\\s+",contains:c,end:"$"},{className:"code",variants:[{begin:"(`{3,})(.|\\n)*?\\1`*[ 
]*"},{begin:"(~{3,})(.|\\n)*?\\1~*[ ]*"},{begin:"```",end:"```+[ ]*$"},{begin:"~~~",end:"~~~+[ ]*$"},{begin:"`.+?`"},{begin:"(?=^( {4}|\\t))",contains:[{begin:"^( {4}|\\t)",end:"(\\n)$"}],relevance:0}]},{begin:"^[-\\*]{3,}",end:"$"},a,{begin:/^\[[^\n]+\]:/,returnBegin:!0,contains:[{className:"symbol",begin:/\[/,end:/\]/,excludeBegin:!0,excludeEnd:!0},{className:"link",begin:/:\s*/,end:/$/,excludeBegin:!0}]}]}}}());hljs.registerLanguage("javascript",function(){"use strict";const e=["as","in","of","if","for","while","finally","var","new","function","do","return","void","else","break","catch","instanceof","with","throw","case","default","try","switch","continue","typeof","delete","let","yield","const","class","debugger","async","await","static","import","from","export","extends"],n=["true","false","null","undefined","NaN","Infinity"],a=[].concat(["setInterval","setTimeout","clearInterval","clearTimeout","require","exports","eval","isFinite","isNaN","parseFloat","parseInt","decodeURI","decodeURIComponent","encodeURI","encodeURIComponent","escape","unescape"],["arguments","this","super","console","window","document","localStorage","module","global"],["Intl","DataView","Number","Math","Date","String","RegExp","Object","Function","Boolean","Error","Symbol","Set","Map","WeakSet","WeakMap","Proxy","Reflect","JSON","Promise","Float64Array","Int16Array","Int32Array","Int8Array","Uint16Array","Uint32Array","Float32Array","Array","Uint8Array","Uint8ClampedArray","ArrayBuffer"],["EvalError","InternalError","RangeError","ReferenceError","SyntaxError","TypeError","URIError"]);function s(e){return r("(?=",e,")")}function r(...e){return e.map(e=>(function(e){return e?"string"==typeof e?e:e.source:null})(e)).join("")}return function(t){var i="[A-Za-z$_][0-9A-Za-z$_]*",c={begin:/<[A-Za-z0-9\\._:-]+/,end:/\/[A-Za-z0-9\\._:-]+>|\/>/},o={$pattern:"[A-Za-z$_][0-9A-Za-z$_]*",keyword:e.join(" "),literal:n.join(" "),built_in:a.join(" 
")},l={className:"number",variants:[{begin:"\\b(0[bB][01]+)n?"},{begin:"\\b(0[oO][0-7]+)n?"},{begin:t.C_NUMBER_RE+"n?"}],relevance:0},E={className:"subst",begin:"\\$\\{",end:"\\}",keywords:o,contains:[]},d={begin:"html`",end:"",starts:{end:"`",returnEnd:!1,contains:[t.BACKSLASH_ESCAPE,E],subLanguage:"xml"}},g={begin:"css`",end:"",starts:{end:"`",returnEnd:!1,contains:[t.BACKSLASH_ESCAPE,E],subLanguage:"css"}},u={className:"string",begin:"`",end:"`",contains:[t.BACKSLASH_ESCAPE,E]};E.contains=[t.APOS_STRING_MODE,t.QUOTE_STRING_MODE,d,g,u,l,t.REGEXP_MODE];var b=E.contains.concat([{begin:/\(/,end:/\)/,contains:["self"].concat(E.contains,[t.C_BLOCK_COMMENT_MODE,t.C_LINE_COMMENT_MODE])},t.C_BLOCK_COMMENT_MODE,t.C_LINE_COMMENT_MODE]),_={className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,contains:b};return{name:"JavaScript",aliases:["js","jsx","mjs","cjs"],keywords:o,contains:[t.SHEBANG({binary:"node",relevance:5}),{className:"meta",relevance:10,begin:/^\s*['"]use (strict|asm)['"]/},t.APOS_STRING_MODE,t.QUOTE_STRING_MODE,d,g,u,t.C_LINE_COMMENT_MODE,t.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{className:"doctag",begin:"@[A-Za-z]+",contains:[{className:"type",begin:"\\{",end:"\\}",relevance:0},{className:"variable",begin:i+"(?=\\s*(-)|$)",endsParent:!0,relevance:0},{begin:/(?=[^\n])\s/,relevance:0}]}]}),t.C_BLOCK_COMMENT_MODE,l,{begin:r(/[{,\n]\s*/,s(r(/(((\/\/.*)|(\/\*(.|\n)*\*\/))\s*)*/,i+"\\s*:"))),relevance:0,contains:[{className:"attr",begin:i+s("\\s*:"),relevance:0}]},{begin:"("+t.RE_STARTERS_RE+"|\\b(case|return|throw)\\b)\\s*",keywords:"return throw 
case",contains:[t.C_LINE_COMMENT_MODE,t.C_BLOCK_COMMENT_MODE,t.REGEXP_MODE,{className:"function",begin:"(\\([^(]*(\\([^(]*(\\([^(]*\\))?\\))?\\)|"+t.UNDERSCORE_IDENT_RE+")\\s*=>",returnBegin:!0,end:"\\s*=>",contains:[{className:"params",variants:[{begin:t.UNDERSCORE_IDENT_RE},{className:null,begin:/\(\s*\)/,skip:!0},{begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:o,contains:b}]}]},{begin:/,/,relevance:0},{className:"",begin:/\s/,end:/\s*/,skip:!0},{variants:[{begin:"<>",end:""},{begin:c.begin,end:c.end}],subLanguage:"xml",contains:[{begin:c.begin,end:c.end,skip:!0,contains:["self"]}]}],relevance:0},{className:"function",beginKeywords:"function",end:/\{/,excludeEnd:!0,contains:[t.inherit(t.TITLE_MODE,{begin:i}),_],illegal:/\[|%/},{begin:/\$[(.]/},t.METHOD_GUARD,{className:"class",beginKeywords:"class",end:/[{;=]/,excludeEnd:!0,illegal:/[:"\[\]]/,contains:[{beginKeywords:"extends"},t.UNDERSCORE_TITLE_MODE]},{beginKeywords:"constructor",end:/\{/,excludeEnd:!0},{begin:"(get|set)\\s+(?="+i+"\\()",end:/{/,keywords:"get set",contains:[t.inherit(t.TITLE_MODE,{begin:i}),{begin:/\(\)/},_]}],illegal:/#(?!!)/}}}());hljs.registerLanguage("typescript",function(){"use strict";const 
e=["as","in","of","if","for","while","finally","var","new","function","do","return","void","else","break","catch","instanceof","with","throw","case","default","try","switch","continue","typeof","delete","let","yield","const","class","debugger","async","await","static","import","from","export","extends"],n=["true","false","null","undefined","NaN","Infinity"],a=[].concat(["setInterval","setTimeout","clearInterval","clearTimeout","require","exports","eval","isFinite","isNaN","parseFloat","parseInt","decodeURI","decodeURIComponent","encodeURI","encodeURIComponent","escape","unescape"],["arguments","this","super","console","window","document","localStorage","module","global"],["Intl","DataView","Number","Math","Date","String","RegExp","Object","Function","Boolean","Error","Symbol","Set","Map","WeakSet","WeakMap","Proxy","Reflect","JSON","Promise","Float64Array","Int16Array","Int32Array","Int8Array","Uint16Array","Uint32Array","Float32Array","Array","Uint8Array","Uint8ClampedArray","ArrayBuffer"],["EvalError","InternalError","RangeError","ReferenceError","SyntaxError","TypeError","URIError"]);return function(r){var t={$pattern:"[A-Za-z$_][0-9A-Za-z$_]*",keyword:e.concat(["type","namespace","typedef","interface","public","private","protected","implements","declare","abstract","readonly"]).join(" "),literal:n.join(" "),built_in:a.concat(["any","void","number","boolean","string","object","never","enum"]).join(" 
")},s={className:"meta",begin:"@[A-Za-z$_][0-9A-Za-z$_]*"},i={className:"number",variants:[{begin:"\\b(0[bB][01]+)n?"},{begin:"\\b(0[oO][0-7]+)n?"},{begin:r.C_NUMBER_RE+"n?"}],relevance:0},o={className:"subst",begin:"\\$\\{",end:"\\}",keywords:t,contains:[]},c={begin:"html`",end:"",starts:{end:"`",returnEnd:!1,contains:[r.BACKSLASH_ESCAPE,o],subLanguage:"xml"}},l={begin:"css`",end:"",starts:{end:"`",returnEnd:!1,contains:[r.BACKSLASH_ESCAPE,o],subLanguage:"css"}},E={className:"string",begin:"`",end:"`",contains:[r.BACKSLASH_ESCAPE,o]};o.contains=[r.APOS_STRING_MODE,r.QUOTE_STRING_MODE,c,l,E,i,r.REGEXP_MODE];var d={begin:"\\(",end:/\)/,keywords:t,contains:["self",r.QUOTE_STRING_MODE,r.APOS_STRING_MODE,r.NUMBER_MODE]},u={className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:t,contains:[r.C_LINE_COMMENT_MODE,r.C_BLOCK_COMMENT_MODE,s,d]};return{name:"TypeScript",aliases:["ts"],keywords:t,contains:[r.SHEBANG(),{className:"meta",begin:/^\s*['"]use strict['"]/},r.APOS_STRING_MODE,r.QUOTE_STRING_MODE,c,l,E,r.C_LINE_COMMENT_MODE,r.C_BLOCK_COMMENT_MODE,i,{begin:"("+r.RE_STARTERS_RE+"|\\b(case|return|throw)\\b)\\s*",keywords:"return throw 
case",contains:[r.C_LINE_COMMENT_MODE,r.C_BLOCK_COMMENT_MODE,r.REGEXP_MODE,{className:"function",begin:"(\\([^(]*(\\([^(]*(\\([^(]*\\))?\\))?\\)|"+r.UNDERSCORE_IDENT_RE+")\\s*=>",returnBegin:!0,end:"\\s*=>",contains:[{className:"params",variants:[{begin:r.UNDERSCORE_IDENT_RE},{className:null,begin:/\(\s*\)/,skip:!0},{begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:t,contains:d.contains}]}]}],relevance:0},{className:"function",beginKeywords:"function",end:/[\{;]/,excludeEnd:!0,keywords:t,contains:["self",r.inherit(r.TITLE_MODE,{begin:"[A-Za-z$_][0-9A-Za-z$_]*"}),u],illegal:/%/,relevance:0},{beginKeywords:"constructor",end:/[\{;]/,excludeEnd:!0,contains:["self",u]},{begin:/module\./,keywords:{built_in:"module"},relevance:0},{beginKeywords:"module",end:/\{/,excludeEnd:!0},{beginKeywords:"interface",end:/\{/,excludeEnd:!0,keywords:"interface extends"},{begin:/\$[(.]/},{begin:"\\."+r.IDENT_RE,relevance:0},s,d]}}}());hljs.registerLanguage("plaintext",function(){"use strict";return function(t){return{name:"Plain text",aliases:["text","txt"],disableAutodetect:!0}}}());hljs.registerLanguage("less",function(){"use strict";return function(e){var n="([\\w-]+|@{[\\w-]+})",a=[],s=[],t=function(e){return{className:"string",begin:"~?"+e+".*?"+e}},r=function(e,n,a){return{className:e,begin:n,relevance:a}},i={begin:"\\(",end:"\\)",contains:s,relevance:0};s.push(e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,t("'"),t('"'),e.CSS_NUMBER_MODE,{begin:"(url|data-uri)\\(",starts:{className:"string",end:"[\\)\\n]",excludeEnd:!0}},r("number","#[0-9A-Fa-f]+\\b"),i,r("variable","@@?[\\w-]+",10),r("variable","@{[\\w-]+}"),r("built_in","~?`[^`]*?`"),{className:"attribute",begin:"[\\w-]+\\s*:",end:":",returnBegin:!0,excludeEnd:!0},{className:"meta",begin:"!important"});var c=s.concat({begin:"{",end:"}",contains:a}),l={beginKeywords:"when",endsWithParent:!0,contains:[{beginKeywords:"and 
not"}].concat(s)},o={begin:n+"\\s*:",returnBegin:!0,end:"[;}]",relevance:0,contains:[{className:"attribute",begin:n,end:":",excludeEnd:!0,starts:{endsWithParent:!0,illegal:"[<=$]",relevance:0,contains:s}}]},g={className:"keyword",begin:"@(import|media|charset|font-face|(-[a-z]+-)?keyframes|supports|document|namespace|page|viewport|host)\\b",starts:{end:"[;{}]",returnEnd:!0,contains:s,relevance:0}},d={className:"variable",variants:[{begin:"@[\\w-]+\\s*:",relevance:15},{begin:"@[\\w-]+"}],starts:{end:"[;}]",returnEnd:!0,contains:c}},b={variants:[{begin:"[\\.#:&\\[>]",end:"[;{}]"},{begin:n,end:"{"}],returnBegin:!0,returnEnd:!0,illegal:"[<='$\"]",relevance:0,contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,l,r("keyword","all\\b"),r("variable","@{[\\w-]+}"),r("selector-tag",n+"%?",0),r("selector-id","#"+n),r("selector-class","\\."+n,0),r("selector-tag","&",0),{className:"selector-attr",begin:"\\[",end:"\\]"},{className:"selector-pseudo",begin:/:(:)?[a-zA-Z0-9\_\-\+\(\)"'.]+/},{begin:"\\(",end:"\\)",contains:c},{begin:"!important"}]};return a.push(e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,g,d,o,b),{name:"Less",case_insensitive:!0,illegal:"[=>'/<($\"]",contains:a}}}());hljs.registerLanguage("lua",function(){"use strict";return function(e){var t={begin:"\\[=*\\[",end:"\\]=*\\]",contains:["self"]},a=[e.COMMENT("--(?!\\[=*\\[)","$"),e.COMMENT("--\\[=*\\[","\\]=*\\]",{contains:[t],relevance:10})];return{name:"Lua",keywords:{$pattern:e.UNDERSCORE_IDENT_RE,literal:"true false nil",keyword:"and break do else elseif end for goto if in local not or repeat return then until while",built_in:"_G _ENV _VERSION __index __newindex __mode __call __metatable __tostring __len __gc __add __sub __mul __div __mod __pow __concat __unm __eq __lt __le assert collectgarbage dofile error getfenv getmetatable ipairs load loadfile loadstring module next pairs pcall print rawequal rawget rawset require select setfenv setmetatable tonumber tostring type unpack xpcall arg self coroutine resume 
yield status wrap create running debug getupvalue debug sethook getmetatable gethook setmetatable setlocal traceback setfenv getinfo setupvalue getlocal getregistry getfenv io lines write close flush open output type read stderr stdin input stdout popen tmpfile math log max acos huge ldexp pi cos tanh pow deg tan cosh sinh random randomseed frexp ceil floor rad abs sqrt modf asin min mod fmod log10 atan2 exp sin atan os exit setlocale date getenv difftime remove time clock tmpname rename execute package preload loadlib loaded loaders cpath config path seeall string sub upper len gfind rep find match char dump gmatch reverse byte format gsub lower table setn insert getn foreachi maxn foreach concat sort remove"},contains:a.concat([{className:"function",beginKeywords:"function",end:"\\)",contains:[e.inherit(e.TITLE_MODE,{begin:"([_a-zA-Z]\\w*\\.)*([_a-zA-Z]\\w*:)?[_a-zA-Z]\\w*"}),{className:"params",begin:"\\(",endsWithParent:!0,contains:a}].concat(a)},e.C_NUMBER_MODE,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,{className:"string",begin:"\\[=*\\[",end:"\\]=*\\]",contains:[t],relevance:5}])}}}());hljs.registerLanguage("solidity",(()=>{"use strict";function e(){try{return!0}catch(e){return!1}}var a=/-?(\b0[xX]([a-fA-F0-9]_?)*[a-fA-F0-9]|(\b[1-9](_?\d)*(\.((\d_?)*\d)?)?|\.\d(_?\d)*)([eE][-+]?\d(_?\d)*)?|\b0)(?!\w|\$)/;e()&&(a=a.source.replace(/\\b/g,"(?{var 
a=r(e),o=l(e),c=/[A-Za-z_$][A-Za-z_$0-9.]*/,d=e.inherit(e.TITLE_MODE,{begin:/[A-Za-z$_][0-9A-Za-z$_]*/,lexemes:c,keywords:n}),u={className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,lexemes:c,keywords:n,contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,a,o,s]},_={className:"operator",begin:/:=|->/};return{keywords:n,lexemes:c,contains:[a,o,i,t,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,s,_,{className:"function",lexemes:c,beginKeywords:"function",end:"{",excludeEnd:!0,contains:[d,u,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,_]}]}},solAposStringMode:r,solQuoteStringMode:l,HEX_APOS_STRING_MODE:i,HEX_QUOTE_STRING_MODE:t,SOL_NUMBER:s,isNegativeLookbehindAvailable:e};const{baseAssembly:c,solAposStringMode:d,solQuoteStringMode:u,HEX_APOS_STRING_MODE:_,HEX_QUOTE_STRING_MODE:m,SOL_NUMBER:b,isNegativeLookbehindAvailable:g}=o;return e=>{for(var a=d(e),s=u(e),n=[],i=0;i<32;i++)n[i]=i+1;var t=n.map((e=>8*e)),r=[];for(i=0;i<=80;i++)r[i]=i;var l=n.map((e=>"bytes"+e)).join(" ")+" ",o=t.map((e=>"uint"+e)).join(" ")+" ",E=t.map((e=>"int"+e)).join(" ")+" ",M=[].concat.apply([],t.map((e=>r.map((a=>e+"x"+a))))),p={keyword:"var bool string int uint "+E+o+"byte bytes "+l+"fixed ufixed "+M.map((e=>"fixed"+e)).join(" ")+" "+M.map((e=>"ufixed"+e)).join(" ")+" enum struct mapping address new delete if else for while continue break return throw emit try catch revert unchecked _ function modifier event constructor fallback receive error virtual override constant immutable anonymous indexed storage memory calldata external public internal payable pure view private returns import from as using global pragma contract interface library is abstract type assembly",literal:"true false wei gwei szabo finney ether seconds minutes hours days weeks years",built_in:"self this super selfdestruct suicide now msg block tx abi blockhash gasleft assert require Error Panic sha3 sha256 keccak256 ripemd160 ecrecover addmod mulmod log0 log1 log2 log3 
log4"},O={className:"operator",begin:/[+\-!~*\/%<>&^|=]/},C=/[A-Za-z_$][A-Za-z_$0-9]*/,N={className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,lexemes:C,keywords:p,contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,a,s,b,"self"]},f={begin:/\.\s*/,end:/[^A-Za-z0-9$_\.]/,excludeBegin:!0,excludeEnd:!0,keywords:{built_in:"gas value selector address length push pop send transfer call callcode delegatecall staticcall balance code codehash wrap unwrap name creationCode runtimeCode interfaceId min max"},relevance:2},y=e.inherit(e.TITLE_MODE,{begin:/[A-Za-z$_][0-9A-Za-z$_]*/,lexemes:C,keywords:p}),w={className:"built_in",begin:(g()?"(?{"use strict";function e(){try{return!0}catch(e){return!1}}var a=/-?(\b0[xX]([a-fA-F0-9]_?)*[a-fA-F0-9]|(\b[1-9](_?\d)*(\.((\d_?)*\d)?)?|\.\d(_?\d)*)([eE][-+]?\d(_?\d)*)?|\b0)(?!\w|\$)/;e()&&(a=a.source.replace(/\\b/g,"(?{var a=d(e),n=r(e),o=/[A-Za-z_$][A-Za-z_$0-9.]*/,c=e.inherit(e.TITLE_MODE,{begin:/[A-Za-z$_][0-9A-Za-z$_]*/,lexemes:o,keywords:t}),u={className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,lexemes:o,keywords:t,contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,a,n,s]},p={className:"operator",begin:/:=|->/};return{keywords:t,lexemes:o,contains:[a,n,i,l,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,s,p,{className:"function",lexemes:o,beginKeywords:"function",end:"{",excludeEnd:!0,contains:[c,u,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,p]}]}},solAposStringMode:d,solQuoteStringMode:r,HEX_APOS_STRING_MODE:i,HEX_QUOTE_STRING_MODE:l,SOL_NUMBER:s,isNegativeLookbehindAvailable:e};const{SOL_ASSEMBLY_KEYWORDS:o,baseAssembly:c,isNegativeLookbehindAvailable:u}=n;return e=>{var a={keyword:o.keyword+" object code data",built_in:o.built_in+" datasize dataoffset datacopy setimmutable loadimmutable linkersymbol memoryguard",literal:o.literal},s=/\bverbatim_[1-9]?[0-9]i_[1-9]?[0-9]o\b(?!\$)/;u()&&(s=s.source.replace(/\\b/,"(?" 
+ buffer.Append("") + return buffer } - // Construct a list of strings for each call made in the sequence - var elementStrings []string + // Construct the buffer for each call made in the sequence for i := 0; i < len(cs); i++ { // Add the string representing the call - elementStrings = append(elementStrings, fmt.Sprintf("%d) %s", i+1, cs[i].String())) + buffer.Append(fmt.Sprintf("%d) %s\n", i+1, cs[i].String())) // If we have an execution trace attached, print information about it. if cs[i].ExecutionTrace != nil { - elementStrings = append(elementStrings, cs[i].ExecutionTrace.String()) + buffer.Append(cs[i].ExecutionTrace.Log().Elements()...) + buffer.Append("\n") } } - // Join each element with new lines and return it. - return strings.Join(elementStrings, "\n") + // Return the buffer + return buffer +} + +// String returns the string representation of this call sequence +func (cs CallSequence) String() string { + // Internally, we just call the log function, get the list of elements and create their non-colorized string representation + // Might be useful for 3rd party apps + return cs.Log().String() } // Clone creates a copy of the underlying CallSequence. @@ -109,7 +119,7 @@ func (cs CallSequence) Hash() (common.Hash, error) { // Try to obtain a hash for the message/call. If this fails, we will replace it in the deferred panic // recovery. - messageHashData = utils.MessageToTransaction(cse.Call).Hash().Bytes() + messageHashData = utils.MessageToTransaction(cse.Call.ToCoreMessage()).Hash().Bytes() }() // Hash the message hash data. @@ -196,7 +206,17 @@ func (cse *CallSequenceElement) Method() (*abi.Method, error) { if cse.Contract == nil { return nil, nil } - return cse.Contract.CompiledContract().Abi.MethodById(cse.Call.Data()) + + // If we have a method resolved, return it. 
+ if cse.Call != nil && cse.Call.DataAbiValues != nil { + if cse.Call.DataAbiValues.Method != nil { + return cse.Call.DataAbiValues.Method, nil + } + } + + // Try to resolve the method by ID from the call data. + method, err := cse.Contract.CompiledContract().Abi.MethodById(cse.Call.Data) + return method, err } // String returns a displayable string representing the CallSequenceElement. @@ -211,11 +231,11 @@ func (cse *CallSequenceElement) String() string { method, err := cse.Method() methodName := "" if err == nil && method != nil { - methodName = method.Name + methodName = method.Sig } // Next decode our arguments (we jump four bytes to skip the function selector) - args, err := method.Inputs.Unpack(cse.Call.Data()[4:]) + args, err := method.Inputs.Unpack(cse.Call.Data[4:]) argsText := "" if err == nil { argsText, err = valuegeneration.EncodeABIArgumentsToString(method.Inputs, args) @@ -240,10 +260,10 @@ func (cse *CallSequenceElement) String() string { argsText, blockNumberStr, blockTimeStr, - cse.Call.Gas(), - cse.Call.GasPrice().String(), - cse.Call.Value().String(), - cse.Call.From(), + cse.Call.GasLimit, + cse.Call.GasPrice.String(), + cse.Call.Value.String(), + cse.Call.From, ) } @@ -257,14 +277,9 @@ func (cse *CallSequenceElement) AttachExecutionTrace(chain *chain.TestChain, con return fmt.Errorf("failed to resolve execution trace as the chain reference is nil, indicating the call sequence element has never been executed") } - // Obtain the state prior to executing this transaction. 
- state, err := chain.StateFromRoot(cse.ChainReference.MessageResults().PreStateRoot) - if err != nil { - return fmt.Errorf("failed to resolve execution trace due to error loading root hash from database: %v", err) - } - + var err error // Perform our call with the given trace - _, cse.ExecutionTrace, err = executiontracer.CallWithExecutionTrace(chain, contractDefinitions, cse.Call, state) + _, cse.ExecutionTrace, err = executiontracer.CallWithExecutionTrace(chain, contractDefinitions, cse.Call.ToCoreMessage(), nil) if err != nil { return fmt.Errorf("failed to resolve execution trace due to error replaying the call: %v", err) } diff --git a/fuzzing/calls/call_sequence_execution.go b/fuzzing/calls/call_sequence_execution.go index 8007bb6a..593465f4 100644 --- a/fuzzing/calls/call_sequence_execution.go +++ b/fuzzing/calls/call_sequence_execution.go @@ -2,7 +2,11 @@ package calls import ( "fmt" + "github.com/crytic/medusa/chain" + "github.com/crytic/medusa/fuzzing/contracts" + "github.com/crytic/medusa/fuzzing/executiontracer" + "github.com/crytic/medusa/utils" ) // ExecuteCallSequenceFetchElementFunc describes a function that is called to obtain the next call sequence element to @@ -22,7 +26,7 @@ type ExecuteCallSequenceExecutionCheckFunc func(currentExecutedSequence CallSequ // A "post element executed check" function is provided to check whether execution should stop after each element is // executed. // Returns the call sequence which was executed and an error if one occurs. 
-func ExecuteCallSequenceIteratively(chain *chain.TestChain, fetchElementFunc ExecuteCallSequenceFetchElementFunc, executionCheckFunc ExecuteCallSequenceExecutionCheckFunc) (CallSequence, error) { +func ExecuteCallSequenceIteratively(chain *chain.TestChain, fetchElementFunc ExecuteCallSequenceFetchElementFunc, executionCheckFunc ExecuteCallSequenceExecutionCheckFunc, additionalTracers ...*chain.TestChainTracer) (CallSequence, error) { // If there is no fetch element function provided, throw an error if fetchElementFunc == nil { return nil, fmt.Errorf("could not execute call sequence on chain as the 'fetch element function' provided was nil") @@ -84,7 +88,8 @@ func ExecuteCallSequenceIteratively(chain *chain.TestChain, fetchElementFunc Exe } // Try to add our transaction to this block. - err = chain.PendingBlockAddTx(callSequenceElement.Call) + err = chain.PendingBlockAddTx(callSequenceElement.Call.ToCoreMessage(), additionalTracers...) + if err != nil { // If we encountered a block gas limit error, this tx is too expensive to fit in this block. // If there are other transactions in the block, this makes sense. The block is "full". @@ -161,6 +166,39 @@ func ExecuteCallSequence(chain *chain.TestChain, callSequence CallSequence) (Cal return nil, nil } - // Execute our provided call sequence iteratively. return ExecuteCallSequenceIteratively(chain, fetchElementFunc, nil) } + +// ExecuteCallSequenceWithExecutionTracer attaches an executiontracer.ExecutionTracer to ExecuteCallSequenceIteratively and attaches execution traces to the call sequence elements. 
+func ExecuteCallSequenceWithExecutionTracer(testChain *chain.TestChain, contractDefinitions contracts.Contracts, callSequence CallSequence, verboseTracing bool) (CallSequence, error) { + // Create a new execution tracer + executionTracer := executiontracer.NewExecutionTracer(contractDefinitions, testChain.CheatCodeContracts()) + defer executionTracer.Close() + + // Execute our sequence with a simple fetch operation provided to obtain each element. + fetchElementFunc := func(currentIndex int) (*CallSequenceElement, error) { + if currentIndex < len(callSequence) { + return callSequence[currentIndex], nil + } + return nil, nil + } + + // Execute the call sequence and attach the execution tracer + executedCallSeq, err := ExecuteCallSequenceIteratively(testChain, fetchElementFunc, nil, executionTracer.NativeTracer()) + + // By default, we only trace the last element in the call sequence. + traceFrom := len(callSequence) - 1 + // If verbose tracing is enabled, we want to trace all elements in the call sequence. + if verboseTracing { + traceFrom = 0 + } + + // Attach the execution trace for each requested call sequence element + for ; traceFrom < len(callSequence); traceFrom++ { + callSequenceElement := callSequence[traceFrom] + hash := utils.MessageToTransaction(callSequenceElement.Call.ToCoreMessage()).Hash() + callSequenceElement.ExecutionTrace = executionTracer.GetTrace(hash) + } + + return executedCallSeq, err +} diff --git a/fuzzing/calls/gen_call_message_json.go b/fuzzing/calls/gen_call_message_json.go index 5b3583bf..26662c91 100644 --- a/fuzzing/calls/gen_call_message_json.go +++ b/fuzzing/calls/gen_call_message_json.go @@ -8,6 +8,7 @@ import ( "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" + "github.com/ethereum/go-ethereum/core/types" ) var _ = (*callMessageMarshaling)(nil) @@ -15,78 +16,90 @@ var _ = (*callMessageMarshaling)(nil) // MarshalJSON marshals as JSON. 
func (c CallMessage) MarshalJSON() ([]byte, error) { type CallMessage struct { - MsgFrom common.Address `json:"from"` - MsgTo *common.Address `json:"to"` - MsgNonce uint64 `json:"nonce"` - MsgValue *hexutil.Big `json:"value"` - MsgGas uint64 `json:"gas"` - MsgGasPrice *hexutil.Big `json:"gasPrice"` - MsgGasFeeCap *hexutil.Big `json:"gasFeeCap"` - MsgGasTipCap *hexutil.Big `json:"gasTipCap"` - MsgData hexutil.Bytes `json:"data,omitempty"` - MsgDataAbiValues *CallMessageDataAbiValues `json:"dataAbiValues,omitempty"` + From common.Address `json:"from"` + To *common.Address `json:"to"` + Nonce uint64 `json:"nonce"` + Value *hexutil.Big `json:"value"` + GasLimit uint64 `json:"gasLimit"` + GasPrice *hexutil.Big `json:"gasPrice"` + GasFeeCap *hexutil.Big `json:"gasFeeCap"` + GasTipCap *hexutil.Big `json:"gasTipCap"` + Data hexutil.Bytes `json:"data,omitempty"` + DataAbiValues *CallMessageDataAbiValues `json:"dataAbiValues,omitempty"` + AccessList types.AccessList + SkipAccountChecks bool } var enc CallMessage - enc.MsgFrom = c.MsgFrom - enc.MsgTo = c.MsgTo - enc.MsgNonce = c.MsgNonce - enc.MsgValue = (*hexutil.Big)(c.MsgValue) - enc.MsgGas = c.MsgGas - enc.MsgGasPrice = (*hexutil.Big)(c.MsgGasPrice) - enc.MsgGasFeeCap = (*hexutil.Big)(c.MsgGasFeeCap) - enc.MsgGasTipCap = (*hexutil.Big)(c.MsgGasTipCap) - enc.MsgData = c.MsgData - enc.MsgDataAbiValues = c.MsgDataAbiValues + enc.From = c.From + enc.To = c.To + enc.Nonce = c.Nonce + enc.Value = (*hexutil.Big)(c.Value) + enc.GasLimit = c.GasLimit + enc.GasPrice = (*hexutil.Big)(c.GasPrice) + enc.GasFeeCap = (*hexutil.Big)(c.GasFeeCap) + enc.GasTipCap = (*hexutil.Big)(c.GasTipCap) + enc.Data = c.Data + enc.DataAbiValues = c.DataAbiValues + enc.AccessList = c.AccessList + enc.SkipAccountChecks = c.SkipAccountChecks return json.Marshal(&enc) } // UnmarshalJSON unmarshals from JSON. 
func (c *CallMessage) UnmarshalJSON(input []byte) error { type CallMessage struct { - MsgFrom *common.Address `json:"from"` - MsgTo *common.Address `json:"to"` - MsgNonce *uint64 `json:"nonce"` - MsgValue *hexutil.Big `json:"value"` - MsgGas *uint64 `json:"gas"` - MsgGasPrice *hexutil.Big `json:"gasPrice"` - MsgGasFeeCap *hexutil.Big `json:"gasFeeCap"` - MsgGasTipCap *hexutil.Big `json:"gasTipCap"` - MsgData *hexutil.Bytes `json:"data,omitempty"` - MsgDataAbiValues *CallMessageDataAbiValues `json:"dataAbiValues,omitempty"` + From *common.Address `json:"from"` + To *common.Address `json:"to"` + Nonce *uint64 `json:"nonce"` + Value *hexutil.Big `json:"value"` + GasLimit *uint64 `json:"gasLimit"` + GasPrice *hexutil.Big `json:"gasPrice"` + GasFeeCap *hexutil.Big `json:"gasFeeCap"` + GasTipCap *hexutil.Big `json:"gasTipCap"` + Data *hexutil.Bytes `json:"data,omitempty"` + DataAbiValues *CallMessageDataAbiValues `json:"dataAbiValues,omitempty"` + AccessList *types.AccessList + SkipAccountChecks *bool } var dec CallMessage if err := json.Unmarshal(input, &dec); err != nil { return err } - if dec.MsgFrom != nil { - c.MsgFrom = *dec.MsgFrom + if dec.From != nil { + c.From = *dec.From } - if dec.MsgTo != nil { - c.MsgTo = dec.MsgTo + if dec.To != nil { + c.To = dec.To } - if dec.MsgNonce != nil { - c.MsgNonce = *dec.MsgNonce + if dec.Nonce != nil { + c.Nonce = *dec.Nonce } - if dec.MsgValue != nil { - c.MsgValue = (*big.Int)(dec.MsgValue) + if dec.Value != nil { + c.Value = (*big.Int)(dec.Value) } - if dec.MsgGas != nil { - c.MsgGas = *dec.MsgGas + if dec.GasLimit != nil { + c.GasLimit = *dec.GasLimit } - if dec.MsgGasPrice != nil { - c.MsgGasPrice = (*big.Int)(dec.MsgGasPrice) + if dec.GasPrice != nil { + c.GasPrice = (*big.Int)(dec.GasPrice) } - if dec.MsgGasFeeCap != nil { - c.MsgGasFeeCap = (*big.Int)(dec.MsgGasFeeCap) + if dec.GasFeeCap != nil { + c.GasFeeCap = (*big.Int)(dec.GasFeeCap) } - if dec.MsgGasTipCap != nil { - c.MsgGasTipCap = (*big.Int)(dec.MsgGasTipCap) + 
if dec.GasTipCap != nil { + c.GasTipCap = (*big.Int)(dec.GasTipCap) } - if dec.MsgData != nil { - c.MsgData = *dec.MsgData + if dec.Data != nil { + c.Data = *dec.Data } - if dec.MsgDataAbiValues != nil { - c.MsgDataAbiValues = dec.MsgDataAbiValues + if dec.DataAbiValues != nil { + c.DataAbiValues = dec.DataAbiValues + } + if dec.AccessList != nil { + c.AccessList = *dec.AccessList + } + if dec.SkipAccountChecks != nil { + c.SkipAccountChecks = *dec.SkipAccountChecks } return nil } diff --git a/fuzzing/config/config.go b/fuzzing/config/config.go index 72e04acf..1ccbfb61 100644 --- a/fuzzing/config/config.go +++ b/fuzzing/config/config.go @@ -4,19 +4,31 @@ import ( "encoding/json" "errors" "fmt" - "github.com/crytic/medusa/chain/config" + "math/big" "os" + "github.com/crytic/medusa/chain/config" "github.com/crytic/medusa/compilation" + "github.com/crytic/medusa/logging" "github.com/crytic/medusa/utils" + "github.com/ethereum/go-ethereum/common/hexutil" + "github.com/rs/zerolog" ) +// The following directives will be picked up by the `go generate` command to generate JSON marshaling code from +// templates defined below. They should be preserved for re-use in case we change our structures. +//go:generate go get github.com/fjl/gencodec +//go:generate go run github.com/fjl/gencodec -type FuzzingConfig -field-override fuzzingConfigMarshaling -out gen_fuzzing_config.go + type ProjectConfig struct { // Fuzzing describes the configuration used in fuzzing campaigns. Fuzzing FuzzingConfig `json:"fuzzing"` // Compilation describes the configuration used to compile the underlying project. Compilation *compilation.CompilationConfig `json:"compilation"` + + // Logging describes the configuration used for logging to file and console + Logging LoggingConfig `json:"logging"` } // FuzzingConfig describes the configuration options used by the fuzzing.Fuzzer. @@ -28,14 +40,17 @@ type FuzzingConfig struct { // so that memory from its underlying chain is freed. 
WorkerResetLimit int `json:"workerResetLimit"` - // Timeout describes a time in seconds for which the fuzzing operation should run. Providing negative or zero value - // will result in no timeout. + // Timeout describes a time threshold in seconds for which the fuzzing operation should run. Providing negative or + // zero value will result in no timeout. Timeout int `json:"timeout"` // TestLimit describes a threshold for the number of transactions to test, after which it will exit. This number // must be non-negative. A zero value indicates the test limit should not be enforced. TestLimit uint64 `json:"testLimit"` + // ShrinkLimit describes a threshold for the iterations (call sequence tests) which shrinking should perform. + ShrinkLimit uint64 `json:"shrinkLimit"` + // CallSequenceLength describes the maximum length a transaction sequence can be generated as. CallSequenceLength int `json:"callSequenceLength"` @@ -46,10 +61,22 @@ type FuzzingConfig struct { // CoverageEnabled describes whether to use coverage-guided fuzzing CoverageEnabled bool `json:"coverageEnabled"` - // DeploymentOrder determines the order in which the contracts should be deployed - DeploymentOrder []string `json:"deploymentOrder"` + // CoverageFormats indicate which reports to generate: "lcov" and "html" are supported. + CoverageFormats []string `json:"coverageFormats"` - // Constructor arguments for contracts deployment. It is available only in init mode + // TargetContracts are the target contracts for fuzz testing + TargetContracts []string `json:"targetContracts"` + + // PredeployedContracts are contracts that can be deterministically deployed at a specific address. 
It maps the + // contract name to the deployment address + PredeployedContracts map[string]string `json:"predeployedContracts"` + + // TargetContractsBalances holds the amount of wei that should be sent during deployment for one or more contracts in + // TargetContracts + TargetContractsBalances []*big.Int `json:"targetContractsBalances"` + + // ConstructorArgs holds the constructor arguments for TargetContracts deployments. It is available via the project + // configuration ConstructorArgs map[string]map[string]any `json:"constructorArgs"` // DeployerAddress describe the account address to be used to deploy contracts. @@ -81,6 +108,13 @@ type FuzzingConfig struct { TestChainConfig config.TestChainConfig `json:"chainConfig"` } +// fuzzingConfigMarshaling is a structure that overrides field types during JSON marshaling. It allows FuzzingConfig to +// have its custom marshaling methods auto-generated and will handle type conversions for serialization purposes. +// For example, this enables serialization of big.Int but specifying a different field type to control serialization. +type fuzzingConfigMarshaling struct { + TargetContractsBalances []*hexutil.Big +} + // TestingConfig describes the configuration options used for testing type TestingConfig struct { // StopOnFailedTest describes whether the fuzzing.Fuzzer should stop after detecting the first failed test. @@ -90,6 +124,10 @@ type TestingConfig struct { // to determine which contract a deployed contract is. StopOnFailedContractMatching bool `json:"stopOnFailedContractMatching"` + // StopOnNoTests describes whether the fuzzing.Fuzzer should stop the fuzzer from starting if no tests (property, + // assertion, optimization, custom) are found. + StopOnNoTests bool `json:"stopOnNoTests"` + // TestAllContracts indicates whether all contracts should be tested (including dynamically deployed ones), rather // than just the contracts specified in the project configuration's deployment order. 
TestAllContracts bool `json:"testAllContracts"` @@ -103,7 +141,52 @@ type TestingConfig struct { AssertionTesting AssertionTestingConfig `json:"assertionTesting"` // PropertyTesting describes the configuration used for property testing. - PropertyTesting PropertyTestConfig `json:"propertyTesting"` + PropertyTesting PropertyTestingConfig `json:"propertyTesting"` + + // OptimizationTesting describes the configuration used for optimization testing. + OptimizationTesting OptimizationTestingConfig `json:"optimizationTesting"` + + // TargetFunctionSignatures is a list function signatures call the fuzzer should exclusively target by omitting calls to other signatures. + // The signatures should specify the contract name and signature in the ABI format like `Contract.func(uint256,bytes32)`. + TargetFunctionSignatures []string `json:"targetFunctionSignatures"` + + // ExcludeFunctionSignatures is a list of function signatures that will be excluded from call sequences. + // The signatures should specify the contract name and signature in the ABI format like `Contract.func(uint256,bytes32)`. + ExcludeFunctionSignatures []string `json:"excludeFunctionSignatures"` +} + +// Validate validates that the TestingConfig meets certain requirements. +func (testCfg *TestingConfig) Validate() error { + // Verify that target and exclude function signatures are used mutually exclusive. + if (len(testCfg.TargetFunctionSignatures) != 0) && (len(testCfg.ExcludeFunctionSignatures) != 0) { + return errors.New("project configuration must specify only one of blacklist or whitelist at a time") + } + + // Verify property testing fields. + if testCfg.PropertyTesting.Enabled { + // Test prefixes must be supplied if property testing is enabled. 
+ if len(testCfg.PropertyTesting.TestPrefixes) == 0 { + return errors.New("project configuration must specify test name prefixes if property testing is enabled") + } + } + + if testCfg.OptimizationTesting.Enabled { + // Test prefixes must be supplied if optimization testing is enabled. + if len(testCfg.OptimizationTesting.TestPrefixes) == 0 { + return errors.New("project configuration must specify test name prefixes if optimization testing is enabled") + } + } + + // Validate that prefixes do not overlap + for _, prefix := range testCfg.PropertyTesting.TestPrefixes { + for _, prefix2 := range testCfg.OptimizationTesting.TestPrefixes { + if prefix == prefix2 { + return errors.New("project configuration must specify unique test name prefixes for property and optimization testing") + } + } + } + + return nil } // AssertionTestingConfig describes the configuration options used for assertion testing @@ -113,10 +196,46 @@ type AssertionTestingConfig struct { // TestViewMethods dictates whether constant/pure/view methods should be tested. 
TestViewMethods bool `json:"testViewMethods"` + + // PanicCodeConfig describes the various panic codes that can be enabled and be treated as a "failing case" + PanicCodeConfig PanicCodeConfig `json:"panicCodeConfig"` +} + +// PanicCodeConfig describes the various panic codes that can be enabled and be treated as a failing assertion test +type PanicCodeConfig struct { + // FailOnCompilerInsertedPanic describes whether a generic compiler inserted panic should be treated as a failing case + FailOnCompilerInsertedPanic bool `json:"failOnCompilerInsertedPanic"` + + // FailOnAssertion describes whether an assertion failure should be treated as a failing case + FailOnAssertion bool `json:"failOnAssertion"` + + // FailOnArithmeticUnderflow describes whether an arithmetic underflow should be treated as a failing case + FailOnArithmeticUnderflow bool `json:"failOnArithmeticUnderflow"` + + // FailOnDivideByZero describes whether division by zero should be treated as a failing case + FailOnDivideByZero bool `json:"failOnDivideByZero"` + + // FailOnEnumTypeConversionOutOfBounds describes whether an out-of-bounds enum access should be treated as a failing case + FailOnEnumTypeConversionOutOfBounds bool `json:"failOnEnumTypeConversionOutOfBounds"` + + // FailOnIncorrectStorageAccess describes whether an out-of-bounds storage access should be treated as a failing case + FailOnIncorrectStorageAccess bool `json:"failOnIncorrectStorageAccess"` + + // FailOnPopEmptyArray describes whether a pop operation on an empty array should be treated as a failing case + FailOnPopEmptyArray bool `json:"failOnPopEmptyArray"` + + // FailOnOutOfBoundsArrayAccess describes whether an out-of-bounds array access should be treated as a failing case + FailOnOutOfBoundsArrayAccess bool `json:"failOnOutOfBoundsArrayAccess"` + + // FailOnAllocateTooMuchMemory describes whether excessive memory usage should be treated as a failing case + FailOnAllocateTooMuchMemory bool `json:"failOnAllocateTooMuchMemory"` + 
+ // FailOnCallUninitializedVariable describes whether calling an un-initialized variable should be treated as a failing case + FailOnCallUninitializedVariable bool `json:"failOnCallUninitializedVariable"` } -// PropertyTestConfig describes the configuration options used for property testing -type PropertyTestConfig struct { +// PropertyTestingConfig describes the configuration options used for property testing +type PropertyTestingConfig struct { // Enabled describes whether testing is enabled. Enabled bool `json:"enabled"` @@ -124,18 +243,57 @@ type PropertyTestConfig struct { TestPrefixes []string `json:"testPrefixes"` } +// OptimizationTestingConfig describes the configuration options used for optimization testing +type OptimizationTestingConfig struct { + // Enabled describes whether testing is enabled. + Enabled bool `json:"enabled"` + + // TestPrefixes dictates what method name prefixes will determine if a contract method is an optimization test. + TestPrefixes []string `json:"testPrefixes"` +} + +// LoggingConfig describes the configuration options for logging to console and file +type LoggingConfig struct { + // Level describes whether logs of certain severity levels (eg info, warning, etc.) will be emitted or discarded. + // Increasing level values represent more severe logs + Level zerolog.Level `json:"level"` + + // LogDirectory describes what directory log files should be outputted in/ LogDirectory being a non-empty string is + // equivalent to enabling file logging. + LogDirectory string `json:"logDirectory"` + + // NoColor indicates whether log messages should be displayed with colored formatting. + NoColor bool `json:"noColor"` +} + +// ConsoleLoggingConfig describes the configuration options for logging to console. 
Note that this not being used right now +// but will be added to LoggingConfig down the line +// TODO: Update when implementing a structured logging solution +type ConsoleLoggingConfig struct { + // Enabled describes whether console logging is enabled. + Enabled bool `json:"enabled"` +} + +// FileLoggingConfig describes the configuration options for logging to file. Note that this not being used right now +// but will be added to LoggingConfig down the line +// TODO: Update when implementing a structured logging solution +type FileLoggingConfig struct { + // LogDirectory describes what directory log files should be outputted in. LogDirectory being a non-empty string + // is equivalent to enabling file logging. + LogDirectory bool `json:"logDirectory"` +} + // ReadProjectConfigFromFile reads a JSON-serialized ProjectConfig from a provided file path. // Returns the ProjectConfig if it succeeds, or an error if one occurs. -func ReadProjectConfigFromFile(path string) (*ProjectConfig, error) { +func ReadProjectConfigFromFile(path string, platform string) (*ProjectConfig, error) { // Read our project configuration file data - fmt.Printf("Reading configuration file: %s\n", path) b, err := os.ReadFile(path) if err != nil { return nil, err } // Parse the project configuration - projectConfig, err := GetDefaultProjectConfig("") + projectConfig, err := GetDefaultProjectConfig(platform) if err != nil { return nil, err } @@ -168,6 +326,17 @@ func (p *ProjectConfig) WriteToFile(path string) error { // Validate validates that the ProjectConfig meets certain requirements. // Returns an error if one occurs. 
func (p *ProjectConfig) Validate() error { + // Create logger instance if global logger is available + logger := logging.NewLogger(zerolog.Disabled) + if logging.GlobalLogger != nil { + logger = logging.GlobalLogger.NewSubLogger("module", "fuzzer config") + } + + // Validate testing config + if err := p.Fuzzing.Testing.Validate(); err != nil { + return err + } + // Verify the worker count is a positive number. if p.Fuzzing.Workers <= 0 { return errors.New("project configuration must specify a positive number for the worker count") @@ -183,12 +352,30 @@ func (p *ProjectConfig) Validate() error { return errors.New("project configuration must specify a positive number for the worker reset limit") } + // Verify timeout + if p.Fuzzing.Timeout < 0 { + return errors.New("project configuration must specify a positive number for the timeout") + } + // Verify gas limits are appropriate if p.Fuzzing.BlockGasLimit < p.Fuzzing.TransactionGasLimit { return errors.New("project configuration must specify a block gas limit which is not less than the transaction gas limit") } if p.Fuzzing.BlockGasLimit == 0 || p.Fuzzing.TransactionGasLimit == 0 { - return errors.New("project configuration must specify a block and transaction gas limit which is non-zero") + return errors.New("project configuration must specify a block and transaction gas limit which are non-zero") + } + + // Log warning if max block delay is zero + if p.Fuzzing.MaxBlockNumberDelay == 0 { + logger.Warn("The maximum block number delay is set to zero. Please be aware that transactions will " + + "always be fit in the same block until the block gas limit is reached and that the block number will always " + + "increment by one.") + } + + // Log warning if max timestamp delay is zero + if p.Fuzzing.MaxBlockTimestampDelay == 0 { + logger.Warn("The maximum timestamp delay is set to zero. 
Please be aware that block time jumps will " + + "always be exactly one.") } // Verify that senders are well-formed addresses @@ -201,12 +388,27 @@ func (p *ProjectConfig) Validate() error { return errors.New("project configuration must specify only a well-formed deployer address") } - // Verify property testing fields. - if p.Fuzzing.Testing.PropertyTesting.Enabled { - // Test prefixes must be supplied if property testing is enabled. - if len(p.Fuzzing.Testing.PropertyTesting.TestPrefixes) == 0 { - return errors.New("project configuration must specify test name prefixes if property testing is enabled") + // Verify that addresses of predeployed contracts are well-formed + for _, addr := range p.Fuzzing.PredeployedContracts { + if _, err := utils.HexStringToAddress(addr); err != nil { + return errors.New("project configuration must specify only well-formed predeployed contract address(es)") } } + + // The coverage report format must be either "lcov" or "html" + if p.Fuzzing.CoverageFormats != nil { + for _, report := range p.Fuzzing.CoverageFormats { + if report != "lcov" && report != "html" { + return fmt.Errorf("project configuration must specify only valid coverage reports (lcov, html): %s", report) + } + } + } + + // Ensure that the log level is a valid one + level, err := zerolog.ParseLevel(p.Logging.Level.String()) + if err != nil || level == zerolog.FatalLevel { + return errors.New("project config must specify a valid log level (trace, debug, info, warn, error, or panic)") + } + return nil } diff --git a/fuzzing/config/config_defaults.go b/fuzzing/config/config_defaults.go index cdcf86af..38532a03 100644 --- a/fuzzing/config/config_defaults.go +++ b/fuzzing/config/config_defaults.go @@ -1,8 +1,11 @@ package config import ( + "math/big" + testChainConfig "github.com/crytic/medusa/chain/config" "github.com/crytic/medusa/compilation" + "github.com/rs/zerolog" ) // GetDefaultProjectConfig obtains a default configuration for a project. 
It populates a default compilation config @@ -31,15 +34,19 @@ func GetDefaultProjectConfig(platform string) (*ProjectConfig, error) { // Create a project configuration projectConfig := &ProjectConfig{ Fuzzing: FuzzingConfig{ - Workers: 10, - WorkerResetLimit: 50, - Timeout: 0, - TestLimit: 0, - CallSequenceLength: 100, - DeploymentOrder: []string{}, - ConstructorArgs: map[string]map[string]any{}, - CorpusDirectory: "", - CoverageEnabled: true, + Workers: 10, + WorkerResetLimit: 50, + Timeout: 0, + TestLimit: 0, + ShrinkLimit: 5_000, + CallSequenceLength: 100, + TargetContracts: []string{}, + TargetContractsBalances: []*big.Int{}, + PredeployedContracts: map[string]string{}, + ConstructorArgs: map[string]map[string]any{}, + CorpusDirectory: "", + CoverageEnabled: true, + CoverageFormats: []string{"html", "lcov"}, SenderAddresses: []string{ "0x10000", "0x20000", @@ -52,23 +59,40 @@ func GetDefaultProjectConfig(platform string) (*ProjectConfig, error) { TransactionGasLimit: 12_500_000, Testing: TestingConfig{ StopOnFailedTest: true, - StopOnFailedContractMatching: true, + StopOnFailedContractMatching: false, + StopOnNoTests: true, TestAllContracts: false, TraceAll: false, + TargetFunctionSignatures: []string{}, + ExcludeFunctionSignatures: []string{}, AssertionTesting: AssertionTestingConfig{ - Enabled: false, + Enabled: true, TestViewMethods: false, + PanicCodeConfig: PanicCodeConfig{ + FailOnAssertion: true, + }, }, - PropertyTesting: PropertyTestConfig{ + PropertyTesting: PropertyTestingConfig{ Enabled: true, TestPrefixes: []string{ - "fuzz_", + "property_", + }, + }, + OptimizationTesting: OptimizationTestingConfig{ + Enabled: true, + TestPrefixes: []string{ + "optimize_", }, }, }, TestChainConfig: *chainConfig, }, Compilation: compilationConfig, + Logging: LoggingConfig{ + Level: zerolog.InfoLevel, + LogDirectory: "", + NoColor: false, + }, } // Return the project configuration diff --git a/fuzzing/config/gen_fuzzing_config.go 
b/fuzzing/config/gen_fuzzing_config.go new file mode 100644 index 00000000..47f780c5 --- /dev/null +++ b/fuzzing/config/gen_fuzzing_config.go @@ -0,0 +1,166 @@ +// Code generated by github.com/fjl/gencodec. DO NOT EDIT. + +package config + +import ( + "encoding/json" + "math/big" + + "github.com/crytic/medusa/chain/config" + "github.com/ethereum/go-ethereum/common/hexutil" +) + +var _ = (*fuzzingConfigMarshaling)(nil) + +// MarshalJSON marshals as JSON. +func (f FuzzingConfig) MarshalJSON() ([]byte, error) { + type FuzzingConfig struct { + Workers int `json:"workers"` + WorkerResetLimit int `json:"workerResetLimit"` + Timeout int `json:"timeout"` + TestLimit uint64 `json:"testLimit"` + ShrinkLimit uint64 `json:"shrinkLimit"` + CallSequenceLength int `json:"callSequenceLength"` + CorpusDirectory string `json:"corpusDirectory"` + CoverageEnabled bool `json:"coverageEnabled"` + CoverageFormats []string `json:"coverageFormats"` + TargetContracts []string `json:"targetContracts"` + PredeployedContracts map[string]string `json:"predeployedContracts"` + TargetContractsBalances []*hexutil.Big `json:"targetContractsBalances"` + ConstructorArgs map[string]map[string]any `json:"constructorArgs"` + DeployerAddress string `json:"deployerAddress"` + SenderAddresses []string `json:"senderAddresses"` + MaxBlockNumberDelay uint64 `json:"blockNumberDelayMax"` + MaxBlockTimestampDelay uint64 `json:"blockTimestampDelayMax"` + BlockGasLimit uint64 `json:"blockGasLimit"` + TransactionGasLimit uint64 `json:"transactionGasLimit"` + Testing TestingConfig `json:"testing"` + TestChainConfig config.TestChainConfig `json:"chainConfig"` + } + var enc FuzzingConfig + enc.Workers = f.Workers + enc.WorkerResetLimit = f.WorkerResetLimit + enc.Timeout = f.Timeout + enc.TestLimit = f.TestLimit + enc.ShrinkLimit = f.ShrinkLimit + enc.CallSequenceLength = f.CallSequenceLength + enc.CorpusDirectory = f.CorpusDirectory + enc.CoverageEnabled = f.CoverageEnabled + enc.CoverageFormats = f.CoverageFormats + 
enc.TargetContracts = f.TargetContracts + enc.PredeployedContracts = f.PredeployedContracts + if f.TargetContractsBalances != nil { + enc.TargetContractsBalances = make([]*hexutil.Big, len(f.TargetContractsBalances)) + for k, v := range f.TargetContractsBalances { + enc.TargetContractsBalances[k] = (*hexutil.Big)(v) + } + } + enc.ConstructorArgs = f.ConstructorArgs + enc.DeployerAddress = f.DeployerAddress + enc.SenderAddresses = f.SenderAddresses + enc.MaxBlockNumberDelay = f.MaxBlockNumberDelay + enc.MaxBlockTimestampDelay = f.MaxBlockTimestampDelay + enc.BlockGasLimit = f.BlockGasLimit + enc.TransactionGasLimit = f.TransactionGasLimit + enc.Testing = f.Testing + enc.TestChainConfig = f.TestChainConfig + return json.Marshal(&enc) +} + +// UnmarshalJSON unmarshals from JSON. +func (f *FuzzingConfig) UnmarshalJSON(input []byte) error { + type FuzzingConfig struct { + Workers *int `json:"workers"` + WorkerResetLimit *int `json:"workerResetLimit"` + Timeout *int `json:"timeout"` + TestLimit *uint64 `json:"testLimit"` + ShrinkLimit *uint64 `json:"shrinkLimit"` + CallSequenceLength *int `json:"callSequenceLength"` + CorpusDirectory *string `json:"corpusDirectory"` + CoverageEnabled *bool `json:"coverageEnabled"` + CoverageFormats []string `json:"coverageFormats"` + TargetContracts []string `json:"targetContracts"` + PredeployedContracts map[string]string `json:"predeployedContracts"` + TargetContractsBalances []*hexutil.Big `json:"targetContractsBalances"` + ConstructorArgs map[string]map[string]any `json:"constructorArgs"` + DeployerAddress *string `json:"deployerAddress"` + SenderAddresses []string `json:"senderAddresses"` + MaxBlockNumberDelay *uint64 `json:"blockNumberDelayMax"` + MaxBlockTimestampDelay *uint64 `json:"blockTimestampDelayMax"` + BlockGasLimit *uint64 `json:"blockGasLimit"` + TransactionGasLimit *uint64 `json:"transactionGasLimit"` + Testing *TestingConfig `json:"testing"` + TestChainConfig *config.TestChainConfig `json:"chainConfig"` + } + var dec 
FuzzingConfig + if err := json.Unmarshal(input, &dec); err != nil { + return err + } + if dec.Workers != nil { + f.Workers = *dec.Workers + } + if dec.WorkerResetLimit != nil { + f.WorkerResetLimit = *dec.WorkerResetLimit + } + if dec.Timeout != nil { + f.Timeout = *dec.Timeout + } + if dec.TestLimit != nil { + f.TestLimit = *dec.TestLimit + } + if dec.ShrinkLimit != nil { + f.ShrinkLimit = *dec.ShrinkLimit + } + if dec.CallSequenceLength != nil { + f.CallSequenceLength = *dec.CallSequenceLength + } + if dec.CorpusDirectory != nil { + f.CorpusDirectory = *dec.CorpusDirectory + } + if dec.CoverageEnabled != nil { + f.CoverageEnabled = *dec.CoverageEnabled + } + if dec.CoverageFormats != nil { + f.CoverageFormats = dec.CoverageFormats + } + if dec.TargetContracts != nil { + f.TargetContracts = dec.TargetContracts + } + if dec.PredeployedContracts != nil { + f.PredeployedContracts = dec.PredeployedContracts + } + if dec.TargetContractsBalances != nil { + f.TargetContractsBalances = make([]*big.Int, len(dec.TargetContractsBalances)) + for k, v := range dec.TargetContractsBalances { + f.TargetContractsBalances[k] = (*big.Int)(v) + } + } + if dec.ConstructorArgs != nil { + f.ConstructorArgs = dec.ConstructorArgs + } + if dec.DeployerAddress != nil { + f.DeployerAddress = *dec.DeployerAddress + } + if dec.SenderAddresses != nil { + f.SenderAddresses = dec.SenderAddresses + } + if dec.MaxBlockNumberDelay != nil { + f.MaxBlockNumberDelay = *dec.MaxBlockNumberDelay + } + if dec.MaxBlockTimestampDelay != nil { + f.MaxBlockTimestampDelay = *dec.MaxBlockTimestampDelay + } + if dec.BlockGasLimit != nil { + f.BlockGasLimit = *dec.BlockGasLimit + } + if dec.TransactionGasLimit != nil { + f.TransactionGasLimit = *dec.TransactionGasLimit + } + if dec.Testing != nil { + f.Testing = *dec.Testing + } + if dec.TestChainConfig != nil { + f.TestChainConfig = *dec.TestChainConfig + } + return nil +} diff --git a/fuzzing/contracts/contract.go b/fuzzing/contracts/contract.go index 
30d4feb6..30ad094a 100644 --- a/fuzzing/contracts/contract.go +++ b/fuzzing/contracts/contract.go @@ -1,7 +1,11 @@ package contracts import ( + "golang.org/x/exp/slices" + "strings" + "github.com/crytic/medusa/compilation/types" + "github.com/ethereum/go-ethereum/accounts/abi" ) // Contracts describes an array of contracts @@ -32,17 +36,58 @@ type Contract struct { // compiledContract describes the compiled contract data. compiledContract *types.CompiledContract + + // compilation describes the compilation which contains the compiledContract. + compilation *types.Compilation + + // PropertyTestMethods are the methods that are property tests. + PropertyTestMethods []abi.Method + + // OptimizationTestMethods are the methods that are optimization tests. + OptimizationTestMethods []abi.Method + + // AssertionTestMethods are ALL other methods that are not property or optimization tests by default. + // If configured, the methods will be targeted or excluded based on the targetFunctionSignatures + // and excludedFunctionSignatures, respectively. + AssertionTestMethods []abi.Method } // NewContract returns a new Contract instance with the provided information. -func NewContract(name string, sourcePath string, compiledContract *types.CompiledContract) *Contract { +func NewContract(name string, sourcePath string, compiledContract *types.CompiledContract, compilation *types.Compilation) *Contract { return &Contract{ name: name, sourcePath: sourcePath, compiledContract: compiledContract, + compilation: compilation, } } +// WithTargetedAssertionMethods filters the assertion test methods to those in the target list. 
+func (c *Contract) WithTargetedAssertionMethods(target []string) *Contract { + var candidateMethods []abi.Method + for _, method := range c.AssertionTestMethods { + canonicalSig := strings.Join([]string{c.name, method.Sig}, ".") + if slices.Contains(target, canonicalSig) { + candidateMethods = append(candidateMethods, method) + } + } + c.AssertionTestMethods = candidateMethods + return c +} + +// WithExcludedAssertionMethods filters the assertion test methods to all methods not in excluded list. +func (c *Contract) WithExcludedAssertionMethods(excludedMethods []string) *Contract { + var candidateMethods []abi.Method + for _, method := range c.AssertionTestMethods { + canonicalSig := strings.Join([]string{c.name, method.Sig}, ".") + if !slices.Contains(excludedMethods, canonicalSig) { + candidateMethods = append(candidateMethods, method) + } + } + c.AssertionTestMethods = candidateMethods + return c +} + // Name returns the name of the contract. func (c *Contract) Name() string { return c.name @@ -57,3 +102,8 @@ func (c *Contract) SourcePath() string { func (c *Contract) CompiledContract() *types.CompiledContract { return c.compiledContract } + +// Compilation returns the compilation which contains the CompiledContract. 
+func (c *Contract) Compilation() *types.Compilation { + return c.compilation +} diff --git a/fuzzing/corpus/corpus.go b/fuzzing/corpus/corpus.go index 0b78dbcd..8a640298 100644 --- a/fuzzing/corpus/corpus.go +++ b/fuzzing/corpus/corpus.go @@ -2,21 +2,25 @@ package corpus import ( "bytes" - "encoding/json" "fmt" + "math/big" + "os" + "path/filepath" + "sync" + "time" + + "github.com/crytic/medusa/utils" + "github.com/crytic/medusa/chain" "github.com/crytic/medusa/fuzzing/calls" "github.com/crytic/medusa/fuzzing/coverage" - "github.com/crytic/medusa/utils" + "github.com/crytic/medusa/logging" + "github.com/crytic/medusa/logging/colors" "github.com/crytic/medusa/utils/randomutils" "github.com/ethereum/go-ethereum/common" - "math/big" - "os" - "path/filepath" - "sync" + "github.com/google/uuid" "github.com/crytic/medusa/fuzzing/contracts" - "github.com/google/uuid" ) // Corpus describes an archive of fuzzer-generated artifacts used to further fuzzing efforts. These artifacts are @@ -29,77 +33,144 @@ type Corpus struct { // coverageMaps describes the total code coverage known to be achieved across all corpus call sequences. coverageMaps *coverage.CoverageMaps - // callSequences is a list of call sequences that increased coverage or otherwise were found to be valuable - // to the fuzzer. - callSequences []*corpusFile[calls.CallSequence] + // callSequenceFiles represents a corpus directory with files that should be used for mutations. + callSequenceFiles *corpusDirectory[calls.CallSequence] + + // testResultSequenceFiles represents a corpus directory with files which describe call sequences that were flagged + // to be saved by a test case provider. These are not used in mutations. + testResultSequenceFiles *corpusDirectory[calls.CallSequence] // unexecutedCallSequences defines the callSequences which have not yet been executed by the fuzzer. As each item // is selected for execution by the fuzzer on startup, it is removed. 
This way, all call sequences loaded from disk // are executed to check for test failures. unexecutedCallSequences []calls.CallSequence - // weightedCallSequenceChooser is a provider that allows for weighted random selection of callSequences. If a + // mutationTargetSequenceChooser is a provider that allows for weighted random selection of callSequences. If a // call sequence was not found to be compatible with this run, it is not added to the chooser. - weightedCallSequenceChooser *randomutils.WeightedRandomChooser[calls.CallSequence] + mutationTargetSequenceChooser *randomutils.WeightedRandomChooser[calls.CallSequence] // callSequencesLock provides thread synchronization to prevent concurrent access errors into // callSequences. callSequencesLock sync.Mutex -} - -// corpusFile represents corpus data and its state on the filesystem. -type corpusFile[T any] struct { - // filePath describes the path the file should be written to. If blank, this indicates it has not yet been written. - filePath string - // data describes an object whose data should be written to the file. - data T + // logger describes the Corpus's log object that can be used to log important events + logger *logging.Logger } // NewCorpus initializes a new Corpus object, reading artifacts from the provided directory. If the directory refers // to an empty path, artifacts will not be persistently stored. func NewCorpus(corpusDirectory string) (*Corpus, error) { + var err error corpus := &Corpus{ storageDirectory: corpusDirectory, coverageMaps: coverage.NewCoverageMaps(), - callSequences: make([]*corpusFile[calls.CallSequence], 0), + callSequenceFiles: newCorpusDirectory[calls.CallSequence](""), + testResultSequenceFiles: newCorpusDirectory[calls.CallSequence](""), unexecutedCallSequences: make([]calls.CallSequence, 0), + logger: logging.GlobalLogger.NewSubLogger("module", "corpus"), } - // If we have a corpus directory set, parse it. + // If we have a corpus directory set, parse our call sequences. 
if corpus.storageDirectory != "" { - // Read all call sequences discovered in the relevant corpus directory. - matches, err := filepath.Glob(filepath.Join(corpus.CallSequencesDirectory(), "*.json")) + // Migrate the legacy corpus structure + // Note that it is important to call this first since we want to move all the call sequence files before reading + // them into the corpus + err = corpus.migrateLegacyCorpus() + if err != nil { + return nil, err + } + + // Read call sequences. + corpus.callSequenceFiles.path = filepath.Join(corpus.storageDirectory, "call_sequences") + err = corpus.callSequenceFiles.readFiles("*.json") + if err != nil { + return nil, err + } + + // Read test case provider related call sequences (test failures, etc). + corpus.testResultSequenceFiles.path = filepath.Join(corpus.storageDirectory, "test_results") + err = corpus.testResultSequenceFiles.readFiles("*.json") if err != nil { return nil, err } - for i := 0; i < len(matches); i++ { - // Alias our file path. - filePath := matches[i] + } + + return corpus, nil +} + +// migrateLegacyCorpus is used to read in the legacy corpus standard where call sequences were stored in two separate +// directories (mutable/immutable). 
+func (c *Corpus) migrateLegacyCorpus() error { + // Check to see if the mutable and/or the immutable directories exist + callSequencePath := filepath.Join(c.storageDirectory, "call_sequences") + mutablePath := filepath.Join(c.storageDirectory, "call_sequences", "mutable") + immutablePath := filepath.Join(c.storageDirectory, "call_sequences", "immutable") + + // Only return an error if the error is something other than "filepath does not exist" + mutableDirInfo, err := os.Stat(mutablePath) + if err != nil && !os.IsNotExist(err) { + return err + } + immutableDirInfo, err := os.Stat(immutablePath) + if err != nil && !os.IsNotExist(err) { + return err + } + + // Return early if these directories do not exist + if mutableDirInfo == nil && immutableDirInfo == nil { + return nil + } + + // Now, we need to notify the user that we have detected a legacy structure + c.logger.Info("Migrating legacy corpus") + + // If the mutable directory exists, read in all the files and add them to the call sequence files + if mutableDirInfo != nil { + // Discover all corpus files in the given directory. + filePaths, err := filepath.Glob(filepath.Join(mutablePath, "*.json")) + if err != nil { + return err + } - // Read the call sequence data. - b, err := os.ReadFile(filePath) + // Move each file from the mutable directory to the parent call_sequences directory + for _, filePath := range filePaths { + err = utils.MoveFile(filePath, filepath.Join(callSequencePath, filepath.Base(filePath))) if err != nil { - return nil, err + return err } + } + + // Delete the mutable directory + err = utils.DeleteDirectory(mutablePath) + if err != nil { + return err + } + } + + // If the immutable directory exists, read in all the files and add them to the call sequence files + if immutableDirInfo != nil { + // Discover all corpus files in the given directory. + filePaths, err := filepath.Glob(filepath.Join(immutablePath, "*.json")) + if err != nil { + return err + } - // Parse the call sequence data. 
- var seq calls.CallSequence - err = json.Unmarshal(b, &seq) + // Move each file from the immutable directory to the parent call_sequences directory + for _, filePath := range filePaths { + err = utils.MoveFile(filePath, filepath.Join(callSequencePath, filepath.Base(filePath))) if err != nil { - return nil, err + return err } + } - // Add entry to corpus - corpus.callSequences = append(corpus.callSequences, &corpusFile[calls.CallSequence]{ - filePath: filePath, - data: seq, - }) + // Delete the immutable directory + err = utils.DeleteDirectory(immutablePath) + if err != nil { + return err } } - // Initialize our weighted random chooser - return corpus, nil + return nil } // CoverageMaps exposes coverage details for all call sequences known to the corpus. @@ -107,86 +178,48 @@ func (c *Corpus) CoverageMaps() *coverage.CoverageMaps { return c.coverageMaps } -// StorageDirectory returns the root directory path of the corpus. If this is empty, it indicates persistent storage -// will not be used. -func (c *Corpus) StorageDirectory() string { - return c.storageDirectory -} - -// CallSequencesDirectory returns the directory path where coverage increasing call sequences should be stored. -// This is a subdirectory of StorageDirectory. If StorageDirectory is empty, this is as well, indicating persistent -// storage will not be used. -func (c *Corpus) CallSequencesDirectory() string { - if c.storageDirectory == "" { - return "" - } - return filepath.Join(c.StorageDirectory(), "call_sequences") -} - -// CallSequenceCount returns the total number of call sequences in the corpus, some of which may be inactive/not in use. -func (c *Corpus) CallSequenceCount() int { - return len(c.callSequences) +// CallSequenceEntryCount returns the total number of call sequences that increased coverage and also any test results +// that led to a failure. 
+func (c *Corpus) CallSequenceEntryCount() (int, int) { + return len(c.callSequenceFiles.files), len(c.testResultSequenceFiles.files) } -// ActiveCallSequenceCount returns the count of call sequences recorded in the corpus which have been validated and are -// ready for use by RandomCallSequence. -func (c *Corpus) ActiveCallSequenceCount() int { - if c.weightedCallSequenceChooser == nil { +// ActiveMutableSequenceCount returns the count of call sequences recorded in the corpus which have been validated +// after Corpus initialization and are ready for use in mutations. +func (c *Corpus) ActiveMutableSequenceCount() int { + if c.mutationTargetSequenceChooser == nil { return 0 } - return c.weightedCallSequenceChooser.ChoiceCount() + return c.mutationTargetSequenceChooser.ChoiceCount() } -// Initialize initializes any runtime data needed for a Corpus on startup. Call sequences are replayed on the post-setup -// (deployment) test chain to calculate coverage, while resolving references to compiled contracts. -func (c *Corpus) Initialize(baseTestChain *chain.TestChain, contractDefinitions contracts.Contracts) error { - // Acquire our call sequences lock during the duration of this method. - c.callSequencesLock.Lock() - defer c.callSequencesLock.Unlock() - - // Initialize our call sequence structures. - c.weightedCallSequenceChooser = randomutils.NewWeightedRandomChooser[calls.CallSequence]() - c.unexecutedCallSequences = make([]calls.CallSequence, 0) - - // Create new coverage maps to track total coverage and a coverage tracer to do so. - c.coverageMaps = coverage.NewCoverageMaps() - coverageTracer := coverage.NewCoverageTracer() - - // Create our structure and event listeners to track deployed contracts - deployedContracts := make(map[common.Address]*contracts.Contract, 0) - - // Clone our test chain, adding listeners for contract deployment events from genesis. 
- testChain, err := baseTestChain.Clone(func(newChain *chain.TestChain) error { - // After genesis, prior to adding other blocks, we attach our coverage tracer - newChain.AddTracer(coverageTracer, true, false) - - // We also track any contract deployments, so we can resolve contract/method definitions for corpus call - // sequences. - newChain.Events.ContractDeploymentAddedEventEmitter.Subscribe(func(event chain.ContractDeploymentsAddedEvent) error { - matchedContract := contractDefinitions.MatchBytecode(event.Contract.InitBytecode, event.Contract.RuntimeBytecode) - if matchedContract != nil { - deployedContracts[event.Contract.Address] = matchedContract - } - return nil - }) - newChain.Events.ContractDeploymentRemovedEventEmitter.Subscribe(func(event chain.ContractDeploymentsRemovedEvent) error { - delete(deployedContracts, event.Contract.Address) - return nil - }) - return nil - }) - if err != nil { - return fmt.Errorf("failed to initialize coverage maps, base test chain cloning encountered error: %v", err) +// RandomMutationTargetSequence returns a weighted random call sequence from the Corpus, or an error if one occurs. +func (c *Corpus) RandomMutationTargetSequence() (calls.CallSequence, error) { + // If we didn't initialize a chooser, return an error + if c.mutationTargetSequenceChooser == nil { + return nil, fmt.Errorf("corpus could not return a random call sequence because the corpus was not initialized") } - // Next we replay every call sequence, checking its validity on this chain and measuring coverage. If the sequence - // is valid, we add it to our weighted list for future random selection. + // Pick a random call sequence, then clone it before returning it, so the original is untainted. + seq, err := c.mutationTargetSequenceChooser.Choose() + if seq == nil || err != nil { + return nil, err + } + return seq.Clone() +} +// initializeSequences is a helper method for Initialize. 
It validates a list of call sequence files on a given +// chain, using the map of deployed contracts (e.g. to check for non-existent method called, due to code changes). +// Valid call sequences are added to the list of un-executed sequences the fuzzer should execute first. +// If this sequence list being initialized is for use with mutations, it is added to the mutationTargetSequenceChooser. +// Returns an error if one occurs. +func (c *Corpus) initializeSequences(sequenceFiles *corpusDirectory[calls.CallSequence], testChain *chain.TestChain, deployedContracts map[common.Address]*contracts.Contract, useInMutations bool) error { // Cache current HeadBlockNumber so that you can reset back to it after every sequence baseBlockNumber := testChain.HeadBlockNumber() // Loop for each sequence - for _, sequenceFileData := range c.callSequences { + var err error + for _, sequenceFileData := range sequenceFiles.files { // Unwrap the underlying sequence. sequence := sequenceFileData.data @@ -201,24 +234,25 @@ func (c *Corpus) Initialize(baseTestChain *chain.TestChain, contractDefinitions // If we are deploying a contract and not targeting one with this call, there should be no work to do. currentSequenceElement := sequence[currentIndex] - if currentSequenceElement.Call.MsgTo == nil { + if currentSequenceElement.Call.To == nil { return currentSequenceElement, nil } // We are calling a contract with this call, ensure we can resolve the contract call is targeting. 
- resolvedContract, resolvedContractExists := deployedContracts[*currentSequenceElement.Call.MsgTo] + resolvedContract, resolvedContractExists := deployedContracts[*currentSequenceElement.Call.To] if !resolvedContractExists { - sequenceInvalidError = fmt.Errorf("contract at address '%v' could not be resolved", currentSequenceElement.Call.MsgTo.String()) + sequenceInvalidError = fmt.Errorf("contract at address '%v' could not be resolved", currentSequenceElement.Call.To.String()) return nil, nil } currentSequenceElement.Contract = resolvedContract // Next, if our sequence element uses ABI values to produce call data, our deserialized data is not yet // sufficient for runtime use, until we use it to resolve runtime references. - callAbiValues := currentSequenceElement.Call.MsgDataAbiValues + callAbiValues := currentSequenceElement.Call.DataAbiValues if callAbiValues != nil { sequenceInvalidError = callAbiValues.Resolve(currentSequenceElement.Contract.CompiledContract().Abi) if sequenceInvalidError != nil { + sequenceInvalidError = fmt.Errorf("error resolving method in contract '%v': %v", currentSequenceElement.Contract.Name(), sequenceInvalidError) return nil, nil } } @@ -230,7 +264,7 @@ func (c *Corpus) Initialize(baseTestChain *chain.TestChain, contractDefinitions // Update our coverage maps for each call executed in our sequence. 
lastExecutedSequenceElement := currentlyExecutedSequence[len(currentlyExecutedSequence)-1] covMaps := coverage.GetCoverageTracerResults(lastExecutedSequenceElement.ChainReference.MessageResults()) - _, covErr := c.coverageMaps.Update(covMaps) + _, _, covErr := c.coverageMaps.Update(covMaps) if covErr != nil { return true, covErr } @@ -245,40 +279,121 @@ func (c *Corpus) Initialize(baseTestChain *chain.TestChain, contractDefinitions return fmt.Errorf("failed to initialize coverage maps from corpus, encountered an error while executing call sequence: %v\n", err) } - // If the sequence was replayed successfully, we add a weighted choice for it, for future selection. If it was - // not, we simply exclude it from our chooser and print a warning. + // If the sequence was replayed successfully, we add it. If it was not, we exclude it with a warning. if sequenceInvalidError == nil { - c.weightedCallSequenceChooser.AddChoices(randomutils.NewWeightedRandomChoice[calls.CallSequence](sequence, big.NewInt(1))) + if useInMutations && c.mutationTargetSequenceChooser != nil { + c.mutationTargetSequenceChooser.AddChoices(randomutils.NewWeightedRandomChoice[calls.CallSequence](sequence, big.NewInt(1))) + } c.unexecutedCallSequences = append(c.unexecutedCallSequences, sequence) } else { - fmt.Printf("corpus item '%v' disabled due to error when replaying it: %v\n", sequenceFileData.filePath, sequenceInvalidError) + c.logger.Debug("Corpus item ", colors.Bold, sequenceFileData.fileName, colors.Reset, " disabled due to error when replaying it", sequenceInvalidError) } // Revert chain state to our starting point to test the next sequence. 
- err = testChain.RevertToBlockNumber(baseBlockNumber) - if err != nil { + if err := testChain.RevertToBlockNumber(baseBlockNumber); err != nil { return fmt.Errorf("failed to reset the chain while seeding coverage: %v\n", err) } } return nil } -// AddCallSequence adds a call sequence to the corpus and returns an error in case of an issue -func (c *Corpus) AddCallSequence(seq calls.CallSequence, weight *big.Int, flushImmediately bool) error { +// Initialize initializes any runtime data needed for a Corpus on startup. Call sequences are replayed on the post-setup +// (deployment) test chain to calculate coverage, while resolving references to compiled contracts. +// Returns the active number of corpus items, total number of corpus items, or an error if one occurred. If an error +// is returned, then the corpus counts returned will always be zero. +func (c *Corpus) Initialize(baseTestChain *chain.TestChain, contractDefinitions contracts.Contracts) (int, int, error) { + // Acquire our call sequences lock during the duration of this method. + c.callSequencesLock.Lock() + defer c.callSequencesLock.Unlock() + + // Initialize our call sequence structures. + c.mutationTargetSequenceChooser = randomutils.NewWeightedRandomChooser[calls.CallSequence]() + c.unexecutedCallSequences = make([]calls.CallSequence, 0) + + // Create a coverage tracer to track coverage across all blocks. + c.coverageMaps = coverage.NewCoverageMaps() + coverageTracer := coverage.NewCoverageTracer() + + // Create our structure and event listeners to track deployed contracts + deployedContracts := make(map[common.Address]*contracts.Contract, 0) + + // Clone our test chain, adding listeners for contract deployment events from genesis. 
+	testChain, err := baseTestChain.Clone(func(newChain *chain.TestChain) error {
+		// After genesis, prior to adding other blocks, we attach our coverage tracer
+		newChain.AddTracer(coverageTracer.NativeTracer(), true, false)
+
+		// We also track any contract deployments, so we can resolve contract/method definitions for corpus call
+		// sequences.
+		newChain.Events.ContractDeploymentAddedEventEmitter.Subscribe(func(event chain.ContractDeploymentsAddedEvent) error {
+			matchedContract := contractDefinitions.MatchBytecode(event.Contract.InitBytecode, event.Contract.RuntimeBytecode)
+			if matchedContract != nil {
+				deployedContracts[event.Contract.Address] = matchedContract
+			}
+			return nil
+		})
+		newChain.Events.ContractDeploymentRemovedEventEmitter.Subscribe(func(event chain.ContractDeploymentsRemovedEvent) error {
+			delete(deployedContracts, event.Contract.Address)
+			return nil
+		})
+		return nil
+	})
+	if err != nil {
+		return 0, 0, fmt.Errorf("failed to initialize coverage maps, base test chain cloning encountered error: %v", err)
+	}
+
+	// Set our coverage maps to those collected when replaying all blocks when cloning.
+	c.coverageMaps = coverage.NewCoverageMaps()
+	for _, block := range testChain.CommittedBlocks() {
+		for _, messageResults := range block.MessageResults {
+			covMaps := coverage.GetCoverageTracerResults(messageResults)
+			_, _, covErr := c.coverageMaps.Update(covMaps)
+			if covErr != nil {
+				return 0, 0, covErr
+			}
+		}
+	}
+
+	// Next we replay every call sequence, checking its validity on this chain and measuring coverage. Valid sequences
+	// are added to the corpus for mutations, re-execution, etc.
+	//
+	// The order of initializations here is important, as it determines the order of "unexecuted sequences" to replay
+	// when the fuzzer's worker starts up. We want to replay test results first, so that other corpus items
+	// do not trigger the same test failures instead.
+	err = c.initializeSequences(c.testResultSequenceFiles, testChain, deployedContracts, false)
+	if err != nil {
+		return 0, 0, err
+	}
+
+	err = c.initializeSequences(c.callSequenceFiles, testChain, deployedContracts, true)
+	if err != nil {
+		return 0, 0, err
+	}
+
+	// Calculate corpus health metrics
+	corpusSequencesTotal := len(c.callSequenceFiles.files) + len(c.testResultSequenceFiles.files)
+	corpusSequencesActive := len(c.unexecutedCallSequences)
+
+	return corpusSequencesActive, corpusSequencesTotal, nil
+}
+
+// addCallSequence adds a call sequence to the corpus in a given corpus directory.
+// Returns an error, if one occurs.
+func (c *Corpus) addCallSequence(sequenceFiles *corpusDirectory[calls.CallSequence], sequence calls.CallSequence, useInMutations bool, mutationChooserWeight *big.Int, flushImmediately bool) error {
 	// Acquire a thread lock during modification of call sequence lists.
 	c.callSequencesLock.Lock()
 
 	// Check if call sequence has been added before, if so, exit without any action.
-	seqHash, err := seq.Hash()
+	seqHash, err := sequence.Hash()
 	if err != nil {
 		return err
 	}
 
 	// Verify no existing corpus item hash this same hash.
-	for _, existingSeq := range c.callSequences {
+	for _, existingSeq := range sequenceFiles.files {
 		// Calculate the existing sequence hash
 		existingSeqHash, err := existingSeq.data.Hash()
 		if err != nil {
+			c.callSequencesLock.Unlock()
 			return err
 		}
 
@@ -289,18 +404,20 @@ func (c *Corpus) AddCallSequence(seq calls.CallSequence, weight *big.Int, flushI
 		}
 	}
 
-	// Update our sequences with the new entry.
-	c.callSequences = append(c.callSequences, &corpusFile[calls.CallSequence]{
-		filePath: "",
-		data:     seq,
-	})
+	// Update our corpus directory with the new entry.
+	fileName := fmt.Sprintf("%v-%v.json", time.Now().UnixNano(), uuid.New().String())
+	err = sequenceFiles.addFile(fileName, sequence)
+	if err != nil {
+		c.callSequencesLock.Unlock()
+		return err
+	}
 
-	// If we have initialized a chooser, add our call sequence item to it.
- if c.weightedCallSequenceChooser != nil { - if weight == nil { - weight = big.NewInt(1) + // If we want to use this sequence in mutations and initialized a chooser, add our call sequence item to it. + if useInMutations && c.mutationTargetSequenceChooser != nil { + if mutationChooserWeight == nil { + mutationChooserWeight = big.NewInt(1) } - c.weightedCallSequenceChooser.AddChoices(randomutils.NewWeightedRandomChoice[calls.CallSequence](seq, weight)) + c.mutationTargetSequenceChooser.AddChoices(randomutils.NewWeightedRandomChoice[calls.CallSequence](sequence, mutationChooserWeight)) } // Unlock now, as flushing will lock on its own. @@ -314,18 +430,26 @@ func (c *Corpus) AddCallSequence(seq calls.CallSequence, weight *big.Int, flushI } } -// AddCallSequenceIfCoverageChanged checks if the most recent call executed in the provided call sequence achieved +// AddTestResultCallSequence adds a call sequence recorded to the corpus due to a test case provider flagging it to be +// recorded. +// Returns an error, if one occurs. +func (c *Corpus) AddTestResultCallSequence(callSequence calls.CallSequence, mutationChooserWeight *big.Int, flushImmediately bool) error { + return c.addCallSequence(c.testResultSequenceFiles, callSequence, false, mutationChooserWeight, flushImmediately) +} + +// CheckSequenceCoverageAndUpdate checks if the most recent call executed in the provided call sequence achieved // coverage the Corpus did not with any of its call sequences. If it did, the call sequence is added to the corpus // and the Corpus coverage maps are updated accordingly. // Returns an error if one occurs. -func (c *Corpus) AddCallSequenceIfCoverageChanged(callSequence calls.CallSequence, weight *big.Int, flushImmediately bool) error { +func (c *Corpus) CheckSequenceCoverageAndUpdate(callSequence calls.CallSequence, mutationChooserWeight *big.Int, flushImmediately bool) error { // If we have coverage-guided fuzzing disabled or no calls in our sequence, there is nothing to do. 
if len(callSequence) == 0 { return nil } // Obtain our coverage maps for our last call. - lastCallChainReference := callSequence[len(callSequence)-1].ChainReference + lastCall := callSequence[len(callSequence)-1] + lastCallChainReference := lastCall.ChainReference lastMessageResult := lastCallChainReference.Block.MessageResults[lastCallChainReference.TransactionIndex] lastMessageCoverageMaps := coverage.GetCoverageTracerResults(lastMessageResult) @@ -338,13 +462,15 @@ func (c *Corpus) AddCallSequenceIfCoverageChanged(callSequence calls.CallSequenc coverage.RemoveCoverageTracerResults(lastMessageResult) // Merge the coverage maps into our total coverage maps and check if we had an update. - coverageUpdated, err := c.coverageMaps.Update(lastMessageCoverageMaps) + coverageUpdated, revertedCoverageUpdated, err := c.coverageMaps.Update(lastMessageCoverageMaps) if err != nil { return err } - if coverageUpdated { - // New coverage has been found with this call sequence, so we add it to the corpus. - err = c.AddCallSequence(callSequence, weight, flushImmediately) + + // If we had an increase in non-reverted or reverted coverage, we save the sequence. + if coverageUpdated || revertedCoverageUpdated { + // If we achieved new coverage, save this sequence for mutation purposes. + err = c.addCallSequence(c.callSequenceFiles, callSequence, true, mutationChooserWeight, flushImmediately) if err != nil { return err } @@ -352,21 +478,6 @@ func (c *Corpus) AddCallSequenceIfCoverageChanged(callSequence calls.CallSequenc return nil } -// RandomCallSequence returns a weighted random call sequence from the Corpus, or an error if one occurs. 
-func (c *Corpus) RandomCallSequence() (calls.CallSequence, error) { - // If we didn't initialize a chooser, return an error - if c.weightedCallSequenceChooser == nil { - return nil, fmt.Errorf("corpus could not return a random call sequence because the corpus was not initialized") - } - - // Pick a random call sequence, then clone it before returning it, so the original is untainted. - seq, err := c.weightedCallSequenceChooser.Choose() - if seq == nil || err != nil { - return nil, err - } - return seq.Clone() -} - // UnexecutedCallSequence returns a call sequence loaded from disk which has not yet been returned by this method. // It is intended to be used by the fuzzer to run all un-executed call sequences (without mutations) to check for test // failures. If a call sequence is returned, it will not be returned by this method again. @@ -408,35 +519,17 @@ func (c *Corpus) Flush() error { c.callSequencesLock.Lock() defer c.callSequencesLock.Unlock() - // Ensure the corpus directories exists. - err := utils.MakeDirectory(c.storageDirectory) + // Write all coverage-increasing call sequences. + err := c.callSequenceFiles.writeFiles() if err != nil { return err } - err = utils.MakeDirectory(c.CallSequencesDirectory()) + + // Write test case provider related call sequences (test failures, etc). + err = c.testResultSequenceFiles.writeFiles() if err != nil { return err } - // Write all call sequences to disk - // TODO: This can be optimized by storing/indexing unwritten sequences separately and only iterating over those. - for _, sequenceFile := range c.callSequences { - if sequenceFile.filePath == "" { - // Determine the file path to write this to. - sequenceFile.filePath = filepath.Join(c.CallSequencesDirectory(), uuid.New().String()+".json") - - // Marshal the call sequence - jsonEncodedData, err := json.MarshalIndent(sequenceFile.data, "", " ") - if err != nil { - return err - } - - // Write the JSON encoded data. 
- err = os.WriteFile(sequenceFile.filePath, jsonEncodedData, os.ModePerm) - if err != nil { - return fmt.Errorf("An error occurred while writing call sequence to disk: %v\n", err) - } - } - } return nil } diff --git a/fuzzing/corpus/corpus_files.go b/fuzzing/corpus/corpus_files.go new file mode 100644 index 00000000..c1354f61 --- /dev/null +++ b/fuzzing/corpus/corpus_files.go @@ -0,0 +1,187 @@ +package corpus + +import ( + "encoding/json" + "fmt" + "github.com/crytic/medusa/utils" + "os" + "path/filepath" + "strings" + "sync" +) + +// corpusFile represents corpus data and its state on the filesystem. +type corpusFile[T any] struct { + // fileName describes the filename the file should be written with, in the corpusDirectory.path. + fileName string + + // data describes an object whose data should be written to the file. + data T + + // writtenToDisk indicates whether the corpus item has been flushed to disk yet. If this is false, it signals that + // the data should be written or overwritten on disk. + writtenToDisk bool +} + +// corpusDirectory is a provider for corpusFile items in a given directory, offering read/write operations to +// automatically JSON serialize/deserialize items of a given type to a directory. +type corpusDirectory[T any] struct { + // path signifies the directory to store corpusFile items within. If the path is an empty string, files + // will not be read from, or written to disk. + path string + + // files represents the corpusFile items stored/to be stored in the specified directory. + files []*corpusFile[T] + + // filesLock represents a thread lock used when editing files. + filesLock sync.Mutex +} + +// newCorpusDirectory returns a new corpusDirectory with the provided directory path set. +// If the directory path is an empty string, then files will not be read from, or written to disk. 
+func newCorpusDirectory[T any](path string) *corpusDirectory[T] { + return &corpusDirectory[T]{ + path: path, + files: make([]*corpusFile[T], 0), + } +} + +// addFile adds a given file to the file list (to later be written to the directory if a path was provided). +// If a corpusFile exists with the provided file name, it is overwritten in the list (but not yet flushed to disk). +// If a corpusFile does not exist with the provided file name, it is added. +// Returns an error, if one occurred. +func (cd *corpusDirectory[T]) addFile(fileName string, data T) error { + // Lock to avoid concurrency issues when accessing the files list + cd.filesLock.Lock() + defer cd.filesLock.Unlock() + + // First we make sure this file doesn't already exist, if it does, we overwrite its data and mark it unwritten. + lowerFileName := strings.ToLower(fileName) + for i := 0; i < len(cd.files); i++ { + if lowerFileName == strings.ToLower(cd.files[i].fileName) { + cd.files[i].data = data + cd.files[i].writtenToDisk = false + return nil + } + } + + // If the file otherwise did not exist, we add it. + cd.files = append(cd.files, &corpusFile[T]{ + fileName: fileName, + data: data, + writtenToDisk: false, + }) + return nil +} + +// removeFile removes a given file from the file list. This does not delete it from disk. +// Returns a boolean indicating if a corpusFile with the provided file name was found and removed. +func (cd *corpusDirectory[T]) removeFile(fileName string) bool { + // Lock to avoid concurrency issues when accessing the files list + cd.filesLock.Lock() + defer cd.filesLock.Unlock() + + // If we find the filename, remove it from our list of files. + lowerFileName := strings.ToLower(fileName) + for i := 0; i < len(cd.files); i++ { + if lowerFileName == strings.ToLower(cd.files[i].fileName) { + cd.files = append(cd.files[:i], cd.files[i+1:]...) 
+ return true + } + } + return false +} + +// readFiles takes a provided glob pattern representing files to parse within the corpusDirectory.path. +// It parses any matching file into a corpusFile and adds it to the corpusDirectory. +// Returns an error, if one occurred. +func (cd *corpusDirectory[T]) readFiles(filePattern string) error { + // If our directory path specified is empty, we do not read/write to disk. + if cd.path == "" { + return nil + } + + // Discover all corpus files in the given directory. + filePaths, err := filepath.Glob(filepath.Join(cd.path, filePattern)) + if err != nil { + return err + } + + // Refresh our files list + cd.files = make([]*corpusFile[T], 0) + + // Loop for every file path provided + for _, filePath := range filePaths { + // Read the file data. + b, err := os.ReadFile(filePath) + if err != nil { + return err + } + + // Parse the call sequence data. + var fileData T + err = json.Unmarshal(b, &fileData) + if err != nil { + return err + } + + // Add entry to corpus + cd.files = append(cd.files, &corpusFile[T]{ + fileName: filepath.Base(filePath), + data: fileData, + writtenToDisk: true, + }) + } + return nil +} + +// writeFiles flushes all corpusDirectory.files to disk, if they have corpusFile.writtenToDisk set as false. +// It then sets corpusFile.writtenToDisk as true for each flushed to disk. +// Returns an error, if one occurred. +func (cd *corpusDirectory[T]) writeFiles() error { + // TODO: This can be optimized by storing/indexing unwritten sequences separately and only iterating over those. + + // If our directory path is empty, we do not write anything. + if cd.path == "" { + return nil + } + + // Lock to avoid concurrency issues when accessing the files list + cd.filesLock.Lock() + defer cd.filesLock.Unlock() + + // Ensure the corpus directory path exists. + err := utils.MakeDirectory(cd.path) + if err != nil { + return err + } + + // For each file which does not have an assigned file path yet, we flush it to disk. 
+ for _, file := range cd.files { + if !file.writtenToDisk { + // If we don't have a filename, throw an error. + if len(file.fileName) == 0 { + return fmt.Errorf("failed to flush corpus item to disk as it does not have a filename") + } + + // Determine the file path to write this to. + filePath := filepath.Join(cd.path, file.fileName) + + // Marshal the data + jsonEncodedData, err := json.MarshalIndent(file.data, "", " ") + if err != nil { + return err + } + + // Write the JSON encoded data. + err = os.WriteFile(filePath, jsonEncodedData, os.ModePerm) + if err != nil { + return fmt.Errorf("An error occurred while writing corpus data to file: %v\n", err) + } + + // Update our written to disk status. + file.writtenToDisk = true + } + } + return nil +} diff --git a/fuzzing/corpus/corpus_test.go b/fuzzing/corpus/corpus_test.go index f9c5751a..5997dbdd 100644 --- a/fuzzing/corpus/corpus_test.go +++ b/fuzzing/corpus/corpus_test.go @@ -23,7 +23,7 @@ func getMockSimpleCorpus(minSequences int, maxSequences, minBlocks int, maxBlock // Add the requested number of entries. 
numSequences := minSequences + (rand.Int() % (maxSequences - minSequences)) for i := 0; i < numSequences; i++ { - err := corpus.AddCallSequence(getMockCallSequence(minBlocks+(rand.Int()%(maxBlocks-minBlocks))), nil, false) + err := corpus.addCallSequence(corpus.callSequenceFiles, getMockCallSequence(minBlocks+(rand.Int()%(maxBlocks-minBlocks))), true, nil, false) if err != nil { return nil, err } @@ -31,7 +31,7 @@ func getMockSimpleCorpus(minSequences int, maxSequences, minBlocks int, maxBlock return corpus, nil } -// getMockSimpleCorpusEntry creates a mock CorpusCallSequence with numBlocks blocks for testing +// getMockCallSequence creates a mock CorpusCallSequence with numBlocks blocks for testing func getMockCallSequence(size int) calls.CallSequence { cs := make(calls.CallSequence, size) for i := 0; i < size; i++ { @@ -40,7 +40,7 @@ func getMockCallSequence(size int) calls.CallSequence { return cs } -// getMockSimpleBlockBlock creates a mock CorpusBlock with numTransactions transactions and receipts for testing +// getMockCallSequenceElement creates a mock CorpusBlock with numTransactions transactions and receipts for testing func getMockCallSequenceElement() *calls.CallSequenceElement { return &calls.CallSequenceElement{ Contract: nil, @@ -55,15 +55,15 @@ func getMockCallSequenceElement() *calls.CallSequenceElement { func getMockCallSequenceElementCall() *calls.CallMessage { to := common.BigToAddress(big.NewInt(rand.Int63())) txn := calls.CallMessage{ - MsgFrom: common.BigToAddress(big.NewInt(rand.Int63())), - MsgTo: &to, - MsgNonce: rand.Uint64(), - MsgValue: big.NewInt(int64(rand.Int())), - MsgGas: rand.Uint64(), - MsgGasPrice: big.NewInt(int64(rand.Int())), - MsgGasFeeCap: big.NewInt(int64(rand.Int())), - MsgGasTipCap: big.NewInt(int64(rand.Int())), - MsgData: []byte{uint8(rand.Uint64()), uint8(rand.Uint64()), uint8(rand.Uint64()), uint8(rand.Uint64())}, + From: common.BigToAddress(big.NewInt(rand.Int63())), + To: &to, + Nonce: rand.Uint64(), + Value: 
big.NewInt(int64(rand.Int())), + GasLimit: rand.Uint64(), + GasPrice: big.NewInt(int64(rand.Int())), + GasFeeCap: big.NewInt(int64(rand.Int())), + GasTipCap: big.NewInt(int64(rand.Int())), + Data: []byte{uint8(rand.Uint64()), uint8(rand.Uint64()), uint8(rand.Uint64()), uint8(rand.Uint64())}, } return &txn } @@ -100,9 +100,9 @@ func TestCorpusReadWrite(t *testing.T) { assert.NoError(t, err) // Ensure that there are the correct number of call sequence files - matches, err := filepath.Glob(filepath.Join(corpus.CallSequencesDirectory(), "*.json")) + matches, err := filepath.Glob(filepath.Join(corpus.callSequenceFiles.path, "*.json")) assert.NoError(t, err) - assert.EqualValues(t, corpus.CallSequenceCount(), len(matches), "Did not find numEntries matches") + assert.EqualValues(t, len(corpus.callSequenceFiles.files), len(matches)) // Wipe corpus clean so that you can now read it in from disk corpus, err = NewCorpus("corpus") @@ -124,7 +124,7 @@ func TestCorpusCallSequenceMarshaling(t *testing.T) { // Run the test in our temporary test directory to avoid artifact pollution. 
testutils.ExecuteInDirectory(t, t.TempDir(), func() { // For each entry, marshal it and then unmarshal the byte array - for _, entryFile := range corpus.callSequences { + for _, entryFile := range corpus.callSequenceFiles.files { // Marshal the entry b, err := json.Marshal(entryFile.data) assert.NoError(t, err) @@ -137,5 +137,11 @@ func TestCorpusCallSequenceMarshaling(t *testing.T) { // Check equality testCorpusCallSequencesEqual(t, entryFile.data, sameEntry) } + + // Remove all items + for i := 0; i < len(corpus.callSequenceFiles.files); { + corpus.callSequenceFiles.removeFile(corpus.callSequenceFiles.files[i].fileName) + } + assert.Empty(t, corpus.callSequenceFiles.files) }) } diff --git a/fuzzing/coverage/coverage_maps.go b/fuzzing/coverage/coverage_maps.go index e9d54811..59237669 100644 --- a/fuzzing/coverage/coverage_maps.go +++ b/fuzzing/coverage/coverage_maps.go @@ -1,31 +1,34 @@ package coverage import ( - "bytes" - "fmt" + "golang.org/x/exp/slices" + + compilationTypes "github.com/crytic/medusa/compilation/types" + "github.com/crytic/medusa/utils" "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/crypto" "sync" ) // CoverageMaps represents a data structure used to identify instruction execution coverage of various smart contracts // across a transaction or multiple transactions. type CoverageMaps struct { - // maps represents a structure used to track every codeCoverageData by a given deployed address/code hash. - maps map[common.Address]map[common.Hash]*codeCoverageData + // maps represents a structure used to track every ContractCoverageMap by a given deployed address/lookup hash. + maps map[common.Hash]map[common.Address]*ContractCoverageMap // cachedCodeAddress represents the last code address which coverage was updated for. This is used to prevent an // expensive lookup in maps. 
If cachedCodeHash does not match the current code address for which we are updating // coverage for, it, along with other cache variables are updated. cachedCodeAddress common.Address - // cachedCodeHash represents the last code hash which coverage was updated for. This is used to prevent an expensive - // lookup in maps. If cachedCodeHash does not match the current code hash for which we are updating coverage for, - // it, along with other cache variables are updated. + // cachedCodeHash represents the last lookup hash which coverage was updated for. This is used to prevent an + // expensive lookup in maps. If cachedCodeHash does not match the current code hash which we are updating + // coverage for, it, along with other cache variables are updated. cachedCodeHash common.Hash // cachedMap represents the last coverage map which was updated. If the coverage to update resides at the // cachedCodeAddress and matches the cachedCodeHash, then this map is used to avoid an expensive lookup into maps. - cachedMap *codeCoverageData + cachedMap *ContractCoverageMap // updateLock is a lock to offer concurrent thread safety for map accesses. updateLock sync.Mutex @@ -40,15 +43,91 @@ func NewCoverageMaps() *CoverageMaps { // Reset clears the coverage state for the CoverageMaps. func (cm *CoverageMaps) Reset() { - cm.maps = make(map[common.Address]map[common.Hash]*codeCoverageData) + cm.maps = make(map[common.Hash]map[common.Address]*ContractCoverageMap) + cm.cachedCodeAddress = common.Address{} + cm.cachedCodeHash = common.Hash{} + cm.cachedMap = nil } -// Update updates the current coverage maps with the provided ones. It returns a boolean indicating whether -// new coverage was achieved, or an error if one was encountered. -func (cm *CoverageMaps) Update(coverageMaps *CoverageMaps) (bool, error) { +// Equal checks whether two coverage maps are the same. Equality is determined if the keys and values are all the same. 
+func (cm *CoverageMaps) Equal(b *CoverageMaps) bool { + // Iterate through all maps + for codeHash, mapsByAddressA := range cm.maps { + mapsByAddressB, ok := b.maps[codeHash] + // Hash is not in b - we're done + if !ok { + return false + } + for codeAddress, coverageMapA := range mapsByAddressA { + coverageMapB, ok := mapsByAddressB[codeAddress] + // Address is not in b - we're done + if !ok { + return false + } + + // Verify the equality of the map data. + if !coverageMapA.Equal(coverageMapB) { + return false + } + } + } + return true +} + +// getContractCoverageMapHash obtain the hash used to look up a given contract's ContractCoverageMap. +// If this is init bytecode, metadata and abi arguments will attempt to be stripped, then a hash is computed. +// If this is runtime bytecode, the metadata ipfs/swarm hash will be used if available, otherwise the bytecode +// is hashed. +// Returns the resulting lookup hash. +func getContractCoverageMapHash(bytecode []byte, init bool) common.Hash { + // If available, the metadata code hash should be unique and reliable to use above all (for runtime bytecode). + if !init { + metadata := compilationTypes.ExtractContractMetadata(bytecode) + if metadata != nil { + metadataHash := metadata.ExtractBytecodeHash() + if metadataHash != nil { + return common.BytesToHash(metadataHash) + } + } + } + + // Otherwise, we use the hash of the bytecode after attempting to strip metadata (and constructor args). + strippedBytecode := compilationTypes.RemoveContractMetadata(bytecode) + return crypto.Keccak256Hash(strippedBytecode) +} + +// GetContractCoverageMap obtains a total coverage map representing coverage for the provided bytecode. +// If the provided bytecode could not find coverage maps, nil is returned. +// Returns the total coverage map, or an error if one occurs. 
+func (cm *CoverageMaps) GetContractCoverageMap(bytecode []byte, init bool) (*ContractCoverageMap, error) { + // Obtain the lookup hash + hash := getContractCoverageMapHash(bytecode, init) + + // Acquire our thread lock and defer our unlocking for when we exit this method + cm.updateLock.Lock() + defer cm.updateLock.Unlock() + + // Loop through all coverage maps for this hash and collect our total coverage. + if coverageByAddresses, ok := cm.maps[hash]; ok { + totalCoverage := newContractCoverageMap() + for _, coverage := range coverageByAddresses { + _, _, err := totalCoverage.update(coverage) + if err != nil { + return nil, err + } + } + return totalCoverage, nil + } else { + return nil, nil + } +} + +// Update updates the current coverage maps with the provided ones. +// Returns two booleans indicating whether successful or reverted coverage changed, or an error if one occurred. +func (cm *CoverageMaps) Update(coverageMaps *CoverageMaps) (bool, bool, error) { // If our maps provided are nil, do nothing if coverageMaps == nil { - return false, nil + return false, false, nil } // Acquire our thread lock and defer our unlocking for when we exit this method @@ -56,39 +135,42 @@ func (cm *CoverageMaps) Update(coverageMaps *CoverageMaps) (bool, error) { defer cm.updateLock.Unlock() // Create a boolean indicating whether we achieved new coverage - changed := false + successCoverageChanged := false + revertedCoverageChanged := false // Loop for each coverage map provided - for codeAddressToMerge, mapsByCodeHashToMerge := range coverageMaps.maps { - for codeHashToMerge, coverageMapToMerge := range mapsByCodeHashToMerge { - // If a coverage map lookup for this code address doesn't exist, create the mapping. 
- mapsByCodeHash, codeAddressExists := cm.maps[codeAddressToMerge] - if !codeAddressExists { - mapsByCodeHash = make(map[common.Hash]*codeCoverageData) - cm.maps[codeAddressToMerge] = mapsByCodeHash + for codeHash, mapsByAddressToMerge := range coverageMaps.maps { + for codeAddress, coverageMapToMerge := range mapsByAddressToMerge { + // If a coverage map lookup for this code hash doesn't exist, create the mapping. + mapsByAddress, codeHashExists := cm.maps[codeHash] + if !codeHashExists { + mapsByAddress = make(map[common.Address]*ContractCoverageMap) + cm.maps[codeHash] = mapsByAddress } - // If a coverage map for this code hash already exists in our current mapping, update it with the one + // If a coverage map for this address already exists in our current mapping, update it with the one // to merge. If it doesn't exist, set it to the one to merge. - if existingCoverageMap, codeHashExists := mapsByCodeHash[codeHashToMerge]; codeHashExists { - coverageMapChanged, err := existingCoverageMap.updateCodeCoverageData(coverageMapToMerge) - changed = changed || coverageMapChanged + if existingCoverageMap, codeAddressExists := mapsByAddress[codeAddress]; codeAddressExists { + sChanged, rChanged, err := existingCoverageMap.update(coverageMapToMerge) + successCoverageChanged = successCoverageChanged || sChanged + revertedCoverageChanged = revertedCoverageChanged || rChanged if err != nil { - return changed, err + return successCoverageChanged, revertedCoverageChanged, err } } else { - mapsByCodeHash[codeHashToMerge] = coverageMapToMerge - changed = true + mapsByAddress[codeAddress] = coverageMapToMerge + successCoverageChanged = coverageMapToMerge.successfulCoverage != nil + revertedCoverageChanged = coverageMapToMerge.revertedCoverage != nil } } } // Return our results - return changed, nil + return successCoverageChanged, revertedCoverageChanged, nil } -// SetCoveredAt sets the coverage state of a given program counter location within a codeCoverageData. 
-func (cm *CoverageMaps) SetCoveredAt(codeAddress common.Address, codeHash common.Hash, init bool, codeSize int, pc uint64) (bool, error) { +// UpdateAt updates the hit count of a given program counter location within code coverage data. +func (cm *CoverageMaps) UpdateAt(codeAddress common.Address, codeLookupHash common.Hash, codeSize int, pc uint64) (bool, error) { // If the code size is zero, do nothing if codeSize == 0 { return false, nil @@ -98,152 +180,239 @@ func (cm *CoverageMaps) SetCoveredAt(codeAddress common.Address, codeHash common var ( addedNewMap bool changedInMap bool - coverageMap *codeCoverageData + coverageMap *ContractCoverageMap err error ) - // Try to obtain a coverage map for the given code hash from our cache - if cm.cachedMap != nil && cm.cachedCodeAddress == codeAddress && cm.cachedCodeHash == codeHash { + // Try to obtain a coverage map from our cache + if cm.cachedMap != nil && cm.cachedCodeAddress == codeAddress && cm.cachedCodeHash == codeLookupHash { coverageMap = cm.cachedMap } else { - // If a coverage map lookup for this code address doesn't exist, create the mapping. - coverageMapsByCodeHash, codeAddressExists := cm.maps[codeAddress] - if !codeAddressExists { - coverageMapsByCodeHash = make(map[common.Hash]*codeCoverageData) - cm.maps[codeAddress] = coverageMapsByCodeHash + // If a coverage map lookup for this code hash doesn't exist, create the mapping. + mapsByCodeAddress, codeHashExists := cm.maps[codeLookupHash] + if !codeHashExists { + mapsByCodeAddress = make(map[common.Address]*ContractCoverageMap) + cm.maps[codeLookupHash] = mapsByCodeAddress } - // Obtain the coverage map for this code hash if it already exists. If it does not, create a new one. - if existingCoverageMap, codeHashExists := coverageMapsByCodeHash[codeHash]; codeHashExists { + // Obtain the coverage map for this code address if it already exists. If it does not, create a new one. 
+ if existingCoverageMap, codeAddressExists := mapsByCodeAddress[codeAddress]; codeAddressExists { coverageMap = existingCoverageMap } else { - coverageMap = &codeCoverageData{ - initBytecodeCoverageData: nil, - deployedBytecodeCoverageData: nil, - } - cm.maps[codeAddress][codeHash] = coverageMap + coverageMap = newContractCoverageMap() + cm.maps[codeLookupHash][codeAddress] = coverageMap addedNewMap = true } // Set our cached variables for faster coverage setting next time this method is called. cm.cachedMap = coverageMap - cm.cachedCodeHash = codeHash + cm.cachedCodeHash = codeLookupHash cm.cachedCodeAddress = codeAddress } // Set our coverage in the map and return our change state - changedInMap, err = coverageMap.setCodeCoverageDataAt(init, codeSize, pc) + changedInMap, err = coverageMap.updateCoveredAt(codeSize, pc) + return addedNewMap || changedInMap, err } -// Equals checks whether two coverage maps are the same. Equality is determined if the keys and values are all the same. -func (a *CoverageMaps) Equals(b *CoverageMaps) bool { - // Note: the `map` field is what is being tested for equality. Not the cached values +// RevertAll sets all coverage in the coverage map as reverted coverage. Reverted coverage is updated with successful +// coverage, the successful coverage is cleared. +// Returns a boolean indicating whether reverted coverage increased, and an error if one occurred. +func (cm *CoverageMaps) RevertAll() (bool, error) { + // Acquire our thread lock and defer our unlocking for when we exit this method + cm.updateLock.Lock() + defer cm.updateLock.Unlock() + + // Define a variable to track if our reverted coverage changed. 
+ revertedCoverageChanged := false - // Iterate through all maps - for addr, aHashToCoverage := range a.maps { - bHashToCoverage, ok := b.maps[addr] - // Address is not in b - we're done - if !ok { - return false - } - for hash, aCoverage := range aHashToCoverage { - bCoverage, ok := bHashToCoverage[hash] - // Hash is not in b - we're done - if !ok { - return false - } - // Compare that the deployed bytecode coverages are the same - equal := bytes.Compare(aCoverage.deployedBytecodeCoverageData, bCoverage.deployedBytecodeCoverageData) - if equal != 0 { - return false - } - // Compare that the init bytecode coverages are the same - equal = bytes.Compare(aCoverage.initBytecodeCoverageData, bCoverage.initBytecodeCoverageData) - if equal != 0 { - return false + // Loop for each coverage map provided + for _, mapsByAddressToMerge := range cm.maps { + for _, contractCoverageMap := range mapsByAddressToMerge { + // Update our reverted coverage with the (previously thought to be) successful coverage. + changed, err := contractCoverageMap.revertedCoverage.update(contractCoverageMap.successfulCoverage) + revertedCoverageChanged = revertedCoverageChanged || changed + if err != nil { + return revertedCoverageChanged, err } + + // Clear our successful coverage, as these maps were marked as reverted. + contractCoverageMap.successfulCoverage.Reset() } } - return true + return revertedCoverageChanged, nil } -// codeCoverageData represents a data structure used to identify instruction execution coverage of contract byte code. -type codeCoverageData struct { - // initBytecodeCoverageData represents a list of bytes for each byte of a contract's init bytecode. Non-zero values - // indicate the program counter executed an instruction at that offset. - initBytecodeCoverageData []byte - // deployedBytecodeCoverageData represents a list of bytes for each byte of a contract's deployed bytecode. Non-zero - // values indicate the program counter executed an instruction at that offset. 
- deployedBytecodeCoverageData []byte -} +// UniquePCs is a function that returns the total number of unique program counters (PCs) +func (cm *CoverageMaps) UniquePCs() uint64 { + uniquePCs := uint64(0) + // Iterate across each contract deployment + for _, mapsByAddress := range cm.maps { + for _, contractCoverageMap := range mapsByAddress { + // TODO: Note we are not checking for nil dereference here because we are guaranteed that the successful + // coverage and reverted coverage arrays have been instantiated if we are iterating over it -// updateCodeCoverageData creates updates the current coverage map with the provided one. It returns a boolean indicating whether -// new coverage was achieved, or an error if one was encountered. -func (cm *codeCoverageData) updateCodeCoverageData(coverageMap *codeCoverageData) (bool, error) { - // Define our return variable - changed := false + // Iterate across each PC in the successful coverage array + // We do not separately iterate over the reverted coverage array because if there is no data about a + // successful PC execution, then it is not possible for that PC to have ever reverted either + for i, hits := range contractCoverageMap.successfulCoverage.executedFlags { + // If we hit the PC at least once, we have a unique PC hit + if hits != 0 { + uniquePCs++ - // Update our init bytecode coverage data. - if coverageMap.initBytecodeCoverageData != nil { - if cm.initBytecodeCoverageData == nil { - cm.initBytecodeCoverageData = coverageMap.initBytecodeCoverageData - changed = true - } else { - // Update each byte which represents a position in the bytecode which was covered. We ignore any size - // differences as init bytecode can have arbitrary length arguments appended. 
- for i := 0; i < len(cm.initBytecodeCoverageData) || i < len(coverageMap.initBytecodeCoverageData); i++ { - if cm.initBytecodeCoverageData[i] == 0 && coverageMap.initBytecodeCoverageData[i] != 0 { - cm.initBytecodeCoverageData[i] = 1 - changed = true + // Do not count both success and revert + continue + } + + // This is only executed if the PC was not executed successfully + if contractCoverageMap.revertedCoverage.executedFlags != nil && contractCoverageMap.revertedCoverage.executedFlags[i] != 0 { + uniquePCs++ } } } } + return uniquePCs +} + +// ContractCoverageMap represents a data structure used to identify instruction execution coverage of a contract. +type ContractCoverageMap struct { + // successfulCoverage represents coverage for the contract bytecode, which did not encounter a revert and was + // deemed successful. + successfulCoverage *CoverageMapBytecodeData - // Update our deployed bytecode coverage data. - if coverageMap.deployedBytecodeCoverageData != nil { - if cm.deployedBytecodeCoverageData == nil { - cm.deployedBytecodeCoverageData = coverageMap.deployedBytecodeCoverageData + // revertedCoverage represents coverage for the contract bytecode, which encountered a revert. + revertedCoverage *CoverageMapBytecodeData +} + +// newContractCoverageMap creates and returns a new ContractCoverageMap. +func newContractCoverageMap() *ContractCoverageMap { + return &ContractCoverageMap{ + successfulCoverage: &CoverageMapBytecodeData{}, + revertedCoverage: &CoverageMapBytecodeData{}, + } +} + +// Equal checks whether the provided ContractCoverageMap contains the same data as the current one. +// Returns a boolean indicating whether the two maps match. +func (cm *ContractCoverageMap) Equal(b *ContractCoverageMap) bool { + // Compare both our underlying bytecode coverage maps. + return cm.successfulCoverage.Equal(b.successfulCoverage) && cm.revertedCoverage.Equal(b.revertedCoverage) +} + +// update updates the current ContractCoverageMap with the provided one. 
+// Returns two booleans indicating whether successful or reverted coverage changed, or an error if one was encountered. +func (cm *ContractCoverageMap) update(coverageMap *ContractCoverageMap) (bool, bool, error) { + // Update our success coverage data + successfulCoverageChanged, err := cm.successfulCoverage.update(coverageMap.successfulCoverage) + if err != nil { + return false, false, err + } + + // Update our reverted coverage data + revertedCoverageChanged, err := cm.revertedCoverage.update(coverageMap.revertedCoverage) + if err != nil { + return successfulCoverageChanged, false, err + } + + return successfulCoverageChanged, revertedCoverageChanged, nil +} + +// updateCoveredAt updates the hit counter at a given program counter location within a ContractCoverageMap used for +// "successful" coverage (non-reverted). +// Returns a boolean indicating whether new coverage was achieved, or an error if one occurred. +func (cm *ContractCoverageMap) updateCoveredAt(codeSize int, pc uint64) (bool, error) { + // Set our coverage data for the successful path. + return cm.successfulCoverage.updateCoveredAt(codeSize, pc) +} + +// CoverageMapBytecodeData represents a data structure used to identify instruction execution coverage of some init +// or runtime bytecode. +type CoverageMapBytecodeData struct { + executedFlags []uint +} + +// Reset resets the bytecode coverage map data to be empty. +func (cm *CoverageMapBytecodeData) Reset() { + cm.executedFlags = nil +} + +// Equal checks whether the provided CoverageMapBytecodeData contains the same data as the current one. +// Returns a boolean indicating whether the two maps match. +func (cm *CoverageMapBytecodeData) Equal(b *CoverageMapBytecodeData) bool { + // Return an equality comparison on the data, ignoring size checks by stopping at the end of the shortest slice. + // We do this to avoid comparing arbitrary length constructor arguments appended to init bytecode. 
+ smallestSize := utils.Min(len(cm.executedFlags), len(b.executedFlags)) + // TODO: Currently we are checking equality by making sure the two maps have the same hit counts + // it may make sense to just check that both of them are greater than zero + return slices.Equal(cm.executedFlags[:smallestSize], b.executedFlags[:smallestSize]) +} + +// HitCount returns the number of times that the provided program counter (PC) has been hit. If zero is returned, then +// the PC has not been hit, the map is empty, or the PC is out-of-bounds +func (cm *CoverageMapBytecodeData) HitCount(pc int) uint { + // If the coverage map bytecode data is nil, this is not covered. + if cm == nil { + return 0 + } + + // If this map has no execution data or is out of bounds, it is not covered. + if cm.executedFlags == nil || len(cm.executedFlags) <= pc { + return 0 + } + + // Otherwise, return the hit count + return cm.executedFlags[pc] +} + +// update updates the hit count of the current CoverageMapBytecodeData with the provided one. +// Returns a boolean indicating whether new coverage was achieved, or an error if one was encountered. +func (cm *CoverageMapBytecodeData) update(coverageMap *CoverageMapBytecodeData) (bool, error) { + // If the coverage map execution data provided is nil, exit early + if coverageMap.executedFlags == nil { + return false, nil + } + + // If the current map has no execution data, simply set it to the provided one. + if cm.executedFlags == nil { + cm.executedFlags = coverageMap.executedFlags + return true, nil + } + + // Update each byte which represents a position in the bytecode which was covered. 
+ changed := false + for i := 0; i < len(cm.executedFlags) && i < len(coverageMap.executedFlags); i++ { + // Only update the map if we haven't seen this coverage before + if cm.executedFlags[i] == 0 && coverageMap.executedFlags[i] != 0 { + cm.executedFlags[i] += coverageMap.executedFlags[i] changed = true - } else { - // Update each byte which represents a position in the bytecode which was covered. - for i := 0; i < len(cm.deployedBytecodeCoverageData); i++ { - if cm.deployedBytecodeCoverageData[i] == 0 && coverageMap.deployedBytecodeCoverageData[i] != 0 { - cm.deployedBytecodeCoverageData[i] = 1 - changed = true - } - } } } - return changed, nil } -// setCodeCoverageDataAt sets the coverage state of a given program counter location within a codeCoverageData. -func (cm *codeCoverageData) setCodeCoverageDataAt(init bool, codeSize int, pc uint64) (bool, error) { - // Obtain our coverage data depending on if we're initializing/deploying a contract now. If coverage data doesn't - // exist, we create it. - var coverageData []byte - if init { - if cm.initBytecodeCoverageData == nil { - cm.initBytecodeCoverageData = make([]byte, codeSize) - } - coverageData = cm.initBytecodeCoverageData - } else { - if cm.deployedBytecodeCoverageData == nil { - cm.deployedBytecodeCoverageData = make([]byte, codeSize) - } - coverageData = cm.deployedBytecodeCoverageData +// updateCoveredAt updates the hit count at a given program counter location within a CoverageMapBytecodeData. +// Returns a boolean indicating whether new coverage was achieved, or an error if one occurred. +func (cm *CoverageMapBytecodeData) updateCoveredAt(codeSize int, pc uint64) (bool, error) { + // If the execution flags don't exist, create them for this code size. + if cm.executedFlags == nil { + cm.executedFlags = make([]uint, codeSize) } - // If our program counter is in range, determine if we achieved new coverage for the first time, and update it. 
- if pc < uint64(len(coverageData)) { - if coverageData[pc] == 0 { - coverageData[pc] = 1 + // If our program counter is in range, determine if we achieved new coverage for the first time or increment the hit counter. + if pc < uint64(len(cm.executedFlags)) { + // Increment the hit counter + cm.executedFlags[pc] += 1 + + // This is the first time we have hit this PC, so return true + if cm.executedFlags[pc] == 1 { return true, nil } + // We have seen this PC before, return false return false, nil } - return false, fmt.Errorf("tried to set coverage map out of bounds (pc: %d, code size %d)", pc, len(coverageData)) + + // Since it is possible that the program counter is larger than the code size (e.g., malformed bytecode), we will + // simply return false with no error + return false, nil } diff --git a/fuzzing/coverage/coverage_tracer.go b/fuzzing/coverage/coverage_tracer.go index eed18ca4..0cbe0785 100644 --- a/fuzzing/coverage/coverage_tracer.go +++ b/fuzzing/coverage/coverage_tracer.go @@ -1,12 +1,16 @@ package coverage import ( - "fmt" + "math/big" + + "github.com/crytic/medusa/chain" "github.com/crytic/medusa/chain/types" - compilationTypes "github.com/crytic/medusa/compilation/types" + "github.com/crytic/medusa/logging" "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/tracing" + coretypes "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/core/vm" - "math/big" + "github.com/ethereum/go-ethereum/eth/tracers" ) // coverageTracerResultsKey describes the key to use when storing tracer results in call message results, or when @@ -32,7 +36,7 @@ func RemoveCoverageTracerResults(messageResults *types.MessageResults) { delete(messageResults.AdditionalResults, coverageTracerResultsKey) } -// CoverageTracer implements vm.EVMLogger to collect information such as coverage maps +// CoverageTracer implements tracers.Tracer to collect information such as coverage maps // for fuzzing campaigns from EVM execution traces. 
type CoverageTracer struct { // coverageMaps describes the execution coverage recorded. Call frames which errored are not recorded. @@ -42,13 +46,19 @@ type CoverageTracer struct { callFrameStates []*coverageTracerCallFrameState // callDepth refers to the current EVM depth during tracing. - callDepth uint64 + callDepth int + + evmContext *tracing.VMContext + + // nativeTracer is the underlying tracer used to capture EVM execution. + nativeTracer *chain.TestChainTracer - // cachedCodeHashOriginal describes the code hash used to last store coverage. - cachedCodeHashOriginal common.Hash - // cachedCodeHashResolved describes the code hash used to store the last coverage map. If the contract metadata - // code hash is embedded, then it is used. Otherwise, this refers to cachedCodeHashOriginal. - cachedCodeHashResolved common.Hash + // codeHashCache is a cache for values returned by getContractCoverageMapHash, + // so that this expensive calculation doesn't need to be done every opcode. + // The [2] array is to differentiate between contract init (0) vs runtime (1), + // since init vs runtime produces different results from getContractCoverageMapHash. + // The Hash key is a contract's codehash, which uniquely identifies it. + codeHashCache [2]map[common.Hash]common.Hash } // coverageTracerCallFrameState tracks state across call frames in the tracer. @@ -58,6 +68,9 @@ type coverageTracerCallFrameState struct { // pendingCoverageMap describes the coverage maps recorded for this call frame. pendingCoverageMap *CoverageMaps + + // lookupHash describes the hash used to look up the ContractCoverageMap being updated in this frame. + lookupHash *common.Hash } // NewCoverageTracer returns a new CoverageTracer. 
@@ -65,117 +78,123 @@ func NewCoverageTracer() *CoverageTracer { tracer := &CoverageTracer{ coverageMaps: NewCoverageMaps(), callFrameStates: make([]*coverageTracerCallFrameState, 0), + codeHashCache: [2]map[common.Hash]common.Hash{make(map[common.Hash]common.Hash), make(map[common.Hash]common.Hash)}, + } + nativeTracer := &tracers.Tracer{ + Hooks: &tracing.Hooks{ + OnTxStart: tracer.OnTxStart, + OnEnter: tracer.OnEnter, + OnExit: tracer.OnExit, + OnOpcode: tracer.OnOpcode, + }, } + tracer.nativeTracer = &chain.TestChainTracer{Tracer: nativeTracer, CaptureTxEndSetAdditionalResults: tracer.CaptureTxEndSetAdditionalResults} + return tracer } -// CaptureTxStart is called upon the start of transaction execution, as defined by vm.EVMLogger. -func (t *CoverageTracer) CaptureTxStart(gasLimit uint64) { - // Reset our capture state +// NativeTracer returns the underlying TestChainTracer. +func (t *CoverageTracer) NativeTracer() *chain.TestChainTracer { + return t.nativeTracer +} + +// CaptureTxStart is called upon the start of transaction execution, as defined by tracers.Tracer. +func (t *CoverageTracer) OnTxStart(vm *tracing.VMContext, tx *coretypes.Transaction, from common.Address) { + // Reset our call frame states t.callDepth = 0 t.coverageMaps = NewCoverageMaps() t.callFrameStates = make([]*coverageTracerCallFrameState, 0) - t.cachedCodeHashOriginal = common.Hash{} - - // Reset our call frame states. - t.callFrameStates = make([]*coverageTracerCallFrameState, 0) + t.evmContext = vm } -// CaptureTxEnd is called upon the end of transaction execution, as defined by vm.EVMLogger. -func (t *CoverageTracer) CaptureTxEnd(restGas uint64) { -} +// OnEnter initializes the tracing operation for the top of a call frame, as defined by tracers.Tracer. 
+func (t *CoverageTracer) OnEnter(depth int, typ byte, from common.Address, to common.Address, input []byte, gas uint64, value *big.Int) { + // Check to see if this is the top level call frame + isTopLevelFrame := depth == 0 -// CaptureStart initializes the tracing operation for the top of a call frame, as defined by vm.EVMLogger. -func (t *CoverageTracer) CaptureStart(env *vm.EVM, from common.Address, to common.Address, create bool, input []byte, gas uint64, value *big.Int) { - // Create our state tracking struct for this frame. - t.callFrameStates = append(t.callFrameStates, &coverageTracerCallFrameState{ - create: create, - pendingCoverageMap: NewCoverageMaps(), - }) -} - -// CaptureEnd is called after a call to finalize tracing completes for the top of a call frame, as defined by vm.EVMLogger. -func (t *CoverageTracer) CaptureEnd(output []byte, gasUsed uint64, err error) { - // If we didn't encounter an error in the end, we commit all our coverage maps to the final coverage map. - // If we encountered an error, we reverted, so we don't consider them. - if err == nil { - _, coverageUpdateErr := t.coverageMaps.Update(t.callFrameStates[t.callDepth].pendingCoverageMap) - if coverageUpdateErr != nil { - panic(fmt.Sprintf("coverage tracer failed to update coverage map during capture end: %v", coverageUpdateErr)) - } + // Increment call frame depth if it is not the top level call frame + if !isTopLevelFrame { + t.callDepth++ } - // Pop the state tracking struct for this call frame off the stack. - t.callFrameStates = t.callFrameStates[:t.callDepth] -} - -// CaptureEnter is called upon entering of the call frame, as defined by vm.EVMLogger. -func (t *CoverageTracer) CaptureEnter(typ vm.OpCode, from common.Address, to common.Address, input []byte, gas uint64, value *big.Int) { - // Increase our call depth now that we're entering a new call frame. - t.callDepth++ - // Create our state tracking struct for this frame. 
t.callFrameStates = append(t.callFrameStates, &coverageTracerCallFrameState{ - create: typ == vm.CREATE || typ == vm.CREATE2, + create: typ == byte(vm.CREATE) || typ == byte(vm.CREATE2), pendingCoverageMap: NewCoverageMaps(), }) } -// CaptureExit is called upon exiting of the call frame, as defined by vm.EVMLogger. -func (t *CoverageTracer) CaptureExit(output []byte, gasUsed uint64, err error) { - // If we didn't encounter an error in the end, we commit all our coverage maps up one call frame. - // If we encountered an error, we reverted, so we don't consider them. - if err == nil { - _, coverageUpdateErr := t.callFrameStates[t.callDepth-1].pendingCoverageMap.Update(t.callFrameStates[t.callDepth].pendingCoverageMap) - if coverageUpdateErr != nil { - panic(fmt.Sprintf("coverage tracer failed to update coverage map during capture exit: %v", coverageUpdateErr)) +// OnExit is called after a call to finalize tracing completes for the top of a call frame, as defined by tracers.Tracer. +func (t *CoverageTracer) OnExit(depth int, output []byte, gasUsed uint64, err error, reverted bool) { + // Check to see if this is the top level call frame + isTopLevelFrame := depth == 0 + + // If we encountered an error in this call frame, mark all coverage as reverted. + if err != nil { + _, revertCoverageErr := t.callFrameStates[t.callDepth].pendingCoverageMap.RevertAll() + if revertCoverageErr != nil { + logging.GlobalLogger.Panic("Coverage tracer failed to update revert coverage map during capture end", revertCoverageErr) } } - // Pop the state tracking struct for this call frame off the stack. - t.callFrameStates = t.callFrameStates[:t.callDepth] + // Commit all our coverage maps up one call frame. 
+ var coverageUpdateErr error + if isTopLevelFrame { + // Update the final coverage map if this is the top level call frame + _, _, coverageUpdateErr = t.coverageMaps.Update(t.callFrameStates[t.callDepth].pendingCoverageMap) + } else { + // Move coverage up one call frame + _, _, coverageUpdateErr = t.callFrameStates[t.callDepth-1].pendingCoverageMap.Update(t.callFrameStates[t.callDepth].pendingCoverageMap) + + // Pop the state tracking struct for this call frame off the stack and decrement the call depth + t.callFrameStates = t.callFrameStates[:t.callDepth] + t.callDepth-- + } + if coverageUpdateErr != nil { + logging.GlobalLogger.Panic("Coverage tracer failed to update coverage map during capture end", coverageUpdateErr) + } - // Decrease our call depth now that we've exited a call frame. - t.callDepth-- } -// CaptureState records data from an EVM state update, as defined by vm.EVMLogger. -func (t *CoverageTracer) CaptureState(pc uint64, op vm.OpCode, gas, cost uint64, scope *vm.ScopeContext, rData []byte, depth int, vmErr error) { +// OnOpcode records data from an EVM state update, as defined by tracers.Tracer. +func (t *CoverageTracer) OnOpcode(pc uint64, op byte, gas, cost uint64, scope tracing.OpContext, rData []byte, depth int, err error) { // Obtain our call frame state tracking struct callFrameState := t.callFrameStates[t.callDepth] // If there is code we're executing, collect coverage. - if len(scope.Contract.Code) > 0 { - // We record coverage maps under a code hash to merge coverage across different deployments of a contract. - // We rely on the embedded contract metadata code hash if it is available, otherwise the immediate hash - // for this code. Because this method is called for every instruction executed, we cache the resolved - // code hash for performance reasons. 
- if t.cachedCodeHashOriginal != scope.Contract.CodeHash { - t.cachedCodeHashOriginal = scope.Contract.CodeHash - t.cachedCodeHashResolved = t.cachedCodeHashOriginal - if metadata := compilationTypes.ExtractContractMetadata(scope.Contract.Code); metadata != nil { - if metadataHash := metadata.ExtractBytecodeHash(); metadataHash != nil { - t.cachedCodeHashResolved = common.BytesToHash(metadataHash) - } + address := scope.Address() + // We can cast OpContext to ScopeContext because that is the type passed to OnOpcode. + scopeContext := scope.(*vm.ScopeContext) + code := scopeContext.Contract.Code + codeSize := len(code) + isCreate := callFrameState.create + gethCodeHash := scopeContext.Contract.CodeHash + + cacheArrayKey := 1 + if isCreate { + cacheArrayKey = 0 + } + + if codeSize > 0 { + + // Obtain our contract coverage map lookup hash. + if callFrameState.lookupHash == nil { + lookupHash, cacheHit := t.codeHashCache[cacheArrayKey][gethCodeHash] + if !cacheHit { + lookupHash = getContractCoverageMapHash(code, isCreate) + t.codeHashCache[cacheArrayKey][gethCodeHash] = lookupHash } + callFrameState.lookupHash = &lookupHash } - // If the resolved code hash is not zero (indicating a contract deployment from which we could not extract - // a metadata code hash), then we record coverage for this location in our map. - zeroHash := common.BigToHash(big.NewInt(0)) - if t.cachedCodeHashResolved != zeroHash { - _, coverageUpdateErr := callFrameState.pendingCoverageMap.SetCoveredAt(scope.Contract.Address(), t.cachedCodeHashResolved, callFrameState.create, len(scope.Contract.Code), pc) - if coverageUpdateErr != nil { - panic(fmt.Sprintf("coverage tracer failed to update coverage map while tracing state: %v", coverageUpdateErr)) - } + // Record coverage for this location in our map. 
+ _, coverageUpdateErr := callFrameState.pendingCoverageMap.UpdateAt(address, *callFrameState.lookupHash, codeSize, pc) + if coverageUpdateErr != nil { + logging.GlobalLogger.Panic("Coverage tracer failed to update coverage map while tracing state", coverageUpdateErr) } } } -// CaptureFault records an execution fault, as defined by vm.EVMLogger. -func (t *CoverageTracer) CaptureFault(pc uint64, op vm.OpCode, gas, cost uint64, scope *vm.ScopeContext, depth int, err error) { -} - // CaptureTxEndSetAdditionalResults can be used to set additional results captured from execution tracing. If this // tracer is used during transaction execution (block creation), the results can later be queried from the block. // This method will only be called on the added tracer if it implements the extended TestChainTracer interface. diff --git a/fuzzing/coverage/report_generation.go b/fuzzing/coverage/report_generation.go new file mode 100644 index 00000000..b5125ea7 --- /dev/null +++ b/fuzzing/coverage/report_generation.go @@ -0,0 +1,109 @@ +package coverage + +import ( + _ "embed" + "fmt" + "html/template" + "math" + "os" + "path/filepath" + "strconv" + "time" + + "github.com/crytic/medusa/utils" +) + +var ( + //go:embed report_template.gohtml + htmlReportTemplate []byte +) + +// WriteHTMLReport takes a previously performed source analysis and generates an HTML coverage report from it. +func WriteHTMLReport(sourceAnalysis *SourceAnalysis, reportDir string) (string, error) { + // Define mappings onto some useful variables/functions. + functionMap := template.FuncMap{ + "timeNow": time.Now, + "add": func(x int, y int) int { + return x + y + }, + "relativePath": func(path string) string { + // Obtain a path relative to our current working directory. + // If we encounter an error, return the original path. 
+ cwd, err := os.Getwd() + if err != nil { + return path + } + relativePath, err := filepath.Rel(cwd, path) + if err != nil { + return path + } + + return relativePath + }, + "percentageStr": func(x int, y int, decimals int) string { + // Determine our precision string + formatStr := "%." + strconv.Itoa(decimals) + "f" + + // If no lines are active and none are covered, show 0% coverage + if x == 0 && y == 0 { + return fmt.Sprintf(formatStr, float64(0)) + } + return fmt.Sprintf(formatStr, (float64(x)/float64(y))*100) + }, + "percentageInt": func(x int, y int) int { + if y == 0 { + return 100 + } + return int(math.Round(float64(x) / float64(y) * 100)) + }, + } + + // Parse our HTML template + tmpl, err := template.New("coverage_report.html").Funcs(functionMap).Parse(string(htmlReportTemplate)) + if err != nil { + return "", fmt.Errorf("could not export report, failed to parse report template: %v", err) + } + + // If the directory doesn't exist, create it. + err = utils.MakeDirectory(reportDir) + if err != nil { + return "", err + } + + // Create our report file + htmlReportPath := filepath.Join(reportDir, "coverage_report.html") + file, err := os.Create(htmlReportPath) + if err != nil { + _ = file.Close() + return "", fmt.Errorf("could not export report, failed to open file for writing: %v", err) + } + + // Execute the template and write it back to file. + err = tmpl.Execute(file, sourceAnalysis) + fileCloseErr := file.Close() + if err == nil { + err = fileCloseErr + } + return htmlReportPath, err +} + +// WriteLCOVReport takes a previously performed source analysis and generates an LCOV report from it. +func WriteLCOVReport(sourceAnalysis *SourceAnalysis, reportDir string) (string, error) { + // Generate the LCOV report. + lcovReport := sourceAnalysis.GenerateLCOVReport() + + // If the directory doesn't exist, create it. + err := utils.MakeDirectory(reportDir) + if err != nil { + return "", err + } + + // Write the LCOV report to a file. 
+ lcovReportPath := filepath.Join(reportDir, "lcov.info") + err = os.WriteFile(lcovReportPath, []byte(lcovReport), 0644) + if err != nil { + return "", fmt.Errorf("could not export LCOV report: %v", err) + } + + return lcovReportPath, nil +} diff --git a/fuzzing/coverage/report_template.gohtml b/fuzzing/coverage/report_template.gohtml new file mode 100644 index 00000000..edcd4986 --- /dev/null +++ b/fuzzing/coverage/report_template.gohtml @@ -0,0 +1,250 @@ + + + + + Coverage Report + + + + +
+

Coverage Report

+
+ + + + + + + +
Files: {{len .Files}}
Lines: {{.LineCount}}
Covered: + {{/* Analyze some initial coverage metrics */}} + {{$totalLinesCovered := .CoveredLineCount}} + {{$totalLinesActive := .ActiveLineCount}} + {{$totalPercentCoverageInt := percentageInt $totalLinesCovered $totalLinesActive}} + + {{/* Output our coverage info with a progress bar alongside it.*/}} + {{/*The progress bar's color is set from HSL values (hue 0-100 is red->orange->yellow->green)*/}} + {{$totalLinesCovered}} / {{$totalLinesActive}} ({{percentageStr $totalLinesCovered $totalLinesActive 1}}%) + +
+
+
+ + {{/* Loop through all sources */}} + {{range $sourceFile := .SortedFiles}} + {{/* Analyze some initial coverage metrics */}} + {{$linesCovered := $sourceFile.CoveredLineCount}} + {{$linesActive := $sourceFile.ActiveLineCount}} + {{$linesCoveredPercentInt := percentageInt $linesCovered $linesActive}} + + {{/* Output a collapsible header/container for each source*/}} + {{if not $linesCoveredPercentInt}} + + {{else}} + + {{end}} +
+
+
+ {{/* Output the total line coverage statistics*/}} + + + + + +
Lines covered: {{$linesCovered}} / {{$linesActive}} ({{percentageStr $linesCovered $linesActive 1}}%)
+
+ {{/* Output a tables with a row for each source line*/}} + + {{range $lineIndex, $line := $sourceFile.Lines}} + {{/* Create a row for this source line */}} + + {{/* Output a cell for the line number */}} + + + {{/* Output two cells for the reverted/non-reverted execution status */}} + + + + {{/* Output a cell for the source line */}} + {{/* If a source line is "active", it has a source mapping so we mark it green/red */}} + {{/* If a source line is "covered", it is green, otherwise it is red. */}} + + + {{end}} +
{{add $lineIndex 1}} + {{if $line.IsCovered}} +
√ {{$line.SuccessHitCount}}
+ {{end}} +
+ {{if $line.IsCoveredReverted}} +
⟳ {{$line.RevertHitCount}}
+ {{end}} +
+ {{if not $line.IsActive}} +
{{printf "%s" $line.Contents}}
+ {{else if or $line.IsCovered $line.IsCoveredReverted}} +
{{printf "%s" $line.Contents}}
+ {{else}} +
{{printf "%s" $line.Contents}}
+ {{end}} +
+
+
+ {{end}} + + + + +
+ + + diff --git a/fuzzing/coverage/source_analysis.go b/fuzzing/coverage/source_analysis.go new file mode 100644 index 00000000..98c4bc75 --- /dev/null +++ b/fuzzing/coverage/source_analysis.go @@ -0,0 +1,431 @@ +package coverage + +import ( + "bytes" + "encoding/json" + "fmt" + "sort" + + "github.com/crytic/medusa/compilation/types" + "golang.org/x/exp/maps" +) + +// SourceAnalysis describes source code coverage across a list of compilations, after analyzing associated CoverageMaps. +type SourceAnalysis struct { + // Files describes the analysis results for a given source file path. + Files map[string]*SourceFileAnalysis +} + +// SortedFiles returns a list of Files within the SourceAnalysis, sorted by source file path in alphabetical order. +func (s *SourceAnalysis) SortedFiles() []*SourceFileAnalysis { + // Copy all source files from our analysis into a list. + sourceFiles := maps.Values(s.Files) + + // Sort source files by path + sort.Slice(sourceFiles, func(x, y int) bool { + return sourceFiles[x].Path < sourceFiles[y].Path + }) + + return sourceFiles +} + +// LineCount returns the count of lines across all source files. +func (s *SourceAnalysis) LineCount() int { + count := 0 + for _, file := range s.Files { + count += len(file.Lines) + } + return count +} + +// ActiveLineCount returns the count of lines that are marked executable/active across all source files. +func (s *SourceAnalysis) ActiveLineCount() int { + count := 0 + for _, file := range s.Files { + count += file.ActiveLineCount() + } + return count +} + +// CoveredLineCount returns the count of lines that were covered across all source files. +func (s *SourceAnalysis) CoveredLineCount() int { + count := 0 + for _, file := range s.Files { + count += file.CoveredLineCount() + } + return count +} + +// GenerateLCOVReport generates an LCOV report from the source analysis. 
+// The spec of the format is here https://github.com/linux-test-project/lcov/blob/07a1127c2b4390abf4a516e9763fb28a956a9ce4/man/geninfo.1#L989 +func (s *SourceAnalysis) GenerateLCOVReport() string { + var linesHit, linesInstrumented int + var buffer bytes.Buffer + buffer.WriteString("TN:\n") + for _, file := range s.SortedFiles() { + // SF: + buffer.WriteString(fmt.Sprintf("SF:%s\n", file.Path)) + for idx, line := range file.Lines { + if line.IsActive { + // DA:, + if line.IsCovered { + buffer.WriteString(fmt.Sprintf("DA:%d,%d\n", idx+1, line.SuccessHitCount)) + linesHit++ + } else { + buffer.WriteString(fmt.Sprintf("DA:%d,%d\n", idx+1, 0)) + } + linesInstrumented++ + } + } + // FN:, + // FNDA:, + for _, fn := range file.Functions { + byteStart := types.GetSrcMapStart(fn.Src) + length := types.GetSrcMapLength(fn.Src) + + startLine := sort.Search(len(file.CumulativeOffsetByLine), func(i int) bool { + return file.CumulativeOffsetByLine[i] > byteStart + }) + endLine := sort.Search(len(file.CumulativeOffsetByLine), func(i int) bool { + return file.CumulativeOffsetByLine[i] > byteStart+length + }) + + // We are treating any line hit in the definition as a hit for the function. + hit := 0 + for i := startLine; i < endLine; i++ { + // index iz zero based, line numbers are 1 based + if file.Lines[i-1].IsActive && file.Lines[i-1].IsCovered { + hit = 1 + } + + } + + // TODO: handle fallback, receive, and constructor + if fn.Name != "" { + buffer.WriteString(fmt.Sprintf("FN:%d,%s\n", startLine, fn.Name)) + buffer.WriteString(fmt.Sprintf("FNDA:%d,%s\n", hit, fn.Name)) + } + + } + buffer.WriteString("end_of_record\n") + } + + return buffer.String() +} + +// SourceFileAnalysis describes coverage information for a given source file. +type SourceFileAnalysis struct { + // Path describes the file path of the source file. This is kept here for access during report generation. 
+ Path string + + // CumulativeOffsetByLine describes the cumulative byte offset for each line in the source file. + // For example, for a file with 5 lines, the list might look like: [0, 45, 98, 132, 189], where each number is the byte offset of the line's starting position + // This allows us to quickly determine which line a given byte offset falls within using a binary search. + CumulativeOffsetByLine []int + + // Lines describes information about a given source line and its coverage. + Lines []*SourceLineAnalysis + + // Functions is a list of functions defined in the source file + Functions []*types.FunctionDefinition +} + +// ActiveLineCount returns the count of lines that are marked executable/active within the source file. +func (s *SourceFileAnalysis) ActiveLineCount() int { + count := 0 + for _, line := range s.Lines { + if line.IsActive { + count++ + } + } + return count +} + +// CoveredLineCount returns the count of lines that were covered within the source file. +func (s *SourceFileAnalysis) CoveredLineCount() int { + count := 0 + for _, line := range s.Lines { + if line.IsCovered || line.IsCoveredReverted { + count++ + } + } + return count +} + +// SourceLineAnalysis describes coverage information for a specific source file line. +type SourceLineAnalysis struct { + // IsActive indicates the given source line was executable. + IsActive bool + + // Start describes the starting byte offset of the line in its parent source file. + Start int + + // End describes the ending byte offset of the line in its parent source file. + End int + + // Contents describe the bytes associated with the given source line. + Contents []byte + + // IsCovered indicates whether the source line has been executed without reverting. 
+ IsCovered bool + + // SuccessHitCount describes how many times this line was executed successfully + SuccessHitCount uint + + // RevertHitCount describes how many times this line reverted during execution + RevertHitCount uint + + // IsCoveredReverted indicates whether the source line has been executed before reverting. + IsCoveredReverted bool +} + +// AnalyzeSourceCoverage takes a list of compilations and a set of coverage maps, and performs source analysis +// to determine source coverage information. +// Returns a SourceAnalysis object, or an error if one occurs. +func AnalyzeSourceCoverage(compilations []types.Compilation, coverageMaps *CoverageMaps) (*SourceAnalysis, error) { + // Create a new source analysis object + sourceAnalysis := &SourceAnalysis{ + Files: make(map[string]*SourceFileAnalysis), + } + + // Loop through all sources in all compilations to add them to our source file analysis container. + for _, compilation := range compilations { + for sourcePath := range compilation.SourcePathToArtifact { + // If we have no source code loaded for this source, skip it. 
+ if _, ok := compilation.SourceCode[sourcePath]; !ok { + return nil, fmt.Errorf("could not perform source code analysis, code was not cached for '%v'", sourcePath) + } + + lines, cumulativeOffset := parseSourceLines(compilation.SourceCode[sourcePath]) + funcs := make([]*types.FunctionDefinition, 0) + + var ast types.AST + b, err := json.Marshal(compilation.SourcePathToArtifact[sourcePath].Ast) + if err != nil { + return nil, fmt.Errorf("could not encode AST from sources: %v", err) + } + err = json.Unmarshal(b, &ast) + if err != nil { + return nil, fmt.Errorf("could not parse AST from sources: %v", err) + } + + for _, node := range ast.Nodes { + + if node.GetNodeType() == "FunctionDefinition" { + fn := node.(types.FunctionDefinition) + funcs = append(funcs, &fn) + } + if node.GetNodeType() == "ContractDefinition" { + contract := node.(types.ContractDefinition) + if contract.Kind == types.ContractKindInterface { + continue + } + for _, subNode := range contract.Nodes { + if subNode.GetNodeType() == "FunctionDefinition" { + fn := subNode.(types.FunctionDefinition) + funcs = append(funcs, &fn) + } + } + } + + } + + // Obtain the parsed source code lines for this source. + if _, ok := sourceAnalysis.Files[sourcePath]; !ok { + sourceAnalysis.Files[sourcePath] = &SourceFileAnalysis{ + Path: sourcePath, + CumulativeOffsetByLine: cumulativeOffset, + Lines: lines, + Functions: funcs, + } + } + + } + } + + // Loop through all sources in all compilations to process coverage information. + for _, compilation := range compilations { + for _, source := range compilation.SourcePathToArtifact { + // Loop for each contract in this source + for _, contract := range source.Contracts { + // Skip interfaces. + if contract.Kind == types.ContractKindInterface { + continue + } + // Obtain coverage map data for this contract. 
+ initCoverageMapData, err := coverageMaps.GetContractCoverageMap(contract.InitBytecode, true) + if err != nil { + return nil, fmt.Errorf("could not perform source code analysis due to error fetching init coverage map data: %v", err) + } + runtimeCoverageMapData, err := coverageMaps.GetContractCoverageMap(contract.RuntimeBytecode, false) + if err != nil { + return nil, fmt.Errorf("could not perform source code analysis due to error fetching runtime coverage map data: %v", err) + } + + // Parse the source map for this contract. + initSourceMap, err := types.ParseSourceMap(contract.SrcMapsInit) + if err != nil { + return nil, fmt.Errorf("could not perform source code analysis due to error fetching init source map: %v", err) + } + runtimeSourceMap, err := types.ParseSourceMap(contract.SrcMapsRuntime) + if err != nil { + return nil, fmt.Errorf("could not perform source code analysis due to error fetching runtime source map: %v", err) + } + + // Parse our instruction index to offset lookups + initInstructionOffsetLookup, err := initSourceMap.GetInstructionIndexToOffsetLookup(contract.InitBytecode) + if err != nil { + return nil, fmt.Errorf("could not perform source code analysis due to error parsing init byte code: %v", err) + } + runtimeInstructionOffsetLookup, err := runtimeSourceMap.GetInstructionIndexToOffsetLookup(contract.RuntimeBytecode) + if err != nil { + return nil, fmt.Errorf("could not perform source code analysis due to error parsing runtime byte code: %v", err) + } + + // Filter our source maps + initSourceMap = filterSourceMaps(compilation, initSourceMap) + runtimeSourceMap = filterSourceMaps(compilation, runtimeSourceMap) + + // Analyze both init and runtime coverage for our source lines. 
+ err = analyzeContractSourceCoverage(compilation, sourceAnalysis, initSourceMap, initInstructionOffsetLookup, initCoverageMapData) + if err != nil { + return nil, err + } + err = analyzeContractSourceCoverage(compilation, sourceAnalysis, runtimeSourceMap, runtimeInstructionOffsetLookup, runtimeCoverageMapData) + if err != nil { + return nil, err + } + } + } + } + return sourceAnalysis, nil +} + +// analyzeContractSourceCoverage takes a compilation, a SourceAnalysis, the source map they were derived from, +// a lookup of instruction index->offset, and coverage map data. It updates the coverage source line mapping with +// coverage data, after analyzing the coverage data for the given file in the given compilation. +// Returns an error if one occurs. +func analyzeContractSourceCoverage(compilation types.Compilation, sourceAnalysis *SourceAnalysis, sourceMap types.SourceMap, instructionOffsetLookup []int, contractCoverageData *ContractCoverageMap) error { + // Loop through each source map element + for _, sourceMapElement := range sourceMap { + // If this source map element doesn't map to any file (compiler generated inline code), it will have no + // relevance to the coverage map, so we skip it. + if sourceMapElement.SourceUnitID == -1 { + continue + } + + // Obtain our source for this file ID + sourcePath, idExists := compilation.SourceIdToPath[sourceMapElement.SourceUnitID] + + // TODO: We may also go out of bounds because this maps to a "generated source" which we do not have. + // For now, we silently skip these cases. + if !idExists { + continue + } + + // Capture the hit count of the source map element. 
+ succHitCount := uint(0) + revertHitCount := uint(0) + if contractCoverageData != nil { + succHitCount = contractCoverageData.successfulCoverage.HitCount(instructionOffsetLookup[sourceMapElement.Index]) + revertHitCount = contractCoverageData.revertedCoverage.HitCount(instructionOffsetLookup[sourceMapElement.Index]) + } + + // Obtain the source file this element maps to. + if sourceFile, ok := sourceAnalysis.Files[sourcePath]; ok { + // Mark all lines which fall within this range. + start := sourceMapElement.Offset + + startLine := sort.Search(len(sourceFile.CumulativeOffsetByLine), func(i int) bool { + return sourceFile.CumulativeOffsetByLine[i] > start + }) + + // index iz zero based, line numbers are 1 based + sourceLine := sourceFile.Lines[startLine-1] + + // Check if the line is within range + if sourceMapElement.Offset < sourceLine.End { + // Mark the line active/executable. + sourceLine.IsActive = true + + // Set its coverage state and increment hit counts + sourceLine.SuccessHitCount += succHitCount + sourceLine.RevertHitCount += revertHitCount + sourceLine.IsCovered = sourceLine.IsCovered || sourceLine.SuccessHitCount > 0 + sourceLine.IsCoveredReverted = sourceLine.IsCoveredReverted || sourceLine.RevertHitCount > 0 + + } + } else { + return fmt.Errorf("could not perform source code analysis, missing source '%v'", sourcePath) + } + + } + return nil +} + +// filterSourceMaps takes a given source map and filters it so overlapping (superset) source map elements are removed. +// In addition to any which do not map to any source code. This is necessary as some source map entries select an +// entire method definition. +// Returns the filtered source map. +func filterSourceMaps(compilation types.Compilation, sourceMap types.SourceMap) types.SourceMap { + // Create our resulting source map + filteredMap := make(types.SourceMap, 0) + + // Loop for each source map entry and determine if it should be included. 
+ for i, sourceMapElement := range sourceMap { + // Verify this file ID is not out of bounds for a source file index + if _, exists := compilation.SourceIdToPath[sourceMapElement.SourceUnitID]; !exists { + // TODO: We may also go out of bounds because this maps to a "generated source" which we do not have. + // For now, we silently skip these cases. + continue + } + + // Verify this source map does not overlap another + encapsulatesOtherMapping := false + for x, sourceMapElement2 := range sourceMap { + if i != x && sourceMapElement.SourceUnitID == sourceMapElement2.SourceUnitID && + !(sourceMapElement.Offset == sourceMapElement2.Offset && sourceMapElement.Length == sourceMapElement2.Length) { + if sourceMapElement2.Offset >= sourceMapElement.Offset && + sourceMapElement2.Offset+sourceMapElement2.Length <= sourceMapElement.Offset+sourceMapElement.Length { + encapsulatesOtherMapping = true + break + } + } + } + + if !encapsulatesOtherMapping { + filteredMap = append(filteredMap, sourceMapElement) + } + } + return filteredMap +} + +// parseSourceLines splits the provided source code into SourceLineAnalysis objects. +// Returns the SourceLineAnalysis objects. +func parseSourceLines(sourceCode []byte) ([]*SourceLineAnalysis, []int) { + // Create our lines and a variable to track where our current line start offset is. + var lines []*SourceLineAnalysis + var lineStart int + var cumulativeOffset []int + + // Split the source code on new line characters + sourceCodeLinesBytes := bytes.Split(sourceCode, []byte("\n")) + + // For each source code line, initialize a struct that defines its start/end offsets, set its contents. 
+ for i := 0; i < len(sourceCodeLinesBytes); i++ { + lineEnd := lineStart + len(sourceCodeLinesBytes[i]) + 1 + lines = append(lines, &SourceLineAnalysis{ + IsActive: false, + Start: lineStart, + End: lineEnd, + Contents: sourceCodeLinesBytes[i], + IsCovered: false, + IsCoveredReverted: false, + }) + cumulativeOffset = append(cumulativeOffset, int(lineStart)) + lineStart = lineEnd + } + + // Return the resulting lines + return lines, cumulativeOffset +} diff --git a/fuzzing/executiontracer/execution_trace.go b/fuzzing/executiontracer/execution_trace.go index 6ae459d7..76953094 100644 --- a/fuzzing/executiontracer/execution_trace.go +++ b/fuzzing/executiontracer/execution_trace.go @@ -2,14 +2,20 @@ package executiontracer import ( "encoding/hex" + "errors" "fmt" + "regexp" + "strings" + + "github.com/crytic/medusa/chain" "github.com/crytic/medusa/compilation/abiutils" "github.com/crytic/medusa/fuzzing/contracts" "github.com/crytic/medusa/fuzzing/valuegeneration" + "github.com/crytic/medusa/logging" + "github.com/crytic/medusa/logging/colors" "github.com/ethereum/go-ethereum/accounts/abi" coreTypes "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/core/vm" - "strings" ) // ExecutionTrace contains information recorded by an ExecutionTracer. It contains information about each call @@ -32,13 +38,18 @@ func newExecutionTrace(contracts contracts.Contracts) *ExecutionTrace { } } -// generateCallFrameEnterString generates a header string to print for the given call frame. It contains -// information about the invoked call. -// Returns the header string -func (t *ExecutionTrace) generateCallFrameEnterString(callFrame *CallFrame) string { - // Define some strings that represent our current call frame +// generateCallFrameEnterElements generates a list of elements describing top level information about this call frame. +// This list of elements will hold information about what kind of call it is, wei sent, what method is called, and more. 
+// Additionally, the list may also hold formatting options for console output. This function also returns a non-empty +// string in case this call frame represents a call to the console.log precompile contract. +func (t *ExecutionTrace) generateCallFrameEnterElements(callFrame *CallFrame) ([]any, string) { + // Create list of elements and console log string + elements := make([]any, 0) + var consoleLogString string + + // Define some strings and objects that represent our current call frame var ( - callType = "call" + callType = []any{colors.BlueBold, "[call] ", colors.Reset} proxyContractName = "" codeContractName = "" methodName = "" @@ -46,13 +57,16 @@ func (t *ExecutionTrace) generateCallFrameEnterString(callFrame *CallFrame) stri err error ) - // If this is a contract creation, use a different prefix + // If this is a contract creation or proxy call, use different formatting for call type if callFrame.IsContractCreation() { - callType = "creation" + callType = []any{colors.YellowBold, "[creation] ", colors.Reset} } else if callFrame.IsProxyCall() { - callType = "proxy call" + callType = []any{colors.CyanBold, "[proxy call] ", colors.Reset} } + // Append the formatted call type information to the list of elements + elements = append(elements, callType...) + // Resolve our contract names, as well as our method and its name from the code contract. 
if callFrame.ToContractAbi != nil { proxyContractName = callFrame.ToContractName @@ -65,7 +79,7 @@ func (t *ExecutionTrace) generateCallFrameEnterString(callFrame *CallFrame) stri } else { method, err = callFrame.CodeContractAbi.MethodById(callFrame.InputData) if err == nil { - methodName = method.Name + methodName = method.Sig } } } @@ -87,10 +101,31 @@ func (t *ExecutionTrace) generateCallFrameEnterString(callFrame *CallFrame) stri // Unpack our input values and obtain a string to represent them inputValues, err := method.Inputs.Unpack(abiDataInputBuffer) if err == nil { + // Encode the ABI arguments into strings encodedInputString, err := valuegeneration.EncodeABIArgumentsToString(method.Inputs, inputValues) if err == nil { inputArgumentsDisplayText = &encodedInputString } + + // If the call was made to the console log precompile address, let's retrieve the log and format it + if callFrame.ToAddress == chain.ConsoleLogContractAddress { + // First, attempt to do string formatting if the first element is a string, has a percent sign in it, + // and there is at least one argument provided for formatting. + exp := regexp.MustCompile(`%`) + stringInput, isString := inputValues[0].(string) + if isString && exp.MatchString(stringInput) && len(inputValues) > 1 { + // Format the string and add it to the list of logs + consoleLogString = fmt.Sprintf(inputValues[0].(string), inputValues[1:]...) + } else { + // The string does not need to be formatted, and we can just use the encoded input string + consoleLogString = encodedInputString + } + + // Add a bullet point before the string and a new line after the string + if len(consoleLogString) > 0 { + consoleLogString = colors.BULLET_POINT + " " + consoleLogString + "\n" + } + } } } @@ -103,25 +138,37 @@ func (t *ExecutionTrace) generateCallFrameEnterString(callFrame *CallFrame) stri // Generate the message we wish to output finally, using all these display string components. 
// If we executed code, attach additional context such as the contract name, method, etc. + var callInfo string if callFrame.IsProxyCall() { if callFrame.ExecutedCode { - return fmt.Sprintf("[%v] %v -> %v.%v(%v) (addr=%v, code=%v, value=%v, sender=%v)", callType, proxyContractName, codeContractName, methodName, *inputArgumentsDisplayText, callFrame.ToAddress.String(), callFrame.CodeAddress.String(), callFrame.CallValue, callFrame.SenderAddress.String()) + callInfo = fmt.Sprintf("%v -> %v.%v(%v) (addr=%v, code=%v, value=%v, sender=%v)", proxyContractName, codeContractName, methodName, *inputArgumentsDisplayText, callFrame.ToAddress.String(), callFrame.CodeAddress.String(), callFrame.CallValue, callFrame.SenderAddress.String()) } else { - return fmt.Sprintf("[%v] (addr=%v, value=%v, sender=%v)", callType, callFrame.ToAddress.String(), callFrame.CallValue, callFrame.SenderAddress.String()) + callInfo = fmt.Sprintf("(addr=%v, value=%v, sender=%v)", callFrame.ToAddress.String(), callFrame.CallValue, callFrame.SenderAddress.String()) } } else { if callFrame.ExecutedCode { - return fmt.Sprintf("[%v] %v.%v(%v) (addr=%v, value=%v, sender=%v)", callType, codeContractName, methodName, *inputArgumentsDisplayText, callFrame.ToAddress.String(), callFrame.CallValue, callFrame.SenderAddress.String()) + if callFrame.ToAddress == chain.ConsoleLogContractAddress { + callInfo = fmt.Sprintf("%v.%v(%v)", codeContractName, methodName, *inputArgumentsDisplayText) + } else { + callInfo = fmt.Sprintf("%v.%v(%v) (addr=%v, value=%v, sender=%v)", codeContractName, methodName, *inputArgumentsDisplayText, callFrame.ToAddress.String(), callFrame.CallValue, callFrame.SenderAddress.String()) + } } else { - return fmt.Sprintf("[%v] (addr=%v, value=%v, sender=%v)", callType, callFrame.ToAddress.String(), callFrame.CallValue, callFrame.SenderAddress.String()) + callInfo = fmt.Sprintf("(addr=%v, value=%v, sender=%v)", callFrame.ToAddress.String(), callFrame.CallValue, callFrame.SenderAddress.String()) 
} } + + // Add call information to the elements + elements = append(elements, callInfo, "\n") + + return elements, consoleLogString } -// generateCallFrameExitString generates a footer string to print for the given call frame. It contains -// result information about the call. -// Returns the footer string. -func (t *ExecutionTrace) generateCallFrameExitString(callFrame *CallFrame) string { +// generateCallFrameExitElements generates a list of elements describing the return data of the call frame (e.g. +// traditional return data, assertion failure, revert data, etc.). Additionally, the list may also hold formatting options for console output. +func (t *ExecutionTrace) generateCallFrameExitElements(callFrame *CallFrame) []any { + // Create list of elements + elements := make([]any, 0) + // Define some strings that represent our current call frame var method *abi.Method @@ -151,26 +198,34 @@ func (t *ExecutionTrace) generateCallFrameExitString(callFrame *CallFrame) strin // If we could not correctly obtain the unpacked arguments in a nice display string (due to not having a resolved // contract or method definition, or failure to unpack), we display as raw data in the worst case. - if outputArgumentsDisplayText == nil { + // TODO: Fix if return data is empty len byte array + if outputArgumentsDisplayText == nil && len(callFrame.ReturnData) > 0 { temp := fmt.Sprintf("return_data=%v", hex.EncodeToString(callFrame.ReturnData)) outputArgumentsDisplayText = &temp } // Wrap our return message and output it at the end. if callFrame.ReturnError == nil { - return fmt.Sprintf("[return (%v)]", *outputArgumentsDisplayText) + if outputArgumentsDisplayText != nil { + elements = append(elements, colors.GreenBold, fmt.Sprintf("[return (%v)]", *outputArgumentsDisplayText), colors.Reset, "\n") + } else { + elements = append(elements, colors.GreenBold, "[return]", colors.Reset, "\n") + } + return elements } // Try to resolve a panic message and check if it signals a failed assertion. 
panicCode := abiutils.GetSolidityPanicCode(callFrame.ReturnError, callFrame.ReturnData, true) - if panicCode != nil && panicCode.Uint64() == abiutils.PanicCodeAssertFailed { - return "[assertion failed]" + if panicCode != nil { + elements = append(elements, colors.RedBold, fmt.Sprintf("[%v]", abiutils.GetPanicReason(panicCode.Uint64())), colors.Reset, "\n") + return elements } // Try to resolve an assertion failed panic code. errorMessage := abiutils.GetSolidityRevertErrorString(callFrame.ReturnError, callFrame.ReturnData) if errorMessage != nil { - return fmt.Sprintf("[revert ('%v')]", *errorMessage) + elements = append(elements, colors.RedBold, fmt.Sprintf("[revert ('%v')]", *errorMessage), colors.Reset, "\n") + return elements } // Try to unpack a custom Solidity error from the return values. @@ -178,23 +233,28 @@ func (t *ExecutionTrace) generateCallFrameExitString(callFrame *CallFrame) strin if matchedCustomError != nil { customErrorArgsDisplayText, err := valuegeneration.EncodeABIArgumentsToString(matchedCustomError.Inputs, unpackedCustomErrorArgs) if err == nil { - return fmt.Sprintf("[revert (error: %v(%v))]", matchedCustomError.Name, customErrorArgsDisplayText) + elements = append(elements, colors.RedBold, fmt.Sprintf("[revert (error: %v(%v))]", matchedCustomError.Name, customErrorArgsDisplayText), colors.Reset, "\n") + return elements } } // Check if this is a generic revert. - if callFrame.ReturnError == vm.ErrExecutionReverted { - return "[revert]" + if errors.Is(callFrame.ReturnError, vm.ErrExecutionReverted) { + elements = append(elements, colors.RedBold, "[revert]", colors.Reset, "\n") + return elements } // If we could not resolve any custom error, we simply print out the generic VM error message. 
- return fmt.Sprintf("[vm error ('%v')]", callFrame.ReturnError.Error()) + elements = append(elements, colors.RedBold, fmt.Sprintf("[vm error ('%v')]", callFrame.ReturnError.Error()), colors.Reset, "\n") + return elements } -// generateEventEmittedString generates a string used to express an event emission. It contains information about an -// event log. -// Returns a string representing an event emission. -func (t *ExecutionTrace) generateEventEmittedString(callFrame *CallFrame, eventLog *coreTypes.Log) string { +// generateEventEmittedElements generates a list of elements used to express an event emission. It contains information about an +// event log such as the topics and the event data. Additionally, the list may also hold formatting options for console output. +func (t *ExecutionTrace) generateEventEmittedElements(callFrame *CallFrame, eventLog *coreTypes.Log) []any { + // Create list of elements + elements := make([]any, 0) + // If this is an event log, match it in our contract's ABI. var eventDisplayText *string @@ -235,26 +295,35 @@ func (t *ExecutionTrace) generateEventEmittedString(callFrame *CallFrame, eventL } // Finally, add our output line with this event data to it. - return fmt.Sprintf("[event] %v", *eventDisplayText) + elements = append(elements, colors.MagentaBold, "[event] ", colors.Reset, *eventDisplayText, "\n") + return elements } -// generateStringsForCallFrame generates indented strings for a given call frame and its children. -// Returns the list of strings, to be joined by new line separators. -func (t *ExecutionTrace) generateStringsForCallFrame(currentDepth int, callFrame *CallFrame) []string { - // Create our resulting strings array - var outputLines []string +// generateElementsAndLogsForCallFrame generates a list of elements and logs for a given call frame and its children. +// The list of elements may also hold formatting options for console output. The list of logs represent calls to the +// console.log precompile contract. 
+func (t *ExecutionTrace) generateElementsAndLogsForCallFrame(currentDepth int, callFrame *CallFrame) ([]any, []any) { + // Create list of elements and logs + elements := make([]any, 0) + consoleLogs := make([]any, 0) // Create our current call line prefix (indented by call depth) - prefix := strings.Repeat("\t", currentDepth) + " -> " + prefix := strings.Repeat("\t", currentDepth) + " => " // If we're printing the root frame, add the overall execution trace header. if currentDepth == 0 { - outputLines = append(outputLines, prefix+"[Execution Trace]") + elements = append(elements, colors.Bold, "[Execution Trace]", colors.Reset, "\n") } - // Add the call frame enter header - header := prefix + t.generateCallFrameEnterString(callFrame) - outputLines = append(outputLines, header) + // Add the call frame enter header elements + newElements, consoleLogString := t.generateCallFrameEnterElements(callFrame) + elements = append(elements, prefix) + elements = append(elements, newElements...) + + // If this call frame was a console.log contract call, add the string to the list of logs + if len(consoleLogString) > 0 { + consoleLogs = append(consoleLogs, consoleLogString) + } // Now that the header has been printed, create our indent level to express everything that // happened under it. @@ -268,31 +337,53 @@ func (t *ExecutionTrace) generateStringsForCallFrame(currentDepth int, callFrame for _, operation := range callFrame.Operations { if childCallFrame, ok := operation.(*CallFrame); ok { // If this is a call frame being entered, generate information recursively. - childOutputLines := t.generateStringsForCallFrame(currentDepth+1, childCallFrame) - outputLines = append(outputLines, childOutputLines...) + childOutputLines, childConsoleLogStrings := t.generateElementsAndLogsForCallFrame(currentDepth+1, childCallFrame) + elements = append(elements, childOutputLines...) + consoleLogs = append(consoleLogs, childConsoleLogStrings...) 
} else if eventLog, ok := operation.(*coreTypes.Log); ok { // If an event log was emitted, add a message for it. - eventMessage := prefix + t.generateEventEmittedString(callFrame, eventLog) - outputLines = append(outputLines, eventMessage) + elements = append(elements, prefix) + elements = append(elements, t.generateEventEmittedElements(callFrame, eventLog)...) } } // If we self-destructed, add a message for it before our footer. if callFrame.SelfDestructed { - outputLines = append(outputLines, fmt.Sprintf("%v[selfdestruct]", prefix)) + elements = append(elements, prefix, colors.RedBold, "[selfdestruct]", colors.Reset, "\n") } // Add the call frame exit footer - footer := prefix + t.generateCallFrameExitString(callFrame) - outputLines = append(outputLines, footer) + elements = append(elements, prefix) + elements = append(elements, t.generateCallFrameExitElements(callFrame)...) + + } + + // Return our elements + return elements, consoleLogs +} + +// Log returns a logging.LogBuffer that represents this execution trace. This buffer will be passed to the underlying +// logger which will format it accordingly for console or file. +func (t *ExecutionTrace) Log() *logging.LogBuffer { + // Create a buffer + buffer := logging.NewLogBuffer() + + // First, add the elements that make up the overarching execution trace + elements, logs := t.generateElementsAndLogsForCallFrame(0, t.TopLevelCallFrame) + buffer.Append(elements...) + + // If we captured any logs during tracing, add them to the overarching execution trace + if len(logs) > 0 { + buffer.Append(colors.Bold, "[Logs]", colors.Reset, "\n") + buffer.Append(logs...) } - // Return our output lines - return outputLines + return buffer } -// String returns a string representation of the execution trace. 
+// String returns the string representation of this execution trace func (t *ExecutionTrace) String() string { - outputLines := t.generateStringsForCallFrame(0, t.TopLevelCallFrame) - return strings.Join(outputLines, "\n") + // Internally, we just call the log function, get the list of elements and create their non-colorized string representation + // Might be useful for 3rd party apps + return t.Log().String() } diff --git a/fuzzing/executiontracer/execution_tracer.go b/fuzzing/executiontracer/execution_tracer.go index fdf6c08d..0b876c0c 100644 --- a/fuzzing/executiontracer/execution_tracer.go +++ b/fuzzing/executiontracer/execution_tracer.go @@ -1,31 +1,39 @@ package executiontracer import ( + "math/big" + "github.com/crytic/medusa/chain" "github.com/crytic/medusa/fuzzing/contracts" + "github.com/crytic/medusa/utils" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/core" "github.com/ethereum/go-ethereum/core/state" + "github.com/ethereum/go-ethereum/core/tracing" + coretypes "github.com/ethereum/go-ethereum/core/types" + "github.com/ethereum/go-ethereum/core/vm" + "github.com/ethereum/go-ethereum/eth/tracers" "golang.org/x/exp/slices" - "math/big" ) // CallWithExecutionTrace obtains an execution trace for a given call, on the provided chain, using the state // provided. If a nil state is provided, the current chain state will be used. // Returns the ExecutionTrace for the call or an error if one occurs. 
-func CallWithExecutionTrace(chain *chain.TestChain, contractDefinitions contracts.Contracts, msg core.Message, state *state.StateDB) (*core.ExecutionResult, *ExecutionTrace, error) { +func CallWithExecutionTrace(testChain *chain.TestChain, contractDefinitions contracts.Contracts, msg *core.Message, state *state.StateDB) (*core.ExecutionResult, *ExecutionTrace, error) { // Create an execution tracer - executionTracer := NewExecutionTracer(contractDefinitions, chain.CheatCodeContracts()) + executionTracer := NewExecutionTracer(contractDefinitions, testChain.CheatCodeContracts()) + defer executionTracer.Close() // Call the contract on our chain with the provided state. - executionResult, err := chain.CallContract(msg, state, executionTracer) + executionResult, err := testChain.CallContract(msg, state, executionTracer.NativeTracer()) if err != nil { return nil, nil, err } // Obtain our trace - trace := executionTracer.Trace() + hash := utils.MessageToTransaction(msg).Hash() + trace := executionTracer.GetTrace(hash) // Return the trace return executionResult, trace, nil @@ -34,15 +42,14 @@ func CallWithExecutionTrace(chain *chain.TestChain, contractDefinitions contract // ExecutionTracer records execution information into an ExecutionTrace, containing information about each call // scope entered and exited. type ExecutionTracer struct { - // callDepth refers to the current EVM depth during tracing. - callDepth uint64 - // evm refers to the EVM instance last captured. - evm *vm.EVM + evmContext *tracing.VMContext // trace represents the current execution trace captured by this tracer. trace *ExecutionTrace + traceMap map[common.Hash]*ExecutionTrace + // currentCallFrame references the current call frame being traced. currentCallFrame *CallFrame @@ -52,11 +59,13 @@ type ExecutionTracer struct { // cheatCodeContracts represents the cheat code contract definitions to match for execution traces. 
cheatCodeContracts map[common.Address]*chain.CheatCodeContract - // onNextCaptureState refers to methods which should be executed the next time CaptureState executes. - // CaptureState is called prior to execution of an instruction. This allows actions to be performed + // onNextCaptureState refers to methods which should be executed the next time OnOpcode executes. + // OnOpcode is called prior to execution of an instruction. This allows actions to be performed // after some state is captured, on the next state capture (e.g. detecting a log instruction, but // using this structure to execute code later once the log is committed). onNextCaptureState []func() + + nativeTracer *chain.TestChainTracer } // NewExecutionTracer creates a ExecutionTracer and returns it. @@ -64,30 +73,65 @@ func NewExecutionTracer(contractDefinitions contracts.Contracts, cheatCodeContra tracer := &ExecutionTracer{ contractDefinitions: contractDefinitions, cheatCodeContracts: cheatCodeContracts, + traceMap: make(map[common.Hash]*ExecutionTrace), + } + innerTracer := &tracers.Tracer{ + Hooks: &tracing.Hooks{ + OnTxStart: tracer.OnTxStart, + OnEnter: tracer.OnEnter, + OnTxEnd: tracer.OnTxEnd, + OnExit: tracer.OnExit, + OnOpcode: tracer.OnOpcode, + }, } + tracer.nativeTracer = &chain.TestChainTracer{Tracer: innerTracer, CaptureTxEndSetAdditionalResults: nil} + return tracer } -// Trace returns the currently recording or last recorded execution trace by the tracer. -func (t *ExecutionTracer) Trace() *ExecutionTrace { - return t.trace +// NativeTracer returns the underlying TestChainTracer. +func (t *ExecutionTracer) NativeTracer() *chain.TestChainTracer { + return t.nativeTracer + } -// CaptureTxStart is called upon the start of transaction execution, as defined by vm.EVMLogger. -func (t *ExecutionTracer) CaptureTxStart(gasLimit uint64) { +// Close sets the traceMap to nil and should be called after the execution tracer is finish being used. 
+func (t *ExecutionTracer) Close() { + t.traceMap = nil +} + +// GetTrace returns the currently recording or last recorded execution trace by the tracer. +func (t *ExecutionTracer) GetTrace(txHash common.Hash) *ExecutionTrace { + if trace, ok := t.traceMap[txHash]; ok { + return trace + } + return nil +} + +// OnTxEnd is called upon the end of transaction execution, as defined by tracers.Tracer. +func (t *ExecutionTracer) OnTxEnd(receipt *coretypes.Receipt, err error) { + // We avoid storing the trace for this transaction. An error should realistically only occur if we hit a block gas + // limit error. In this case, the transaction will be retried in the next block and we can retrieve the trace at + // that time. + if err != nil || receipt == nil { + return + } + t.traceMap[receipt.TxHash] = t.trace +} + +// OnTxStart is called upon the start of transaction execution, as defined by tracers.Tracer. +func (t *ExecutionTracer) OnTxStart(vm *tracing.VMContext, tx *coretypes.Transaction, from common.Address) { // Reset our capture state - t.callDepth = 0 t.trace = newExecutionTrace(t.contractDefinitions) t.currentCallFrame = nil t.onNextCaptureState = nil -} - -// CaptureTxEnd is called upon the end of transaction execution, as defined by vm.EVMLogger. -func (t *ExecutionTracer) CaptureTxEnd(restGas uint64) { + t.traceMap = make(map[common.Hash]*ExecutionTrace) + // Store our evm reference + t.evmContext = vm } -// resolveConstructorArgs resolves previously unresolved constructor argument ABI data from the call data, if +// resolveCallFrameConstructorArgs resolves previously unresolved constructor argument ABI data from the call data, if // the call frame provided represents a contract deployment. func (t *ExecutionTracer) resolveCallFrameConstructorArgs(callFrame *CallFrame, contract *contracts.Contract) { // If this is a contract creation and the constructor ABI argument data has not yet been resolved, do so now. 
@@ -118,6 +162,12 @@ func (t *ExecutionTracer) resolveCallFrameContractDefinitions(callFrame *CallFra callFrame.ToContractName = toContract.Name() callFrame.ToContractAbi = &toContract.CompiledContract().Abi t.resolveCallFrameConstructorArgs(callFrame, toContract) + + // If this is a contract creation, set the code address to the address of the contract we just deployed. + if callFrame.IsContractCreation() { + callFrame.CodeContractName = toContract.Name() + callFrame.CodeContractAbi = &toContract.CompiledContract().Abi + } } } } @@ -135,6 +185,7 @@ func (t *ExecutionTracer) resolveCallFrameContractDefinitions(callFrame *CallFra if codeContract != nil { callFrame.CodeContractName = codeContract.Name() callFrame.CodeContractAbi = &codeContract.CompiledContract().Abi + callFrame.ExecutedCode = true } } } @@ -145,12 +196,12 @@ func (t *ExecutionTracer) captureEnteredCallFrame(fromAddress common.Address, to // Create our call frame struct to track data for this call frame we entered. callFrameData := &CallFrame{ SenderAddress: fromAddress, - ToAddress: toAddress, + ToAddress: toAddress, // Note: Set temporarily, overwritten if code executes (in OnOpcode) and the contract's address is overridden by delegatecall. ToContractName: "", ToContractAbi: nil, ToInitBytecode: nil, ToRuntimeBytecode: nil, - CodeAddress: toAddress, // Note: Set temporarily, overwritten if code executes (in CaptureState). + CodeAddress: toAddress, CodeContractName: "", CodeContractAbi: nil, CodeRuntimeBytecode: nil, @@ -185,7 +236,7 @@ func (t *ExecutionTracer) captureExitedCallFrame(output []byte, err error) { if t.currentCallFrame.ToRuntimeBytecode == nil { // As long as this isn't a failed contract creation, we should be able to fetch "to" byte code on exit. 
if !t.currentCallFrame.IsContractCreation() || err == nil { - t.currentCallFrame.ToRuntimeBytecode = t.evm.StateDB.GetCode(t.currentCallFrame.ToAddress) + t.currentCallFrame.ToRuntimeBytecode = t.evmContext.StateDB.GetCode(t.currentCallFrame.ToAddress) } } if t.currentCallFrame.CodeRuntimeBytecode == nil { @@ -194,7 +245,7 @@ func (t *ExecutionTracer) captureExitedCallFrame(output []byte, err error) { if t.currentCallFrame.CodeAddress == t.currentCallFrame.ToAddress { t.currentCallFrame.CodeRuntimeBytecode = t.currentCallFrame.ToRuntimeBytecode } else { - t.currentCallFrame.CodeRuntimeBytecode = t.evm.StateDB.GetCode(t.currentCallFrame.CodeAddress) + t.currentCallFrame.CodeRuntimeBytecode = t.evmContext.StateDB.GetCode(t.currentCallFrame.CodeAddress) } } @@ -209,41 +260,20 @@ func (t *ExecutionTracer) captureExitedCallFrame(output []byte, err error) { t.currentCallFrame = t.currentCallFrame.ParentCallFrame } -// CaptureStart initializes the tracing operation for the top of a call frame, as defined by vm.EVMLogger. -func (t *ExecutionTracer) CaptureStart(env *vm.EVM, from common.Address, to common.Address, create bool, input []byte, gas uint64, value *big.Int) { - // Store our evm reference - t.evm = env - +// OnEnter initializes the tracing operation for the top of a call frame, as defined by tracers.Tracer. +func (t *ExecutionTracer) OnEnter(depth int, typ byte, from common.Address, to common.Address, input []byte, gas uint64, value *big.Int) { // Capture that a new call frame was entered. - t.captureEnteredCallFrame(from, to, input, create, value) + t.captureEnteredCallFrame(from, to, input, (typ == byte(vm.CREATE) || typ == byte(vm.CREATE2)), value) } -// CaptureEnd is called after a call to finalize tracing completes for the top of a call frame, as defined by vm.EVMLogger. 
-func (t *ExecutionTracer) CaptureEnd(output []byte, gasUsed uint64, err error) { +// OnExit is called after a call to finalize tracing completes for the top of a call frame, as defined by tracers.Tracer. +func (t *ExecutionTracer) OnExit(depth int, output []byte, gasUsed uint64, err error, reverted bool) { // Capture that the call frame was exited. t.captureExitedCallFrame(output, err) } -// CaptureEnter is called upon entering of the call frame, as defined by vm.EVMLogger. -func (t *ExecutionTracer) CaptureEnter(typ vm.OpCode, from common.Address, to common.Address, input []byte, gas uint64, value *big.Int) { - // Increase our call depth now that we're entering a new call frame. - t.callDepth++ - - // Capture that a new call frame was entered. - t.captureEnteredCallFrame(from, to, input, typ == vm.CREATE || typ == vm.CREATE2, value) -} - -// CaptureExit is called upon exiting of the call frame, as defined by vm.EVMLogger. -func (t *ExecutionTracer) CaptureExit(output []byte, gasUsed uint64, err error) { - // Capture that the call frame was exited. - t.captureExitedCallFrame(output, err) - - // Decrease our call depth now that we've exited a call frame. - t.callDepth-- -} - -// CaptureState records data from an EVM state update, as defined by vm.EVMLogger. -func (t *ExecutionTracer) CaptureState(pc uint64, op vm.OpCode, gas, cost uint64, scope *vm.ScopeContext, rData []byte, depth int, vmErr error) { +// OnOpcode records data from an EVM state update, as defined by tracers.Tracer. +func (t *ExecutionTracer) OnOpcode(pc uint64, op byte, gas, cost uint64, scope tracing.OpContext, rData []byte, depth int, err error) { // Execute all "on next capture state" events and clear them. for _, eventHandler := range t.onNextCaptureState { eventHandler() @@ -255,34 +285,27 @@ func (t *ExecutionTracer) CaptureState(pc uint64, op vm.OpCode, gas, cost uint64 // be appropriately represented in this structure. 
The information populated earlier on frame enter represents // the raw call data, before delegate transformations are applied, etc. if !t.currentCallFrame.ExecutedCode { - t.currentCallFrame.SenderAddress = scope.Contract.CallerAddress - t.currentCallFrame.ToAddress = scope.Contract.Address() - if scope.Contract.CodeAddr != nil { - t.currentCallFrame.CodeAddress = *scope.Contract.CodeAddr - } - + t.currentCallFrame.SenderAddress = scope.Caller() + // This is not always the "to" address, but the current address e.g. for delegatecall. + t.currentCallFrame.ToAddress = scope.Address() // Mark code as having executed in this scope, so we don't set these values again (as cheat codes may affect it). // We also want to know if a given call scope executed code, or simply represented a value transfer call. t.currentCallFrame.ExecutedCode = true } // If we encounter a SELFDESTRUCT operation, record the operation. - if op == vm.SELFDESTRUCT { + if op == byte(vm.SELFDESTRUCT) { t.currentCallFrame.SelfDestructed = true } // If a log operation occurred, add a deferred operation to capture it. - if op == vm.LOG0 || op == vm.LOG1 || op == vm.LOG2 || op == vm.LOG3 || op == vm.LOG4 { + // TODO: Move this to OnLog + if op == byte(vm.LOG0) || op == byte(vm.LOG1) || op == byte(vm.LOG2) || op == byte(vm.LOG3) || op == byte(vm.LOG4) { t.onNextCaptureState = append(t.onNextCaptureState, func() { - logs := t.evm.StateDB.(*state.StateDB).Logs() + logs := t.evmContext.StateDB.(*state.StateDB).Logs() if len(logs) > 0 { t.currentCallFrame.Operations = append(t.currentCallFrame.Operations, logs[len(logs)-1]) } }) } } - -// CaptureFault records an execution fault, as defined by vm.EVMLogger. 
-func (t *ExecutionTracer) CaptureFault(pc uint64, op vm.OpCode, gas, cost uint64, scope *vm.ScopeContext, depth int, err error) { - -} diff --git a/fuzzing/fuzzer.go b/fuzzing/fuzzer.go index d65fcd67..4d862dc0 100644 --- a/fuzzing/fuzzer.go +++ b/fuzzing/fuzzer.go @@ -2,14 +2,28 @@ package fuzzing import ( "context" + "errors" "fmt" "math/big" "math/rand" + "os" + "path/filepath" + "runtime" "sort" + "strconv" "strings" "sync" "time" + "github.com/ethereum/go-ethereum/crypto" + + "github.com/crytic/medusa/fuzzing/executiontracer" + + "github.com/crytic/medusa/fuzzing/coverage" + "github.com/crytic/medusa/logging" + "github.com/crytic/medusa/logging/colors" + "github.com/rs/zerolog" + "github.com/crytic/medusa/fuzzing/calls" "github.com/crytic/medusa/utils/randomutils" "github.com/ethereum/go-ethereum/core/types" @@ -19,11 +33,11 @@ import ( "github.com/crytic/medusa/fuzzing/config" fuzzerTypes "github.com/crytic/medusa/fuzzing/contracts" "github.com/crytic/medusa/fuzzing/corpus" + fuzzingutils "github.com/crytic/medusa/fuzzing/utils" "github.com/crytic/medusa/fuzzing/valuegeneration" "github.com/crytic/medusa/utils" "github.com/ethereum/go-ethereum/accounts/abi" "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core" "golang.org/x/exp/slices" ) @@ -40,7 +54,11 @@ type Fuzzer struct { senders []common.Address // deployer describes an account address used to deploy contracts in fuzzing campaigns. deployer common.Address - // contractDefinitions defines targets to be fuzzed once their deployment is detected. + + // compilations describes all compilations added as targets. + compilations []compilationTypes.Compilation + // contractDefinitions defines targets to be fuzzed once their deployment is detected. They are derived from + // compilations. contractDefinitions fuzzerTypes.Contracts // baseValueSet represents a valuegeneration.ValueSet containing input values for our fuzz tests. 
baseValueSet *valuegeneration.ValueSet @@ -68,26 +86,61 @@ type Fuzzer struct { // Hooks describes the replaceable functions used by the Fuzzer. Hooks FuzzerHooks + + // logger describes the Fuzzer's log object that can be used to log important events + logger *logging.Logger } // NewFuzzer returns an instance of a new Fuzzer provided a project configuration, or an error if one is encountered // while initializing the code. func NewFuzzer(config config.ProjectConfig) (*Fuzzer, error) { + // Disable colors if requested + if config.Logging.NoColor { + colors.DisableColor() + } + + // Create the global logger and add stdout as an unstructured output stream + // Note that we are not using the project config's log level because we have not validated it yet + logging.GlobalLogger = logging.NewLogger(config.Logging.Level) + logging.GlobalLogger.AddWriter(os.Stdout, logging.UNSTRUCTURED, !config.Logging.NoColor) + + // If the log directory is a non-empty string, create a file for unstructured, un-colorized file logging + if config.Logging.LogDirectory != "" { + // Filename will be the "log-current_unix_timestamp.log" + filename := "log-" + strconv.FormatInt(time.Now().Unix(), 10) + ".log" + // Create the file + file, err := utils.CreateFile(config.Logging.LogDirectory, filename) + if err != nil { + logging.GlobalLogger.Error("Failed to create log file", err) + return nil, err + } + logging.GlobalLogger.AddWriter(file, logging.UNSTRUCTURED, false) + } + // Validate our provided config err := config.Validate() if err != nil { + logging.GlobalLogger.Error("Invalid configuration", err) return nil, err } + // Update the log level of the global logger now + logging.GlobalLogger.SetLevel(config.Logging.Level) + + // Get the fuzzer's custom sub-logger + logger := logging.GlobalLogger.NewSubLogger("module", "fuzzer") + // Parse the senders addresses from our account config. 
senders, err := utils.HexStringsToAddresses(config.Fuzzing.SenderAddresses) if err != nil { + logger.Error("Invalid sender address(es)", err) return nil, err } // Parse the deployer address from our account config deployer, err := utils.HexStringToAddress(config.Fuzzing.DeployerAddress) if err != nil { + logger.Error("Invalid deployer address", err) return nil, err } @@ -101,10 +154,12 @@ func NewFuzzer(config config.ProjectConfig) (*Fuzzer, error) { testCases: make([]TestCase, 0), testCasesFinished: make(map[string]TestCase), Hooks: FuzzerHooks{ - NewCallSequenceGeneratorConfigFunc: defaultNewCallSequenceGeneratorConfigFunc, + NewCallSequenceGeneratorConfigFunc: defaultCallSequenceGeneratorConfigFunc, + NewShrinkingValueMutatorFunc: defaultShrinkingValueMutatorFunc, ChainSetupFunc: chainSetupFromCompilations, CallSequenceTestFuncs: make([]CallSequenceTestFunc, 0), }, + logger: logger, } // Add our sender and deployer addresses to the base value set for the value generator, so they will be used as @@ -117,12 +172,14 @@ func NewFuzzer(config config.ProjectConfig) (*Fuzzer, error) { // If we have a compilation config if fuzzer.config.Compilation != nil { // Compile the targets specified in the compilation config - fmt.Printf("Compiling targets (platform '%s') ...\n", fuzzer.config.Compilation.Platform) - compilations, compilationOutput, err := (*fuzzer.config.Compilation).Compile() + fuzzer.logger.Info("Compiling targets with ", colors.Bold, fuzzer.config.Compilation.Platform, colors.Reset) + start := time.Now() + compilations, _, err := (*fuzzer.config.Compilation).Compile() if err != nil { + fuzzer.logger.Error("Failed to compile target", err) return nil, err } - fmt.Printf("%s", compilationOutput) + fuzzer.logger.Info("Finished compiling targets in ", time.Since(start).Round(time.Second)) // Add our compilation targets fuzzer.AddCompilationTargets(compilations) @@ -135,6 +192,9 @@ func NewFuzzer(config config.ProjectConfig) (*Fuzzer, error) { if 
fuzzer.config.Fuzzing.Testing.AssertionTesting.Enabled { attachAssertionTestCaseProvider(fuzzer) } + if fuzzer.config.Fuzzing.Testing.OptimizationTesting.Enabled { + attachOptimizationTestCaseProvider(fuzzer) + } return fuzzer, nil } @@ -188,6 +248,9 @@ func (f *Fuzzer) RegisterTestCase(testCase TestCase) { f.testCasesLock.Lock() defer f.testCasesLock.Unlock() + // Display what is being tested + f.logger.Info(testCase.LogMessage().Elements()...) + // Append our test case to our list f.testCases = append(f.testCases, testCase) } @@ -209,7 +272,7 @@ func (f *Fuzzer) ReportTestCaseFinished(testCase TestCase) { // We only log here if we're not configured to stop on the first test failure. This is because the fuzzer prints // results on exit, so we avoid duplicate messages. if !f.config.Fuzzing.Testing.StopOnFailedTest { - fmt.Printf("\n[%s] %s\n%s\n\n", testCase.Status(), testCase.Name(), testCase.Message()) + f.logger.Info(testCase.LogMessage().Elements()...) } // If the config specifies, we stop after the first failed test reported. @@ -221,19 +284,56 @@ func (f *Fuzzer) ReportTestCaseFinished(testCase TestCase) { // AddCompilationTargets takes a compilation and updates the Fuzzer state with additional Fuzzer.ContractDefinitions // definitions and Fuzzer.BaseValueSet values. func (f *Fuzzer) AddCompilationTargets(compilations []compilationTypes.Compilation) { - // Loop for each contract in each compilation and deploy it to the test node. - for _, comp := range compilations { - for sourcePath, source := range comp.Sources { + // Loop for each contract in each compilation and deploy it to the test chain + for i := 0; i < len(compilations); i++ { + // Add our compilation to the list and get a reference to it. 
+ f.compilations = append(f.compilations, compilations[i]) + compilation := &f.compilations[len(f.compilations)-1] + + // Loop for each source + for sourcePath, source := range compilation.SourcePathToArtifact { // Seed our base value set from every source's AST f.baseValueSet.SeedFromAst(source.Ast) // Loop for every contract and register it in our contract definitions for contractName := range source.Contracts { contract := source.Contracts[contractName] - contractDefinition := fuzzerTypes.NewContract(contractName, sourcePath, &contract) + + // Skip interfaces. + if contract.Kind == compilationTypes.ContractKindInterface { + continue + } + + contractDefinition := fuzzerTypes.NewContract(contractName, sourcePath, &contract, compilation) + + // Sort available methods by type + assertionTestMethods, propertyTestMethods, optimizationTestMethods := fuzzingutils.BinTestByType(&contract, + f.config.Fuzzing.Testing.PropertyTesting.TestPrefixes, + f.config.Fuzzing.Testing.OptimizationTesting.TestPrefixes, + f.config.Fuzzing.Testing.AssertionTesting.TestViewMethods) + contractDefinition.AssertionTestMethods = assertionTestMethods + contractDefinition.PropertyTestMethods = propertyTestMethods + contractDefinition.OptimizationTestMethods = optimizationTestMethods + + // Filter and record methods available for assertion testing. Property and optimization tests are always run. 
+ if len(f.config.Fuzzing.Testing.TargetFunctionSignatures) > 0 { + // Only consider methods that are in the target methods list + contractDefinition = contractDefinition.WithTargetedAssertionMethods(f.config.Fuzzing.Testing.TargetFunctionSignatures) + } + if len(f.config.Fuzzing.Testing.ExcludeFunctionSignatures) > 0 { + // Consider all methods except those in the exclude methods list + contractDefinition = contractDefinition.WithExcludedAssertionMethods(f.config.Fuzzing.Testing.ExcludeFunctionSignatures) + } + f.contractDefinitions = append(f.contractDefinitions, contractDefinition) } } + + // Cache all of our source code if it hasn't been already. + err := compilation.CacheSourceCode() + if err != nil { + f.logger.Warn("Failed to cache compilation source file data", err) + } } } @@ -241,21 +341,50 @@ func (f *Fuzzer) AddCompilationTargets(compilations []compilationTypes.Compilati func (f *Fuzzer) createTestChain() (*chain.TestChain, error) { // Create our genesis allocations. // NOTE: Sharing GenesisAlloc between chains will result in some accounts not being funded for some reason. 
- genesisAlloc := make(core.GenesisAlloc) + genesisAlloc := make(types.GenesisAlloc) // Fund all of our sender addresses in the genesis block initBalance := new(big.Int).Div(abi.MaxInt256, big.NewInt(2)) // TODO: make this configurable for _, sender := range f.senders { - genesisAlloc[sender] = core.GenesisAccount{ + genesisAlloc[sender] = types.Account{ Balance: initBalance, } } // Fund our deployer address in the genesis block - genesisAlloc[f.deployer] = core.GenesisAccount{ + genesisAlloc[f.deployer] = types.Account{ Balance: initBalance, } + // Identify which contracts need to be predeployed to a deterministic address by iterating across the mapping + contractAddressOverrides := make(map[common.Hash]common.Address, len(f.config.Fuzzing.PredeployedContracts)) + for contractName, addrStr := range f.config.Fuzzing.PredeployedContracts { + found := false + // Try to find the associated compilation artifact + for _, contract := range f.contractDefinitions { + if contract.Name() == contractName { + // Hash the init bytecode (so that it can be easily identified in the EVM) and map it to the + // requested address + initBytecodeHash := crypto.Keccak256Hash(contract.CompiledContract().InitBytecode) + contractAddr, err := utils.HexStringToAddress(addrStr) + if err != nil { + return nil, fmt.Errorf("invalid address provided for a predeployed contract: %v", contract.Name()) + } + contractAddressOverrides[initBytecodeHash] = contractAddr + found = true + break + } + } + + // Throw an error if the contract specified in the config is not found + if !found { + return nil, fmt.Errorf("%v was specified in the predeployed contracts but was not found in the compilation artifacts", contractName) + } + } + + // Update the test chain config with the contract address overrides + f.config.Fuzzing.TestChainConfig.ContractAddressOverrides = contractAddressOverrides + // Create our test chain with our basic allocations and passed medusa's chain configuration testChain, err := 
chain.NewTestChain(genesisAlloc, &f.config.Fuzzing.TestChainConfig) @@ -268,71 +397,124 @@ func (f *Fuzzer) createTestChain() (*chain.TestChain, error) { // all compiled contract definitions. This includes any successful compilations as a result of the Fuzzer.config // definitions, as well as those added by Fuzzer.AddCompilationTargets. The contract deployment order is defined by // the Fuzzer.config. -func chainSetupFromCompilations(fuzzer *Fuzzer, testChain *chain.TestChain) error { - // Verify contract deployment order is not empty. If it's empty, but we only have one contract definition, - // we can infer the deployment order. Otherwise, we report an error. - if len(fuzzer.config.Fuzzing.DeploymentOrder) == 0 { - if len(fuzzer.contractDefinitions) == 1 { - fuzzer.config.Fuzzing.DeploymentOrder = []string{fuzzer.contractDefinitions[0].Name()} - } else { - return fmt.Errorf("you must specify a contract deployment order within your project configuration") +func chainSetupFromCompilations(fuzzer *Fuzzer, testChain *chain.TestChain) (*executiontracer.ExecutionTrace, error) { + // Verify that target contracts is not empty. If it's empty, but we only have one contract definition, + // we can infer the target contracts. Otherwise, we report an error. + if len(fuzzer.config.Fuzzing.TargetContracts) == 0 { + var found bool + for _, contract := range fuzzer.contractDefinitions { + // If only one contract is defined, we can infer the target contract by filtering interfaces/libraries. 
+ if contract.CompiledContract().Kind == compilationTypes.ContractKindContract { + if !found { + fuzzer.config.Fuzzing.TargetContracts = []string{contract.Name()} + found = true + } else { + // TODO list options for the user to choose from + return nil, fmt.Errorf("specify target contract(s)") + } + } } } - // Loop for all contracts to deploy + // Concatenate the predeployed contracts and target contracts + // Ordering is important here (predeploys _then_ targets) so that you can have the same contract in both lists + // while still being able to use the contract address overrides + contractsToDeploy := make([]string, 0) + balances := make([]*big.Int, 0) + for contractName := range fuzzer.config.Fuzzing.PredeployedContracts { + contractsToDeploy = append(contractsToDeploy, contractName) + // Preserve index of target contract balances + balances = append(balances, big.NewInt(0)) + } + contractsToDeploy = append(contractsToDeploy, fuzzer.config.Fuzzing.TargetContracts...) + balances = append(balances, fuzzer.config.Fuzzing.TargetContractsBalances...) + deployedContractAddr := make(map[string]common.Address) - for _, contractName := range fuzzer.config.Fuzzing.DeploymentOrder { + // Loop for all contracts to deploy + for i, contractName := range contractsToDeploy { // Look for a contract in our compiled contract definitions that matches this one found := false for _, contract := range fuzzer.contractDefinitions { // If we found a contract definition that matches this definition by name, try to deploy it if contract.Name() == contractName { + // Concatenate constructor arguments, if necessary args := make([]any, 0) if len(contract.CompiledContract().Abi.Constructor.Inputs) > 0 { + // If the contract is a predeployed contract, throw an error because they do not accept constructor + // args. 
+ if _, ok := fuzzer.config.Fuzzing.PredeployedContracts[contractName]; ok { + return nil, fmt.Errorf("predeployed contracts cannot accept constructor arguments") + } jsonArgs, ok := fuzzer.config.Fuzzing.ConstructorArgs[contractName] if !ok { - return fmt.Errorf("constructor arguments for contract %s not provided", contractName) + return nil, fmt.Errorf("constructor arguments for contract %s not provided", contractName) } decoded, err := valuegeneration.DecodeJSONArgumentsFromMap(contract.CompiledContract().Abi.Constructor.Inputs, jsonArgs, deployedContractAddr) if err != nil { - return err + return nil, err } args = decoded } - // Constructor our deployment message/tx data field + // Construct our deployment message/tx data field msgData, err := contract.CompiledContract().GetDeploymentMessageData(args) if err != nil { - return fmt.Errorf("initial contract deployment failed for contract \"%v\", error: %v", contractName, err) + return nil, fmt.Errorf("initial contract deployment failed for contract \"%v\", error: %v", contractName, err) + } + + // If our project config has a non-zero balance for this target contract, retrieve it + contractBalance := big.NewInt(0) + if len(balances) > i { + contractBalance = new(big.Int).Set(balances[i]) } // Create a message to represent our contract deployment (we let deployments consume the whole block // gas limit rather than use tx gas limit) - msg := calls.NewCallMessage(fuzzer.deployer, nil, 0, big.NewInt(0), fuzzer.config.Fuzzing.BlockGasLimit, nil, nil, nil, msgData) + msg := calls.NewCallMessage(fuzzer.deployer, nil, 0, contractBalance, fuzzer.config.Fuzzing.BlockGasLimit, nil, nil, nil, msgData) msg.FillFromTestChainProperties(testChain) // Create a new pending block we'll commit to chain block, err := testChain.PendingBlockCreate() if err != nil { - return err + return nil, err } // Add our transaction to the block - err = testChain.PendingBlockAddTx(msg) + err = testChain.PendingBlockAddTx(msg.ToCoreMessage()) if err 
!= nil { - return err + return nil, err } // Commit the pending block to the chain, so it becomes the new head. err = testChain.PendingBlockCommit() if err != nil { - return err + return nil, err } - // Ensure our transaction succeeded + // Ensure our transaction succeeded and, if it did not, attach an execution trace to it and re-run it. + // The execution trace will be returned so that it can be provided to the user for debugging if block.MessageResults[0].Receipt.Status != types.ReceiptStatusSuccessful { - return fmt.Errorf("contract deployment tx returned a failed status: %v", block.MessageResults[0].ExecutionResult.Err) + // Create a call sequence element to represent the failed contract deployment tx + cse := calls.NewCallSequenceElement(nil, msg, 0, 0) + cse.ChainReference = &calls.CallSequenceElementChainReference{ + Block: block, + TransactionIndex: len(block.Messages) - 1, + } + // Revert to genesis and re-run the failed contract deployment tx. + // We should be able to attach an execution trace; however, if it fails, we provide the ExecutionResult at a minimum. + err = testChain.RevertToBlockNumber(0) + if err != nil { + return nil, fmt.Errorf("failed to reset to genesis block: %v", err) + } else { + _, err = calls.ExecuteCallSequenceWithExecutionTracer(testChain, fuzzer.contractDefinitions, []*calls.CallSequenceElement{cse}, true) + if err != nil { + return nil, fmt.Errorf("deploying %s returned a failed status: %v", contractName, block.MessageResults[0].ExecutionResult.Err) + } + } + + // Return the execution error and the execution trace, if possible. 
+ return cse.ExecutionTrace, fmt.Errorf("deploying %s returned a failed status: %v", contractName, block.MessageResults[0].ExecutionResult.Err) } // Record our deployed contract so the next config-specified constructor args can reference this @@ -348,17 +530,17 @@ func chainSetupFromCompilations(fuzzer *Fuzzer, testChain *chain.TestChain) erro // If we did not find a contract corresponding to this item in the deployment order, we throw an error. if !found { - return fmt.Errorf("DeploymentOrder specified a contract name which was not found in the compilation: %v\n", contractName) + return nil, fmt.Errorf("%v was specified in the target contracts but was not found in the compilation artifacts", contractName) } } - return nil + return nil, nil } -// defaultNewCallSequenceGeneratorConfigFunc is a NewCallSequenceGeneratorConfigFunc which creates a +// defaultCallSequenceGeneratorConfigFunc is a NewCallSequenceGeneratorConfigFunc which creates a // CallSequenceGeneratorConfig with a default configuration. Returns the config or an error, if one occurs. -func defaultNewCallSequenceGeneratorConfigFunc(fuzzer *Fuzzer, valueSet *valuegeneration.ValueSet, randomProvider *rand.Rand) (*CallSequenceGeneratorConfig, error) { - // Create the underlying value generator for the worker and its sequence generator. - valueGenConfig := &valuegeneration.MutatingValueGeneratorConfig{ +func defaultCallSequenceGeneratorConfigFunc(fuzzer *Fuzzer, valueSet *valuegeneration.ValueSet, randomProvider *rand.Rand) (*CallSequenceGeneratorConfig, error) { + // Create the value generator and mutator for the worker. 
+// defaultShrinkingValueMutatorFunc is a NewShrinkingValueMutatorFunc which creates a value mutator to be used for
// Define a flag that indicates whether we have not cancelled our context
- err = f.Hooks.ChainSetupFunc(f, baseTestChain) + f.logger.Info("Setting up test chain") + trace, err := f.Hooks.ChainSetupFunc(f, baseTestChain) if err != nil { + if trace != nil { + f.logger.Error("Failed to initialize the test chain", err, errors.New(trace.Log().ColorString())) + } else { + f.logger.Error("Failed to initialize the test chain", err) + } return err } + f.logger.Info("Finished setting up test chain") // Initialize our coverage maps by measuring the coverage we get from the corpus. - err = f.corpus.Initialize(baseTestChain, f.contractDefinitions) + var corpusActiveSequences, corpusTotalSequences int + if totalCallSequences, testResults := f.corpus.CallSequenceEntryCount(); totalCallSequences > 0 || testResults > 0 { + f.logger.Info("Running call sequences in the corpus") + } + startTime := time.Now() + corpusActiveSequences, corpusTotalSequences, err = f.corpus.Initialize(baseTestChain, f.contractDefinitions) + if corpusTotalSequences > 0 { + f.logger.Info("Finished running call sequences in the corpus in ", time.Since(startTime).Round(time.Second)) + } if err != nil { + f.logger.Error("Failed to initialize the corpus", err) return err } + // Log corpus health statistics, if we have any existing sequences. + if corpusTotalSequences > 0 { + f.logger.Info( + colors.Bold, "corpus: ", colors.Reset, + "health: ", colors.Bold, int(float32(corpusActiveSequences)/float32(corpusTotalSequences)*100.0), "%", colors.Reset, ", ", + "sequences: ", colors.Bold, corpusTotalSequences, " (", corpusActiveSequences, " valid, ", corpusTotalSequences-corpusActiveSequences, " invalid)", colors.Reset, + ) + } + + // Log the start of our fuzzing campaign. + f.logger.Info("Fuzzing with ", colors.Bold, f.config.Fuzzing.Workers, colors.Reset, " workers") + // Start our printing loop now that we're about to begin fuzzing. go f.printMetricsLoop() // Publish a fuzzer starting event. 
err = f.Events.FuzzerStarting.Publish(FuzzerStartingEvent{Fuzzer: f}) if err != nil { + f.logger.Error("FuzzerStarting event subscriber returned an error", err) + return err + } + + // If StopOnNoTests is true and there are no test cases, then throw an error + if f.config.Fuzzing.Testing.StopOnNoTests && len(f.testCases) == 0 { + err = fmt.Errorf("no assertion, property, optimization, or custom tests were found to fuzz") + if !f.config.Fuzzing.Testing.AssertionTesting.TestViewMethods { + err = fmt.Errorf("no assertion, property, optimization, or custom tests were found to fuzz and testing view methods is disabled") + } + f.logger.Error("Failed to start fuzzer", err) return err } // Run the main worker loop err = f.spawnWorkersLoop(baseTestChain) + if err != nil { + f.logger.Error("Encountered an error in the main fuzzing loop", err) + } // NOTE: After this point, we capture errors but do not return immediately, as we want to exit gracefully. @@ -582,8 +820,9 @@ func (f *Fuzzer) Start() error { // previous error, as we don't want to lose corpus entries. if f.config.Fuzzing.CoverageEnabled { corpusFlushErr := f.corpus.Flush() - if err == nil { + if err == nil && corpusFlushErr != nil { err = corpusFlushErr + f.logger.Info("Failed to flush the corpus", err) } } @@ -591,11 +830,43 @@ func (f *Fuzzer) Start() error { fuzzerStoppingErr := f.Events.FuzzerStopping.Publish(FuzzerStoppingEvent{Fuzzer: f, err: err}) if err == nil && fuzzerStoppingErr != nil { err = fuzzerStoppingErr + f.logger.Error("FuzzerStopping event subscriber returned an error", err) } // Print our results on exit. f.printExitingResults() + // Finally, generate our coverage report if we have set a valid corpus directory. + if err == nil && len(f.config.Fuzzing.CoverageFormats) > 0 { + // Write to the default directory if we have no corpus directory set. 
+ coverageReportDir := filepath.Join("crytic-export", "coverage") + if f.config.Fuzzing.CorpusDirectory != "" { + coverageReportDir = filepath.Join(f.config.Fuzzing.CorpusDirectory, "coverage") + } + sourceAnalysis, err := coverage.AnalyzeSourceCoverage(f.compilations, f.corpus.CoverageMaps()) + + if err != nil { + f.logger.Error("Failed to analyze source coverage", err) + } else { + var path string + for _, reportType := range f.config.Fuzzing.CoverageFormats { + switch reportType { + case "html": + path, err = coverage.WriteHTMLReport(sourceAnalysis, coverageReportDir) + case "lcov": + path, err = coverage.WriteLCOVReport(sourceAnalysis, coverageReportDir) + default: + err = fmt.Errorf("unsupported coverage report type: %s", reportType) + } + if err != nil { + f.logger.Error(fmt.Sprintf("Failed to generate %s coverage report", reportType), err) + } else { + f.logger.Info(fmt.Sprintf("%s report(s) saved to: %s", reportType, path), colors.Bold, colors.Reset) + } + } + } + } + // Return any encountered error. 
return err } @@ -618,38 +889,55 @@ func (f *Fuzzer) printMetricsLoop() { lastCallsTested := big.NewInt(0) lastSequencesTested := big.NewInt(0) lastWorkerStartupCount := big.NewInt(0) + lastGasUsed := big.NewInt(0) lastPrintedTime := time.Time{} for !utils.CheckContextDone(f.ctx) { // Obtain our metrics callsTested := f.metrics.CallsTested() sequencesTested := f.metrics.SequencesTested() + gasUsed := f.metrics.GasUsed() + failedSequences := f.metrics.FailedSequences() workerStartupCount := f.metrics.WorkerStartupCount() + workersShrinking := f.metrics.WorkersShrinkingCount() // Calculate time elapsed since the last update secondsSinceLastUpdate := time.Since(lastPrintedTime).Seconds() + // Obtain memory usage stats + var memStats runtime.MemStats + runtime.ReadMemStats(&memStats) + memoryUsedMB := memStats.Alloc / 1024 / 1024 + memoryTotalMB := memStats.Sys / 1024 / 1024 + // Print a metrics update - fmt.Printf( - "fuzz: elapsed: %s, call: %d (%d/sec), seq/s: %d, resets/s: %d, cov: %d\n", - time.Since(startTime).Round(time.Second), - callsTested, - uint64(float64(new(big.Int).Sub(callsTested, lastCallsTested).Uint64())/secondsSinceLastUpdate), - uint64(float64(new(big.Int).Sub(sequencesTested, lastSequencesTested).Uint64())/secondsSinceLastUpdate), - uint64(float64(new(big.Int).Sub(workerStartupCount, lastWorkerStartupCount).Uint64())/secondsSinceLastUpdate), - f.corpus.ActiveCallSequenceCount(), - ) + logBuffer := logging.NewLogBuffer() + logBuffer.Append(colors.Bold, "fuzz: ", colors.Reset) + logBuffer.Append("elapsed: ", colors.Bold, time.Since(startTime).Round(time.Second).String(), colors.Reset) + logBuffer.Append(", calls: ", colors.Bold, fmt.Sprintf("%d (%d/sec)", callsTested, uint64(float64(new(big.Int).Sub(callsTested, lastCallsTested).Uint64())/secondsSinceLastUpdate)), colors.Reset) + logBuffer.Append(", seq/s: ", colors.Bold, fmt.Sprintf("%d", uint64(float64(new(big.Int).Sub(sequencesTested, lastSequencesTested).Uint64())/secondsSinceLastUpdate)), 
colors.Reset) + logBuffer.Append(", coverage: ", colors.Bold, fmt.Sprintf("%d", f.corpus.CoverageMaps().UniquePCs()), colors.Reset) + logBuffer.Append(", corpus: ", colors.Bold, fmt.Sprintf("%d", f.corpus.ActiveMutableSequenceCount()), colors.Reset) + logBuffer.Append(", failures: ", colors.Bold, fmt.Sprintf("%d/%d", failedSequences, sequencesTested), colors.Reset) + logBuffer.Append(", gas/s: ", colors.Bold, fmt.Sprintf("%d", uint64(float64(new(big.Int).Sub(gasUsed, lastGasUsed).Uint64())/secondsSinceLastUpdate)), colors.Reset) + if f.logger.Level() <= zerolog.DebugLevel { + logBuffer.Append(", shrinking: ", colors.Bold, fmt.Sprintf("%v", workersShrinking), colors.Reset) + logBuffer.Append(", mem: ", colors.Bold, fmt.Sprintf("%v/%v MB", memoryUsedMB, memoryTotalMB), colors.Reset) + logBuffer.Append(", resets/s: ", colors.Bold, fmt.Sprintf("%d", uint64(float64(new(big.Int).Sub(workerStartupCount, lastWorkerStartupCount).Uint64())/secondsSinceLastUpdate)), colors.Reset) + } + f.logger.Info(logBuffer.Elements()...) // Update our delta tracking metrics lastPrintedTime = time.Now() lastCallsTested = callsTested lastSequencesTested = sequencesTested + lastGasUsed = gasUsed lastWorkerStartupCount = workerStartupCount // If we reached our transaction threshold, halt testLimit := f.config.Fuzzing.TestLimit if testLimit > 0 && (!callsTested.IsUint64() || callsTested.Uint64() >= testLimit) { - fmt.Printf("transaction test limit reached, halting now ...\n") + f.logger.Info("Transaction test limit reached, halting now...") f.Stop() break } @@ -689,17 +977,9 @@ func (f *Fuzzer) printExitingResults() { ) // Print the results of each individual test case. - fmt.Printf("\n") - fmt.Printf("Fuzzer stopped, test results follow below ...\n") + f.logger.Info("Fuzzer stopped, test results follow below ...") for _, testCase := range f.testCases { - // Obtain the test case message. If it is a non-empty string, we format our output for it specially. - // Otherwise, we exclude it. 
- msg := strings.TrimSpace(testCase.Message()) - if msg != "" { - fmt.Printf("[%s] %s\n%s\n\n", testCase.Status(), strings.TrimSpace(testCase.Name()), msg) - } else { - fmt.Printf("[%s] %s\n", testCase.Status(), testCase.Name()) - } + f.logger.Info(testCase.LogMessage().ColorString()) // Tally our pass/fail count. if testCase.Status() == TestCaseStatusPassed { @@ -710,6 +990,5 @@ func (f *Fuzzer) printExitingResults() { } // Print our final tally of test statuses. - fmt.Printf("\n") - fmt.Printf("%d test(s) passed, %d test(s) failed\n", testCountPassed, testCountFailed) + f.logger.Info("Test summary: ", colors.GreenBold, testCountPassed, colors.Reset, " test(s) passed, ", colors.RedBold, testCountFailed, colors.Reset, " test(s) failed") } diff --git a/fuzzing/fuzzer_hooks.go b/fuzzing/fuzzer_hooks.go index d590d445..11d0e450 100644 --- a/fuzzing/fuzzer_hooks.go +++ b/fuzzing/fuzzer_hooks.go @@ -1,35 +1,50 @@ package fuzzing import ( + "math/rand" + + "github.com/crytic/medusa/fuzzing/executiontracer" + "github.com/crytic/medusa/chain" "github.com/crytic/medusa/fuzzing/calls" "github.com/crytic/medusa/fuzzing/valuegeneration" - "math/rand" ) // FuzzerHooks defines the hooks that can be used for the Fuzzer on an API level. type FuzzerHooks struct { // NewCallSequenceGeneratorConfigFunc describes the function to use to set up a new CallSequenceGeneratorConfig, // defining parameters for a new FuzzerWorker's CallSequenceGenerator. - // Note: The value generator provided within the config must be either thread safe, or a new instance must be - // provided per call to avoid concurrent access issues between workers. + // The value generator provided must be either thread safe, or a new instance must be provided per invocation to + // avoid concurrent access issues between workers. 
// NewCallSequenceGeneratorConfigFunc defines a method that is called to create a new CallSequenceGeneratorConfig, defining
-type TestChainSetupFunc func(fuzzer *Fuzzer, testChain *chain.TestChain) error +// An execution trace can also be returned in case of a deployment error for an improved debugging experience +type TestChainSetupFunc func(fuzzer *Fuzzer, testChain *chain.TestChain) (*executiontracer.ExecutionTrace, error) // CallSequenceTestFunc defines a method called after a fuzzing.FuzzerWorker sends another call in a types.CallSequence // during a fuzzing campaign. It returns a ShrinkCallSequenceRequest set, which represents a set of requests for @@ -47,7 +62,7 @@ type ShrinkCallSequenceRequest struct { VerifierFunction func(worker *FuzzerWorker, callSequence calls.CallSequence) (bool, error) // FinishedCallback is a method called upon when the shrink request has concluded. It provides the finalized // shrunken call sequence. - FinishedCallback func(worker *FuzzerWorker, shrunkenCallSequence calls.CallSequence) error + FinishedCallback func(worker *FuzzerWorker, shrunkenCallSequence calls.CallSequence, verboseTracing bool) error // RecordResultInCorpus indicates whether the shrunken call sequence should be recorded in the corpus. If so, when // the shrinking operation is completed, the sequence will be added to the corpus if it doesn't already exist. RecordResultInCorpus bool diff --git a/fuzzing/fuzzer_metrics.go b/fuzzing/fuzzer_metrics.go index 9d64c91a..a1ef8f11 100644 --- a/fuzzing/fuzzer_metrics.go +++ b/fuzzing/fuzzer_metrics.go @@ -11,14 +11,23 @@ type FuzzerMetrics struct { // fuzzerWorkerMetrics represents metrics for a single FuzzerWorker instance. type fuzzerWorkerMetrics struct { - // sequencesTested describes the amount of sequences of transactions which tests were run against. + // sequencesTested is the amount of sequences of transactions which tests were run against. sequencesTested *big.Int - // callsTested describes the amount of transactions/calls the fuzzer executed and ran tests against. 
+	// gasUsed is the amount of gas used by the transactions/calls the fuzzer executed and ran tests against.
func (m *FuzzerMetrics) SequencesTested() *big.Int { sequencesTested := big.NewInt(0) @@ -54,6 +74,14 @@ func (m *FuzzerMetrics) CallsTested() *big.Int { return transactionsTested } +func (m *FuzzerMetrics) GasUsed() *big.Int { + gasUsed := big.NewInt(0) + for _, workerMetrics := range m.workerMetrics { + gasUsed.Add(gasUsed, workerMetrics.gasUsed) + } + return gasUsed +} + // WorkerStartupCount describes the amount of times the worker was spawned for this index. Workers are periodically // reset. func (m *FuzzerMetrics) WorkerStartupCount() *big.Int { @@ -63,3 +91,14 @@ func (m *FuzzerMetrics) WorkerStartupCount() *big.Int { } return workerStartupCount } + +// WorkersShrinkingCount returns the amount of workers currently performing shrinking operations. +func (m *FuzzerMetrics) WorkersShrinkingCount() uint64 { + shrinkingCount := uint64(0) + for _, workerMetrics := range m.workerMetrics { + if workerMetrics.shrinking { + shrinkingCount++ + } + } + return shrinkingCount +} diff --git a/fuzzing/fuzzer_test.go b/fuzzing/fuzzer_test.go index 0334a32a..06f0992b 100644 --- a/fuzzing/fuzzer_test.go +++ b/fuzzing/fuzzer_test.go @@ -1,13 +1,20 @@ package fuzzing import ( + "encoding/hex" + "math/big" + "math/rand" + "reflect" + "testing" + + "github.com/crytic/medusa/fuzzing/executiontracer" + "github.com/crytic/medusa/chain" "github.com/crytic/medusa/events" "github.com/crytic/medusa/fuzzing/calls" "github.com/crytic/medusa/fuzzing/valuegeneration" "github.com/crytic/medusa/utils" - "math/rand" - "testing" + "github.com/ethereum/go-ethereum/common" "github.com/crytic/medusa/fuzzing/config" "github.com/stretchr/testify/assert" @@ -18,9 +25,9 @@ func TestFuzzerHooks(t *testing.T) { runFuzzerTest(t, &fuzzerSolcFileTest{ filePath: "testdata/contracts/assertions/assert_immediate.sol", configUpdates: func(config *config.ProjectConfig) { - config.Fuzzing.DeploymentOrder = []string{"TestContract"} + config.Fuzzing.TargetContracts = []string{"TestContract"} 
config.Fuzzing.Testing.PropertyTesting.Enabled = false - config.Fuzzing.Testing.AssertionTesting.Enabled = true + config.Fuzzing.Testing.OptimizationTesting.Enabled = false }, method: func(f *fuzzerTestContext) { // Attach to fuzzer hooks which simply set a success state. @@ -31,7 +38,7 @@ func TestFuzzerHooks(t *testing.T) { return existingSeqGenConfigFunc(fuzzer, valueSet, randomProvider) } existingChainSetupFunc := f.fuzzer.Hooks.ChainSetupFunc - f.fuzzer.Hooks.ChainSetupFunc = func(fuzzer *Fuzzer, testChain *chain.TestChain) error { + f.fuzzer.Hooks.ChainSetupFunc = func(fuzzer *Fuzzer, testChain *chain.TestChain) (*executiontracer.ExecutionTrace, error) { chainSetupOk = true return existingChainSetupFunc(fuzzer, testChain) } @@ -55,25 +62,43 @@ func TestFuzzerHooks(t *testing.T) { }) } -// TestAssertionsBasicSolving runs tests to ensure that assertion testing behaves as expected. -func TestAssertionsBasicSolving(t *testing.T) { +// TestAssertionMode runs tests to ensure that assertion testing behaves as expected. 
+func TestAssertionMode(t *testing.T) { filePaths := []string{ "testdata/contracts/assertions/assert_immediate.sol", "testdata/contracts/assertions/assert_even_number.sol", + "testdata/contracts/assertions/assert_arithmetic_underflow.sol", + "testdata/contracts/assertions/assert_divide_by_zero.sol", + "testdata/contracts/assertions/assert_enum_type_conversion_outofbounds.sol", + "testdata/contracts/assertions/assert_incorrect_storage_access.sol", + "testdata/contracts/assertions/assert_pop_empty_array.sol", + "testdata/contracts/assertions/assert_outofbounds_array_access.sol", + "testdata/contracts/assertions/assert_allocate_too_much_memory.sol", + "testdata/contracts/assertions/assert_call_uninitialized_variable.sol", + "testdata/contracts/assertions/assert_constant_method.sol", } for _, filePath := range filePaths { runFuzzerTest(t, &fuzzerSolcFileTest{ filePath: filePath, configUpdates: func(config *config.ProjectConfig) { - config.Fuzzing.DeploymentOrder = []string{"TestContract"} + config.Fuzzing.TargetContracts = []string{"TestContract"} + config.Fuzzing.Testing.AssertionTesting.PanicCodeConfig.FailOnAssertion = true + config.Fuzzing.Testing.AssertionTesting.PanicCodeConfig.FailOnAllocateTooMuchMemory = true + config.Fuzzing.Testing.AssertionTesting.PanicCodeConfig.FailOnArithmeticUnderflow = true + config.Fuzzing.Testing.AssertionTesting.PanicCodeConfig.FailOnCallUninitializedVariable = true + config.Fuzzing.Testing.AssertionTesting.PanicCodeConfig.FailOnEnumTypeConversionOutOfBounds = true + config.Fuzzing.Testing.AssertionTesting.PanicCodeConfig.FailOnDivideByZero = true + config.Fuzzing.Testing.AssertionTesting.PanicCodeConfig.FailOnIncorrectStorageAccess = true + config.Fuzzing.Testing.AssertionTesting.PanicCodeConfig.FailOnOutOfBoundsArrayAccess = true + config.Fuzzing.Testing.AssertionTesting.PanicCodeConfig.FailOnPopEmptyArray = true + config.Fuzzing.Testing.AssertionTesting.TestViewMethods = true config.Fuzzing.Testing.PropertyTesting.Enabled = false 
- config.Fuzzing.Testing.AssertionTesting.Enabled = true + config.Fuzzing.Testing.OptimizationTesting.Enabled = false }, method: func(f *fuzzerTestContext) { // Start the fuzzer err := f.fuzzer.Start() assert.NoError(t, err) - // Check for failed assertion tests. assertFailedTestsExpected(f, true) }, @@ -88,10 +113,10 @@ func TestAssertionsNotRequire(t *testing.T) { runFuzzerTest(t, &fuzzerSolcFileTest{ filePath: "testdata/contracts/assertions/assert_not_require.sol", configUpdates: func(config *config.ProjectConfig) { - config.Fuzzing.DeploymentOrder = []string{"TestContract"} + config.Fuzzing.TargetContracts = []string{"TestContract"} config.Fuzzing.TestLimit = 500 config.Fuzzing.Testing.PropertyTesting.Enabled = false - config.Fuzzing.Testing.AssertionTesting.Enabled = true + config.Fuzzing.Testing.OptimizationTesting.Enabled = false }, method: func(f *fuzzerTestContext) { // Start the fuzzer @@ -110,11 +135,10 @@ func TestAssertionsAndProperties(t *testing.T) { runFuzzerTest(t, &fuzzerSolcFileTest{ filePath: "testdata/contracts/assertions/assert_and_property_test.sol", configUpdates: func(config *config.ProjectConfig) { - config.Fuzzing.DeploymentOrder = []string{"TestContract"} + config.Fuzzing.TargetContracts = []string{"TestContract"} config.Fuzzing.TestLimit = 500 config.Fuzzing.Testing.StopOnFailedTest = false - config.Fuzzing.Testing.PropertyTesting.Enabled = true - config.Fuzzing.Testing.AssertionTesting.Enabled = true + config.Fuzzing.Testing.OptimizationTesting.Enabled = false }, method: func(f *fuzzerTestContext) { // Start the fuzzer @@ -127,17 +151,50 @@ func TestAssertionsAndProperties(t *testing.T) { }) } +// TestOptimizationMode runs a test to ensure that optimization mode works as expected +func TestOptimizationMode(t *testing.T) { + filePaths := []string{ + "testdata/contracts/optimizations/optimize.sol", + } + for _, filePath := range filePaths { + runFuzzerTest(t, &fuzzerSolcFileTest{ + filePath: filePath, + configUpdates: func(config 
*config.ProjectConfig) { + config.Fuzzing.TargetContracts = []string{"TestContract"} + config.Fuzzing.TestLimit = 10_000 // this test should expose a failure quickly. + config.Fuzzing.Testing.PropertyTesting.Enabled = false + config.Fuzzing.Testing.AssertionTesting.Enabled = false + }, + method: func(f *fuzzerTestContext) { + // Start the fuzzer + err := f.fuzzer.Start() + assert.NoError(t, err) + + // Check the value found for optimization test + var testCases = f.fuzzer.TestCasesWithStatus(TestCaseStatusPassed) + for _, testCase := range testCases { + if optimizationTestCase, ok := testCase.(*OptimizationTestCase); ok { + assert.EqualValues(t, optimizationTestCase.Value().Cmp(big.NewInt(4241)), 0) + } + } + }, + }) + } +} + // TestChainBehaviour runs tests to ensure the chain behaves as expected. func TestChainBehaviour(t *testing.T) { // Run a test to simulate out of gas errors to make sure its handled well by the Chain and does not panic. runFuzzerTest(t, &fuzzerSolcFileTest{ filePath: "testdata/contracts/chain/tx_out_of_gas.sol", configUpdates: func(config *config.ProjectConfig) { - config.Fuzzing.DeploymentOrder = []string{"TestContract"} + config.Fuzzing.TargetContracts = []string{"TestContract"} config.Fuzzing.Workers = 1 config.Fuzzing.TestLimit = uint64(config.Fuzzing.CallSequenceLength) // we just need a few oog txs to test config.Fuzzing.Timeout = 10 // to be safe, we set a 10s timeout config.Fuzzing.TransactionGasLimit = 500000 // we set this low, so contract execution runs out of gas earlier. 
+ config.Fuzzing.Testing.AssertionTesting.Enabled = false + config.Fuzzing.Testing.OptimizationTesting.Enabled = false }, method: func(f *fuzzerTestContext) { // Start the fuzzer @@ -157,6 +214,7 @@ func TestCheatCodes(t *testing.T) { "testdata/contracts/cheat_codes/utils/to_string.sol", "testdata/contracts/cheat_codes/utils/sign.sol", "testdata/contracts/cheat_codes/utils/parse.sol", + "testdata/contracts/cheat_codes/vm/snapshot_and_revert_to.sol", "testdata/contracts/cheat_codes/vm/coinbase.sol", "testdata/contracts/cheat_codes/vm/chain_id.sol", "testdata/contracts/cheat_codes/vm/deal.sol", @@ -185,7 +243,7 @@ func TestCheatCodes(t *testing.T) { runFuzzerTest(t, &fuzzerSolcFileTest{ filePath: filePath, configUpdates: func(config *config.ProjectConfig) { - config.Fuzzing.DeploymentOrder = []string{"TestContract"} + config.Fuzzing.TargetContracts = []string{"TestContract"} // some tests require full sequence + revert to test fully config.Fuzzing.Workers = 3 @@ -193,7 +251,10 @@ func TestCheatCodes(t *testing.T) { // enable assertion testing only config.Fuzzing.Testing.PropertyTesting.Enabled = false + config.Fuzzing.Testing.OptimizationTesting.Enabled = false config.Fuzzing.Testing.AssertionTesting.Enabled = true + + config.Fuzzing.TestChainConfig.CheatCodeConfig.CheatCodesEnabled = true config.Fuzzing.TestChainConfig.CheatCodeConfig.EnableFFI = true }, method: func(f *fuzzerTestContext) { @@ -208,6 +269,59 @@ func TestCheatCodes(t *testing.T) { } } +// TestConsoleLog tests the console.log precompile contract by logging a variety of different primitive types and +// then failing. The execution trace for the failing call sequence should hold the various logs. 
+func TestConsoleLog(t *testing.T) { + // These are the logs that should show up in the execution trace + expectedLogs := []string{ + "2", + "68656c6c6f20776f726c64", // This is "hello world" in hex + "62797465", // This is "byte" in hex + "i is 2", + "% bool is true, addr is 0x0000000000000000000000000000000000000000, u is 100", + } + + filePaths := []string{ + "testdata/contracts/cheat_codes/console_log/console_log.sol", + } + for _, filePath := range filePaths { + runFuzzerTest(t, &fuzzerSolcFileTest{ + filePath: filePath, + configUpdates: func(config *config.ProjectConfig) { + config.Fuzzing.TargetContracts = []string{"TestContract"} + config.Fuzzing.TestLimit = 10000 + config.Fuzzing.Testing.PropertyTesting.Enabled = false + config.Fuzzing.Testing.OptimizationTesting.Enabled = false + }, + method: func(f *fuzzerTestContext) { + // Start the fuzzer + err := f.fuzzer.Start() + assert.NoError(t, err) + + // Check for failed assertion tests. + failedTestCase := f.fuzzer.TestCasesWithStatus(TestCaseStatusFailed) + assert.NotEmpty(t, failedTestCase, "expected to have failed test cases") + + // Obtain our first failed test case, get the message, and verify it contains our assertion failed. 
+ failingSequence := *failedTestCase[0].CallSequence() + assert.NotEmpty(t, failingSequence, "expected to have calls in the call sequence failing an assertion test") + + // Obtain the last call + lastCall := failingSequence[len(failingSequence)-1] + assert.NotNilf(t, lastCall.ExecutionTrace, "expected to have an execution trace attached to call sequence for this test") + + // Get the execution trace message + executionTraceMsg := lastCall.ExecutionTrace.Log().String() + + // Verify it contains all expected logs + for _, expectedLog := range expectedLogs { + assert.Contains(t, executionTraceMsg, expectedLog) + } + }, + }) + } +} + // TestDeploymentsInnerDeployments runs tests to ensure dynamically deployed contracts are detected by the Fuzzer and // their properties are tested appropriately. func TestDeploymentsInnerDeployments(t *testing.T) { @@ -220,10 +334,12 @@ func TestDeploymentsInnerDeployments(t *testing.T) { runFuzzerTest(t, &fuzzerSolcFileTest{ filePath: filePath, configUpdates: func(config *config.ProjectConfig) { - config.Fuzzing.DeploymentOrder = []string{"InnerDeploymentFactory"} + config.Fuzzing.TargetContracts = []string{"InnerDeploymentFactory"} config.Fuzzing.TestLimit = 1_000 // this test should expose a failure quickly. 
config.Fuzzing.Testing.StopOnFailedContractMatching = true config.Fuzzing.Testing.TestAllContracts = true // test dynamically deployed contracts + config.Fuzzing.Testing.AssertionTesting.Enabled = false + config.Fuzzing.Testing.OptimizationTesting.Enabled = false }, method: func(f *fuzzerTestContext) { // Start the fuzzer @@ -241,10 +357,12 @@ func TestDeploymentsInnerDeployments(t *testing.T) { runFuzzerTest(t, &fuzzerSolcFileTest{ filePath: "testdata/contracts/deployments/inner_deployment_on_construction.sol", configUpdates: func(config *config.ProjectConfig) { - config.Fuzzing.DeploymentOrder = []string{"InnerDeploymentFactory"} + config.Fuzzing.TargetContracts = []string{"InnerDeploymentFactory"} config.Fuzzing.TestLimit = 1_000 // this test should expose a failure quickly. config.Fuzzing.Testing.StopOnFailedContractMatching = true config.Fuzzing.Testing.TestAllContracts = true // test dynamically deployed contracts + config.Fuzzing.Testing.AssertionTesting.Enabled = false + config.Fuzzing.Testing.OptimizationTesting.Enabled = false }, method: func(f *fuzzerTestContext) { // Start the fuzzer @@ -262,8 +380,57 @@ func TestDeploymentsInternalLibrary(t *testing.T) { runFuzzerTest(t, &fuzzerSolcFileTest{ filePath: "testdata/contracts/deployments/internal_library.sol", configUpdates: func(config *config.ProjectConfig) { - config.Fuzzing.DeploymentOrder = []string{"TestInternalLibrary"} + config.Fuzzing.TargetContracts = []string{"TestInternalLibrary"} config.Fuzzing.TestLimit = 100 // this test should expose a failure quickly. 
+ config.Fuzzing.Testing.AssertionTesting.Enabled = false + config.Fuzzing.Testing.OptimizationTesting.Enabled = false + }, + method: func(f *fuzzerTestContext) { + // Start the fuzzer + err := f.fuzzer.Start() + assert.NoError(t, err) + + // Check for any failed tests and verify coverage was captured + assertFailedTestsExpected(f, false) + assertCorpusCallSequencesCollected(f, true) + }, + }) +} + +// TestDeploymentsWithPredeploy runs a test to ensure that predeployed contracts are instantiated correctly. +func TestDeploymentsWithPredeploy(t *testing.T) { + runFuzzerTest(t, &fuzzerSolcFileTest{ + filePath: "testdata/contracts/deployments/predeploy_contract.sol", + configUpdates: func(config *config.ProjectConfig) { + config.Fuzzing.TargetContracts = []string{"TestContract"} + config.Fuzzing.TargetContractsBalances = []*big.Int{big.NewInt(1)} + config.Fuzzing.TestLimit = 1000 // this test should expose a failure immediately + config.Fuzzing.Testing.PropertyTesting.Enabled = false + config.Fuzzing.Testing.OptimizationTesting.Enabled = false + config.Fuzzing.PredeployedContracts = map[string]string{"PredeployContract": "0x1234"} + }, + method: func(f *fuzzerTestContext) { + // Start the fuzzer + err := f.fuzzer.Start() + assert.NoError(t, err) + + // Check for any failed tests and verify coverage was captured + assertFailedTestsExpected(f, true) + assertCorpusCallSequencesCollected(f, true) + }, + }) +} + +// TestDeploymentsWithPayableConstructor runs a test to ensure that we can send ether to payable constructors +func TestDeploymentsWithPayableConstructors(t *testing.T) { + runFuzzerTest(t, &fuzzerSolcFileTest{ + filePath: "testdata/contracts/deployments/deploy_payable_constructors.sol", + configUpdates: func(config *config.ProjectConfig) { + config.Fuzzing.TargetContracts = []string{"FirstContract", "SecondContract"} + config.Fuzzing.TargetContractsBalances = []*big.Int{big.NewInt(0), big.NewInt(1e18)} + config.Fuzzing.TestLimit = 1 // this should happen 
immediately + config.Fuzzing.Testing.AssertionTesting.Enabled = false + config.Fuzzing.Testing.OptimizationTesting.Enabled = false }, method: func(f *fuzzerTestContext) { // Start the fuzzer @@ -277,7 +444,7 @@ func TestDeploymentsInternalLibrary(t *testing.T) { }) } -// TestDeploymentsInnerDeployments runs a test to ensure dynamically deployed contracts are detected by the Fuzzer and +// TestDeploymentsSelfDestruct runs a test to ensure dynamically deployed contracts are detected by the Fuzzer and // their properties are tested appropriately. func TestDeploymentsSelfDestruct(t *testing.T) { // These contracts provide functions to deploy inner contracts which have properties that will produce a failure. @@ -289,8 +456,12 @@ func TestDeploymentsSelfDestruct(t *testing.T) { runFuzzerTest(t, &fuzzerSolcFileTest{ filePath: filePath, configUpdates: func(config *config.ProjectConfig) { - config.Fuzzing.DeploymentOrder = []string{"InnerDeploymentFactory"} + config.Fuzzing.TargetContracts = []string{"InnerDeploymentFactory"} config.Fuzzing.TestLimit = 500 // this test should expose a failure quickly. + config.Fuzzing.Testing.StopOnNoTests = false + config.Fuzzing.Testing.AssertionTesting.Enabled = false + config.Fuzzing.Testing.OptimizationTesting.Enabled = false + config.Fuzzing.Testing.TestAllContracts = true }, method: func(f *fuzzerTestContext) { // Subscribe to any mined block events globally. 
When receiving them, check contract changes for a @@ -323,20 +494,20 @@ func TestDeploymentsSelfDestruct(t *testing.T) { func TestExecutionTraces(t *testing.T) { expectedMessagesPerTest := map[string][]string{ "testdata/contracts/execution_tracing/call_and_deployment_args.sol": {"Hello from deployment args!", "Hello from call args!"}, - "testdata/contracts/execution_tracing/cheatcodes.sol": {"StdCheats.toString(true)"}, + "testdata/contracts/execution_tracing/cheatcodes.sol": {"StdCheats.toString(bool)(true)"}, "testdata/contracts/execution_tracing/event_emission.sol": {"TestEvent", "TestIndexedEvent", "TestMixedEvent", "Hello from event args!", "Hello from library event args!"}, "testdata/contracts/execution_tracing/proxy_call.sol": {"TestContract -> InnerDeploymentContract.setXY", "Hello from proxy call args!"}, "testdata/contracts/execution_tracing/revert_custom_error.sol": {"CustomError", "Hello from a custom error!"}, "testdata/contracts/execution_tracing/revert_reasons.sol": {"RevertingContract was called and reverted."}, - "testdata/contracts/execution_tracing/self_destruct.sol": {"[selfdestruct]", "[assertion failed]"}, + "testdata/contracts/execution_tracing/self_destruct.sol": {"[selfdestruct]", "[panic: assertion failed]"}, } for filePath, expectedTraceMessages := range expectedMessagesPerTest { runFuzzerTest(t, &fuzzerSolcFileTest{ filePath: filePath, configUpdates: func(config *config.ProjectConfig) { - config.Fuzzing.DeploymentOrder = []string{"TestContract"} + config.Fuzzing.TargetContracts = []string{"TestContract"} config.Fuzzing.Testing.PropertyTesting.Enabled = false - config.Fuzzing.Testing.AssertionTesting.Enabled = true + config.Fuzzing.Testing.OptimizationTesting.Enabled = false }, method: func(f *fuzzerTestContext) { // Start the fuzzer @@ -356,7 +527,7 @@ func TestExecutionTraces(t *testing.T) { assert.NotNilf(t, lastCall.ExecutionTrace, "expected to have an execution trace attached to call sequence for this test") // Get the execution 
trace message - executionTraceMsg := lastCall.ExecutionTrace.String() + executionTraceMsg := lastCall.ExecutionTrace.Log().String() // Verify it contains all expected strings for _, expectedTraceMessage := range expectedTraceMessages { @@ -374,12 +545,11 @@ func TestTestingScope(t *testing.T) { runFuzzerTest(t, &fuzzerSolcFileTest{ filePath: "testdata/contracts/deployments/testing_scope.sol", configUpdates: func(config *config.ProjectConfig) { - config.Fuzzing.DeploymentOrder = []string{"TestContract"} + config.Fuzzing.TargetContracts = []string{"TestContract"} config.Fuzzing.TestLimit = 1_000 // this test should expose a failure quickly. config.Fuzzing.Testing.TestAllContracts = testingAllContracts config.Fuzzing.Testing.StopOnFailedTest = false - config.Fuzzing.Testing.AssertionTesting.Enabled = true - config.Fuzzing.Testing.PropertyTesting.Enabled = true + config.Fuzzing.Testing.OptimizationTesting.Enabled = false }, method: func(f *fuzzerTestContext) { // Start the fuzzer @@ -409,7 +579,7 @@ func TestDeploymentsWithArgs(t *testing.T) { runFuzzerTest(t, &fuzzerSolcFileTest{ filePath: "testdata/contracts/deployments/deployment_with_args.sol", configUpdates: func(config *config.ProjectConfig) { - config.Fuzzing.DeploymentOrder = []string{"DeploymentWithArgs", "Dependent"} + config.Fuzzing.TargetContracts = []string{"DeploymentWithArgs", "Dependent"} config.Fuzzing.ConstructorArgs = map[string]map[string]any{ "DeploymentWithArgs": { "_x": "123456789", @@ -425,6 +595,8 @@ func TestDeploymentsWithArgs(t *testing.T) { } config.Fuzzing.Testing.StopOnFailedTest = false config.Fuzzing.TestLimit = 500 // this test should expose a failure quickly. 
+ config.Fuzzing.Testing.AssertionTesting.Enabled = false + config.Fuzzing.Testing.OptimizationTesting.Enabled = false }, method: func(f *fuzzerTestContext) { // Start the fuzzer @@ -442,8 +614,10 @@ func TestValueGenerationGenerateAllTypes(t *testing.T) { runFuzzerTest(t, &fuzzerSolcFileTest{ filePath: "testdata/contracts/value_generation/generate_all_types.sol", configUpdates: func(config *config.ProjectConfig) { - config.Fuzzing.DeploymentOrder = []string{"GenerateAllTypes"} + config.Fuzzing.TargetContracts = []string{"GenerateAllTypes"} config.Fuzzing.TestLimit = 10_000 + config.Fuzzing.Testing.AssertionTesting.Enabled = false + config.Fuzzing.Testing.OptimizationTesting.Enabled = false }, method: func(f *fuzzerTestContext) { // Start the fuzzer @@ -475,7 +649,9 @@ func TestValueGenerationSolving(t *testing.T) { runFuzzerTest(t, &fuzzerSolcFileTest{ filePath: filePath, configUpdates: func(config *config.ProjectConfig) { - config.Fuzzing.DeploymentOrder = []string{"TestContract"} + config.Fuzzing.TargetContracts = []string{"TestContract"} + config.Fuzzing.Testing.AssertionTesting.Enabled = false + config.Fuzzing.Testing.OptimizationTesting.Enabled = false }, method: func(f *fuzzerTestContext) { // Start the fuzzer @@ -490,6 +666,76 @@ func TestValueGenerationSolving(t *testing.T) { } } +// TestASTValueExtraction runs a test to ensure appropriate AST values can be mined out of a compiled source's AST. +func TestASTValueExtraction(t *testing.T) { + // Define our expected values to be mined. 
+ expectedAddresses := []common.Address{ + common.HexToAddress("0x7109709ECfa91a80626fF3989D68f67F5b1DD12D"), + common.HexToAddress("0x1234567890123456789012345678901234567890"), + } + expectedIntegers := []string{ + // Unsigned integer tests + "111", // no denomination + "1", // 1 wei (base unit) + "2000000000", // 2 gwei + "5000000000000000000", // 5 ether + "6", // 6 seconds (base unit) + "420", // 7 minutes + "28800", // 8 hours + "777600", // 9 days + "6048000", // 10 weeks + + // Signed integer tests + "-111", // no denomination + "-1", // 1 wei (base unit) + "-2000000000", // 2 gwei + "-5000000000000000000", // 5 ether + "-6", // 6 seconds (base unit) + "-420", // 7 minutes + "-28800", // 8 hours + "-777600", // 9 days + "-6048000", // 10 weeks + } + expectedStrings := []string{ + "testString", + "testString2", + } + expectedByteSequences := make([][]byte, 0) // no tests yet + + // Run the fuzzer test + runFuzzerTest(t, &fuzzerSolcFileTest{ + filePath: "testdata/contracts/value_generation/ast_value_extraction.sol", + configUpdates: func(config *config.ProjectConfig) { + config.Fuzzing.TestLimit = 1 // stop immediately to simply see what values were mined. 
+ config.Fuzzing.Testing.PropertyTesting.Enabled = false + config.Fuzzing.Testing.OptimizationTesting.Enabled = false + config.Fuzzing.TargetContracts = []string{"TestContract"} + }, + method: func(f *fuzzerTestContext) { + // Start the fuzzer + err := f.fuzzer.Start() + assert.NoError(t, err) + + // Verify all of our expected values exist + valueSet := f.fuzzer.BaseValueSet() + for _, expectedAddr := range expectedAddresses { + assert.True(t, valueSet.ContainsAddress(expectedAddr), "Value set did not contain expected address: %v", expectedAddr.String()) + } + for _, expectedIntegerStr := range expectedIntegers { + expectedInteger, ok := new(big.Int).SetString(expectedIntegerStr, 10) + assert.True(t, ok, "Could not parse provided expected integer string in test: \"%v\"", expectedIntegerStr) + assert.True(t, valueSet.ContainsInteger(expectedInteger), "Value set did not contain expected integer: %v", expectedInteger.String()) + } + for _, expectedString := range expectedStrings { + assert.True(t, valueSet.ContainsString(expectedString), "Value set did not contain expected string: \"%v\"", expectedString) + } + for _, expectedByteSequence := range expectedByteSequences { + assert.True(t, valueSet.ContainsBytes(expectedByteSequence), "Value set did not contain expected bytes: \"%v\"", hex.EncodeToString(expectedByteSequence)) + } + }, + }) +} + // TestVMCorrectness runs tests to ensure block properties are reported consistently within the EVM, as it's configured // by the chain.TestChain. 
func TestVMCorrectness(t *testing.T) { @@ -497,9 +743,11 @@ func TestVMCorrectness(t *testing.T) { runFuzzerTest(t, &fuzzerSolcFileTest{ filePath: "testdata/contracts/vm_tests/block_number_increasing.sol", configUpdates: func(config *config.ProjectConfig) { - config.Fuzzing.DeploymentOrder = []string{"TestContract"} + config.Fuzzing.TargetContracts = []string{"TestContract"} config.Fuzzing.MaxBlockTimestampDelay = 1 // this contract require calls every block config.Fuzzing.MaxBlockNumberDelay = 1 // this contract require calls every block + config.Fuzzing.Testing.AssertionTesting.Enabled = false + config.Fuzzing.Testing.OptimizationTesting.Enabled = false }, method: func(f *fuzzerTestContext) { // Start the fuzzer @@ -516,7 +764,7 @@ func TestVMCorrectness(t *testing.T) { runFuzzerTest(t, &fuzzerSolcFileTest{ filePath: "testdata/contracts/vm_tests/block_number_increasing.sol", configUpdates: func(config *config.ProjectConfig) { - config.Fuzzing.DeploymentOrder = []string{"TestContract"} + config.Fuzzing.TargetContracts = []string{"TestContract"} config.Fuzzing.MaxBlockTimestampDelay = 1 // this contract require calls every block config.Fuzzing.MaxBlockNumberDelay = 1 // this contract require calls every block }, @@ -535,7 +783,7 @@ func TestVMCorrectness(t *testing.T) { runFuzzerTest(t, &fuzzerSolcFileTest{ filePath: "testdata/contracts/vm_tests/block_hash_store_check.sol", configUpdates: func(config *config.ProjectConfig) { - config.Fuzzing.DeploymentOrder = []string{"TestContract"} + config.Fuzzing.TargetContracts = []string{"TestContract"} config.Fuzzing.TestLimit = 1_000 // this test should expose a failure quickly. 
config.Fuzzing.MaxBlockTimestampDelay = 1 // this contract require calls every block config.Fuzzing.MaxBlockNumberDelay = 1 // this contract require calls every block @@ -560,8 +808,10 @@ func TestCorpusReplayability(t *testing.T) { runFuzzerTest(t, &fuzzerSolcFileTest{ filePath: "testdata/contracts/value_generation/match_uints_xy.sol", configUpdates: func(config *config.ProjectConfig) { - config.Fuzzing.DeploymentOrder = []string{"TestContract"} + config.Fuzzing.TargetContracts = []string{"TestContract"} config.Fuzzing.CorpusDirectory = "corpus" + config.Fuzzing.Testing.AssertionTesting.Enabled = false + config.Fuzzing.Testing.OptimizationTesting.Enabled = false }, method: func(f *fuzzerTestContext) { // Setup checks for event emissions @@ -576,7 +826,8 @@ func TestCorpusReplayability(t *testing.T) { // Cache current coverage maps originalCoverage := f.fuzzer.corpus.CoverageMaps() - originalCorpusSequenceCount := f.fuzzer.corpus.CallSequenceCount() + originalTotalCallSequences, originalTotalTestResults := f.fuzzer.corpus.CallSequenceEntryCount() + originalCorpusSequenceCount := originalTotalCallSequences + originalTotalTestResults // Next, set the fuzzer worker count to one, this allows us to count the call sequences executed before // solving a problem. 
We will verify the problem is solved with less or equal sequences tested, than @@ -590,8 +841,16 @@ func TestCorpusReplayability(t *testing.T) { assertCorpusCallSequencesCollected(f, true) newCoverage := f.fuzzer.corpus.CoverageMaps() - // Check to see if original and new coverage are the same - assert.True(t, originalCoverage.Equals(newCoverage)) + // Check to see if original and new coverage are the same (disregarding hit count) + successCovIncreased, revertCovIncreased, err := originalCoverage.Update(newCoverage) + assert.False(t, successCovIncreased) + assert.False(t, revertCovIncreased) + assert.NoError(t, err) + + successCovIncreased, revertCovIncreased, err = newCoverage.Update(originalCoverage) + assert.False(t, successCovIncreased) + assert.False(t, revertCovIncreased) + assert.NoError(t, err) // Verify that the fuzzer finished after fewer sequences than there are in the corpus assert.LessOrEqual(t, f.fuzzer.metrics.SequencesTested().Uint64(), uint64(originalCorpusSequenceCount)) @@ -599,14 +858,16 @@ func TestCorpusReplayability(t *testing.T) { }) } -// TestDeploymentOrderWithCoverage will ensure that changing the deployment order does not lead to the same coverage -// This is also proof that changing the order changes the addresses of the contracts leading to the coverage not being -// useful. +// TestDeploymentOrderWithCoverage will ensure that changing the order of deployment for the target contracts does not +// lead to the same coverage. This is also proof that changing the order changes the addresses of the contracts leading +// to the coverage not being useful. 
func TestDeploymentOrderWithCoverage(t *testing.T) { runFuzzerTest(t, &fuzzerSolcFileTest{ filePath: "testdata/contracts/deployments/deployment_order.sol", configUpdates: func(config *config.ProjectConfig) { - config.Fuzzing.DeploymentOrder = []string{"InheritedFirstContract", "InheritedSecondContract"} + config.Fuzzing.TargetContracts = []string{"InheritedFirstContract", "InheritedSecondContract"} + config.Fuzzing.Testing.AssertionTesting.Enabled = false + config.Fuzzing.Testing.OptimizationTesting.Enabled = false }, method: func(f *fuzzerTestContext) { // Setup checks for event emissions @@ -629,8 +890,8 @@ func TestDeploymentOrderWithCoverage(t *testing.T) { return nil }) - // Update the deployment order - f.fuzzer.config.Fuzzing.DeploymentOrder = []string{"InheritedSecondContract", "InheritedFirstContract"} + // Update the order of target contracts + f.fuzzer.config.Fuzzing.TargetContracts = []string{"InheritedSecondContract", "InheritedFirstContract"} // Note that the fuzzer won't spin up any workers or fuzz anything. 
We just want to test that the coverage // maps don't populate due to deployment order changes @@ -639,7 +900,49 @@ func TestDeploymentOrderWithCoverage(t *testing.T) { // Check to see if original and new coverage are the same newCoverage := f.fuzzer.corpus.CoverageMaps() - assert.False(t, originalCoverage.Equals(newCoverage)) + assert.False(t, originalCoverage.Equal(newCoverage)) }, }) } + +// TestTargetingFuncSignatures tests whether functions will be correctly whitelisted for testing +func TestTargetingFuncSignatures(t *testing.T) { + targets := []string{"TestContract.f(), TestContract.g()"} + runFuzzerTest(t, &fuzzerSolcFileTest{ + filePath: "testdata/contracts/filtering/target_and_exclude.sol", + configUpdates: func(config *config.ProjectConfig) { + config.Fuzzing.TargetContracts = []string{"TestContract"} + config.Fuzzing.Testing.TargetFunctionSignatures = targets + }, + method: func(f *fuzzerTestContext) { + for _, contract := range f.fuzzer.ContractDefinitions() { + // The targets should be the only functions tested, excluding h and i + reflect.DeepEqual(contract.AssertionTestMethods, targets) + + // ALL properties and optimizations should be tested + reflect.DeepEqual(contract.PropertyTestMethods, []string{"TestContract.property_a()"}) + reflect.DeepEqual(contract.OptimizationTestMethods, []string{"TestContract.optimize_b()"}) + } + }}) +} + +// TestExcludeFunctionSignatures tests whether functions will be blacklisted/excluded for testing +func TestExcludeFunctionSignatures(t *testing.T) { + excluded := []string{"TestContract.f(), TestContract.g()"} + runFuzzerTest(t, &fuzzerSolcFileTest{ + filePath: "testdata/contracts/filtering/target_and_exclude.sol", + configUpdates: func(config *config.ProjectConfig) { + config.Fuzzing.TargetContracts = []string{"TestContract"} + config.Fuzzing.Testing.ExcludeFunctionSignatures = excluded + }, + method: func(f *fuzzerTestContext) { + for _, contract := range f.fuzzer.ContractDefinitions() { + // Only h and i should be 
test since f and g are excluded + reflect.DeepEqual(contract.AssertionTestMethods, []string{"TestContract.h()", "TestContract.i()"}) + + // ALL properties and optimizations should be tested + reflect.DeepEqual(contract.PropertyTestMethods, []string{"TestContract.property_a()"}) + reflect.DeepEqual(contract.OptimizationTestMethods, []string{"TestContract.optimize_b()"}) + } + }}) +} diff --git a/fuzzing/fuzzer_test_methods_test.go b/fuzzing/fuzzer_test_methods_test.go index b4db1d51..80a7f152 100644 --- a/fuzzing/fuzzer_test_methods_test.go +++ b/fuzzing/fuzzer_test_methods_test.go @@ -80,14 +80,17 @@ func assertFailedTestsExpected(f *fuzzerTestContext, expectFailure bool) { // assertCorpusCallSequencesCollected will check to see whether we captured coverage-increasing call sequences in the // corpus. It asserts that the actual result matches the provided expected result. func assertCorpusCallSequencesCollected(f *fuzzerTestContext, expectCallSequences bool) { + // Obtain our count of mutable (often representing just non-reverted coverage increasing) sequences. + callSequenceCount, _ := f.fuzzer.corpus.CallSequenceEntryCount() + // Ensure we captured some coverage-increasing call sequences. 
if expectCallSequences { - assert.Greater(f.t, f.fuzzer.corpus.CallSequenceCount(), 0, "No coverage was captured") + assert.Greater(f.t, callSequenceCount, 0, "No coverage was captured") } // If we don't expect coverage-increasing call sequences, or it is not enabled, we should not get any coverage if !expectCallSequences || !f.fuzzer.config.Fuzzing.CoverageEnabled { - assert.EqualValues(f.t, 0, f.fuzzer.corpus.CallSequenceCount(), "Coverage was captured") + assert.EqualValues(f.t, 0, callSequenceCount, "Coverage was captured") } } diff --git a/fuzzing/fuzzer_worker.go b/fuzzing/fuzzer_worker.go index bc9b9c96..8ddeb78c 100644 --- a/fuzzing/fuzzer_worker.go +++ b/fuzzing/fuzzer_worker.go @@ -2,6 +2,9 @@ package fuzzing import ( "fmt" + "math/big" + "math/rand" + "github.com/crytic/medusa/chain" "github.com/crytic/medusa/fuzzing/calls" fuzzerTypes "github.com/crytic/medusa/fuzzing/contracts" @@ -10,8 +13,6 @@ import ( "github.com/crytic/medusa/utils" "github.com/ethereum/go-ethereum/common" "golang.org/x/exp/maps" - "math/big" - "math/rand" ) // FuzzerWorker describes a single thread worker utilizing its own go-ethereum test node to run property tests against @@ -33,16 +34,25 @@ type FuzzerWorker struct { // deployedContracts describes a mapping of deployed contractDefinitions and the addresses they were deployed to. deployedContracts map[common.Address]*fuzzerTypes.Contract + // stateChangingMethods is a list of contract functions which are suspected of changing contract state // (non-read-only). A sequence of calls is generated by the FuzzerWorker, targeting stateChangingMethods // before executing tests. stateChangingMethods []fuzzerTypes.DeployedContractMethod + // pureMethods is a list of contract functions which are side-effect free with respect to the EVM (view and/or pure in terms of Solidity mutability). + pureMethods []fuzzerTypes.DeployedContractMethod + // randomProvider provides random data as inputs to decisions throughout the worker. 
randomProvider *rand.Rand // sequenceGenerator creates entirely new or mutated call sequences based on corpus call sequences, for use in // fuzzing campaigns. sequenceGenerator *CallSequenceGenerator + + // shrinkingValueMutator is a value mutator which is used to mutate existing call sequence values in an attempt to shrink + // their values, in the call sequence shrinking process. + shrinkingValueMutator valuegeneration.ValueMutator + // valueSet defines a set derived from Fuzzer.BaseValueSet which is further populated with runtime values by the // FuzzerWorker. It is the value set shared with the underlying valueGenerator. valueSet *valuegeneration.ValueSet @@ -64,17 +74,25 @@ func newFuzzerWorker(fuzzer *Fuzzer, workerIndex int, randomProvider *rand.Rand) return nil, err } + // Create a new shrinking value mutator for this new worker. + shrinkingValueMutator, err := fuzzer.Hooks.NewShrinkingValueMutatorFunc(fuzzer, valueSet, randomProvider) + if err != nil { + return nil, err + } + // Create a new worker with the data provided. worker := &FuzzerWorker{ workerIndex: workerIndex, fuzzer: fuzzer, deployedContracts: make(map[common.Address]*fuzzerTypes.Contract), stateChangingMethods: make([]fuzzerTypes.DeployedContractMethod, 0), + pureMethods: make([]fuzzerTypes.DeployedContractMethod, 0), coverageTracer: nil, randomProvider: randomProvider, valueSet: valueSet, } worker.sequenceGenerator = NewCallSequenceGenerator(worker, callSequenceGenConfig) + worker.shrinkingValueMutator = shrinkingValueMutator return worker, nil } @@ -124,6 +142,11 @@ func (fw *FuzzerWorker) ValueGenerator() valuegeneration.ValueGenerator { return fw.sequenceGenerator.config.ValueGenerator } +// ValueMutator obtains the value mutator used by this worker. 
+func (fw *FuzzerWorker) ValueMutator() valuegeneration.ValueMutator { + return fw.sequenceGenerator.config.ValueMutator +} + // getNewCorpusCallSequenceWeight returns a big integer representing the weight that a new corpus item being added now // should have in the corpus' weighted random chooser. func (fw *FuzzerWorker) getNewCorpusCallSequenceWeight() *big.Int { @@ -134,6 +157,13 @@ func (fw *FuzzerWorker) getNewCorpusCallSequenceWeight() *big.Int { // onChainContractDeploymentAddedEvent is the event callback used when the chain detects a new contract deployment. // It attempts bytecode matching and updates the list of deployed contracts the worker should use for fuzz testing. func (fw *FuzzerWorker) onChainContractDeploymentAddedEvent(event chain.ContractDeploymentsAddedEvent) error { + // Do not track the deployed contract if the contract deployment was a dynamic one and testAllContracts is false + if !fw.fuzzer.config.Fuzzing.Testing.TestAllContracts && event.DynamicDeployment { + // Add the contract address to our value set so our generator can use it in calls. + fw.valueSet.AddAddress(event.Contract.Address) + return nil + } + // Add the contract address to our value set so our generator can use it in calls. fw.valueSet.AddAddress(event.Contract.Address) @@ -151,8 +181,8 @@ func (fw *FuzzerWorker) onChainContractDeploymentAddedEvent(event chain.Contract // Set our deployed contract address in our deployed contract lookup, so we can reference it later. fw.deployedContracts[event.Contract.Address] = matchedDefinition - // Update our state changing methods - fw.updateStateChangingMethods() + // Update our methods + fw.updateMethods() // Emit an event indicating the worker detected a new contract deployment on its chain. 
err := fw.Events.ContractAdded.Publish(FuzzerWorkerContractAddedEvent{ @@ -182,8 +212,8 @@ func (fw *FuzzerWorker) onChainContractDeploymentRemovedEvent(event chain.Contra // Remove the contract from our deployed contracts mapping the worker maintains. delete(fw.deployedContracts, event.Contract.Address) - // Update our state changing methods - fw.updateStateChangingMethods() + // Update our methods + fw.updateMethods() // Emit an event indicating the worker detected the removal of a previously deployed contract on its chain. err := fw.Events.ContractDeleted.Publish(FuzzerWorkerContractDeletedEvent{ @@ -197,30 +227,36 @@ func (fw *FuzzerWorker) onChainContractDeploymentRemovedEvent(event chain.Contra return nil } -// updateStateChangingMethods updates the list of state changing methods used by the worker by re-evaluating them +// updateMethods updates the list of methods used by the worker by re-evaluating them // from the deployedContracts lookup. -func (fw *FuzzerWorker) updateStateChangingMethods() { - // Clear our list of state changing methods +func (fw *FuzzerWorker) updateMethods() { + // Clear our list of methods fw.stateChangingMethods = make([]fuzzerTypes.DeployedContractMethod, 0) + fw.pureMethods = make([]fuzzerTypes.DeployedContractMethod, 0) // Loop through each deployed contract for contractAddress, contractDefinition := range fw.deployedContracts { // If we deployed the contract, also enumerate property tests and state changing methods. - for _, method := range contractDefinition.CompiledContract().Abi.Methods { - if !method.IsConstant() { - // Any non-constant method should be tracked as a state changing method. + for _, method := range contractDefinition.AssertionTestMethods { + // Any non-constant method should be tracked as a state changing method. 
+ if method.IsConstant() { + // Only track the pure/view method if testing view methods is enabled + if fw.fuzzer.config.Fuzzing.Testing.AssertionTesting.TestViewMethods { + fw.pureMethods = append(fw.pureMethods, fuzzerTypes.DeployedContractMethod{Address: contractAddress, Contract: contractDefinition, Method: method}) + } + } else { fw.stateChangingMethods = append(fw.stateChangingMethods, fuzzerTypes.DeployedContractMethod{Address: contractAddress, Contract: contractDefinition, Method: method}) } } } } -// testCallSequence tests a call message sequence against the underlying FuzzerWorker's Chain and calls every +// testNextCallSequence tests a call message sequence against the underlying FuzzerWorker's Chain and calls every // CallSequenceTestFunc registered with the parent Fuzzer to update any test results. If any call message in the // sequence is nil, a call message will be created in its place, targeting a state changing method of a contract // deployed in the Chain. // Returns the length of the call sequence tested, any requests for call sequence shrinking, or an error if one occurs. -func (fw *FuzzerWorker) testCallSequence() (calls.CallSequence, []ShrinkCallSequenceRequest, error) { +func (fw *FuzzerWorker) testNextCallSequence() (calls.CallSequence, []ShrinkCallSequenceRequest, error) { // After testing the sequence, we'll want to rollback changes to reset our testing state. var err error defer func() { @@ -250,7 +286,7 @@ func (fw *FuzzerWorker) testCallSequence() (calls.CallSequence, []ShrinkCallSequ executionCheckFunc := func(currentlyExecutedSequence calls.CallSequence) (bool, error) { // Check for updates to coverage and corpus. 
// If we detect coverage changes, add this sequence with weight as 1 + sequences tested (to avoid zero weights) - err := fw.fuzzer.corpus.AddCallSequenceIfCoverageChanged(currentlyExecutedSequence, fw.getNewCorpusCallSequenceWeight(), true) + err := fw.fuzzer.corpus.CheckSequenceCoverageAndUpdate(currentlyExecutedSequence, fw.getNewCorpusCallSequenceWeight(), true) if err != nil { return true, err } @@ -267,6 +303,8 @@ func (fw *FuzzerWorker) testCallSequence() (calls.CallSequence, []ShrinkCallSequ // Update our metrics fw.workerMetrics().callsTested.Add(fw.workerMetrics().callsTested, big.NewInt(1)) + lastCallSequenceElement := currentlyExecutedSequence[len(currentlyExecutedSequence)-1] + fw.workerMetrics().gasUsed.Add(fw.workerMetrics().gasUsed, new(big.Int).SetUint64(lastCallSequenceElement.ChainReference.Block.MessageResults[lastCallSequenceElement.ChainReference.TransactionIndex].Receipt.GasUsed)) // If our fuzzer context is done, exit out immediately without results. if utils.CheckContextDone(fw.fuzzer.ctx) { @@ -292,8 +330,8 @@ func (fw *FuzzerWorker) testCallSequence() (calls.CallSequence, []ShrinkCallSequ // If this was not a new call sequence, indicate not to save the shrunken result to the corpus again. if !isNewSequence { - for _, shrinkRequest := range shrinkCallSequenceRequests { - shrinkRequest.RecordResultInCorpus = false + for i := 0; i < len(shrinkCallSequenceRequests); i++ { + shrinkCallSequenceRequests[i].RecordResultInCorpus = false } } @@ -301,13 +339,11 @@ func (fw *FuzzerWorker) testCallSequence() (calls.CallSequence, []ShrinkCallSequ return testedCallSequence, shrinkCallSequenceRequests, nil } -// shrinkCallSequence takes a provided call sequence and attempts to shrink it by looking for redundant -// calls which can be removed that continue to satisfy the provided shrink verifier. -// Returns a call sequence that was optimized to include as little calls as possible to trigger the -// expected conditions, or an error if one occurred. 
-func (fw *FuzzerWorker) shrinkCallSequence(callSequence calls.CallSequence, shrinkRequest ShrinkCallSequenceRequest) (calls.CallSequence, error) { - // In case of any error, we defer an operation to revert our chain state. We purposefully ignore errors from it to - // prioritize any others which occurred. +// testShrunkenCallSequence tests a provided shrunken call sequence to verify it continues to satisfy the provided +// shrink verifier. Chain state is reverted to the testing base prior to returning. +// Returns a boolean indicating if the shrunken call sequence is valid for a given shrink request, or an error if one occurred. +func (fw *FuzzerWorker) testShrunkenCallSequence(possibleShrunkSequence calls.CallSequence, shrinkRequest ShrinkCallSequenceRequest) (bool, error) { + // After testing the sequence, we'll want to rollback changes to reset our testing state. var err error defer func() { if err == nil { @@ -315,91 +351,167 @@ func (fw *FuzzerWorker) shrinkCallSequence(callSequence calls.CallSequence, shri } }() - // Define a variable to track our most optimized sequence across all optimization iterations. - optimizedSequence := callSequence - - for i := 0; i < len(optimizedSequence); { - // Recreate our current optimized sequence without the item at this index - possibleShrunkSequence, err := optimizedSequence.Clone() - if err != nil { - return nil, err + // Our "fetch next call method" method will simply fetch and fix the call message in case any fields are not correct due to shrinking. + fetchElementFunc := func(currentIndex int) (*calls.CallSequenceElement, error) { + // If we are at the end of our sequence, return nil indicating we should stop executing. + if currentIndex >= len(possibleShrunkSequence) { + return nil, nil } - possibleShrunkSequence = append(possibleShrunkSequence[:i], possibleShrunkSequence[i+1:]...) - // Our "fetch next call method" method will simply fetch and fix the call message in case any fields are not correct due to shrinking. 
- fetchElementFunc := func(currentIndex int) (*calls.CallSequenceElement, error) { - // If we are at the end of our sequence, return nil indicating we should stop executing. - if currentIndex >= len(possibleShrunkSequence) { - return nil, nil - } + possibleShrunkSequence[currentIndex].Call.FillFromTestChainProperties(fw.chain) + return possibleShrunkSequence[currentIndex], nil + } - possibleShrunkSequence[currentIndex].Call.FillFromTestChainProperties(fw.chain) - return possibleShrunkSequence[currentIndex], nil + // Our "post-execution check" method will check coverage and call all testing functions. If one returns a + // request for a shrunk call sequence, we exit our call sequence execution immediately to go fulfill the shrink + // request. + executionCheckFunc := func(currentlyExecutedSequence calls.CallSequence) (bool, error) { + // Check for updates to coverage and corpus (using only the section of the sequence we tested so far). + // If we detect coverage changes, add this sequence. + seqErr := fw.fuzzer.corpus.CheckSequenceCoverageAndUpdate(currentlyExecutedSequence, fw.getNewCorpusCallSequenceWeight(), true) + if seqErr != nil { + return true, seqErr } - // Our "post-execution check" method will check coverage and call all testing functions. If one returns a - // request for a shrunk call sequence, we exit our call sequence execution immediately to go fulfill the shrink - // request. - executionCheckFunc := func(currentlyExecutedSequence calls.CallSequence) (bool, error) { - // Check for updates to coverage and corpus (using only the section of the sequence we tested so far). - // If we detect coverage changes, add this sequence. - err := fw.fuzzer.corpus.AddCallSequenceIfCoverageChanged(currentlyExecutedSequence, fw.getNewCorpusCallSequenceWeight(), true) - if err != nil { - return true, err - } + // If our fuzzer context is done, exit out immediately without results. 
+ if utils.CheckContextDone(fw.fuzzer.ctx) { + return true, nil + } - // If our fuzzer context is done, exit out immediately without results. - if utils.CheckContextDone(fw.fuzzer.ctx) { - return true, nil - } + return false, nil + } - return false, nil - } + // Execute our call sequence. + _, err = calls.ExecuteCallSequenceIteratively(fw.chain, fetchElementFunc, executionCheckFunc) + if err != nil { + return false, err + } + + // If our fuzzer context is done, exit out immediately without results. + if utils.CheckContextDone(fw.fuzzer.ctx) { + return false, nil + } - // Execute our call sequence. - testedPossibleShrunkSequence, err := calls.ExecuteCallSequenceIteratively(fw.chain, fetchElementFunc, executionCheckFunc) + // Check if our verifier signalled that we met our conditions + validShrunkSequence := false + if len(possibleShrunkSequence) > 0 { + validShrunkSequence, err = shrinkRequest.VerifierFunction(fw, possibleShrunkSequence) if err != nil { - return nil, err + return false, err } + } + return validShrunkSequence, nil +} - // If our fuzzer context is done, exit out immediately without results. - if utils.CheckContextDone(fw.fuzzer.ctx) { - return nil, nil - } +// shrinkCallSequence takes a provided call sequence and attempts to shrink it by looking for redundant +// calls which can be removed, and values which can be minimized, while continuing to satisfy the provided shrink +// verifier. +// +// This function should *always* be called if there are shrink requests, and should always report a result, +// even if it is the original sequence provided. +// +// Returns a call sequence that was optimized to include as little calls as possible to trigger the +// expected conditions, or an error if one occurred. +func (fw *FuzzerWorker) shrinkCallSequence(callSequence calls.CallSequence, shrinkRequest ShrinkCallSequenceRequest) (calls.CallSequence, error) { + // Define a variable to track our most optimized sequence across all optimization iterations. 
+ optimizedSequence := callSequence - // Check if our verifier signalled that we met our conditions - validShrunkSequence := false - if len(testedPossibleShrunkSequence) > 0 { - validShrunkSequence, err = shrinkRequest.VerifierFunction(fw, testedPossibleShrunkSequence) - if err != nil { - return nil, err + // Obtain our shrink limits and begin shrinking. + shrinkIteration := uint64(0) + shrinkLimit := fw.fuzzer.config.Fuzzing.ShrinkLimit + shrinkingEnded := func() bool { + return shrinkIteration >= shrinkLimit || utils.CheckContextDone(fw.fuzzer.ctx) + } + if shrinkLimit > 0 { + // The first pass of shrinking is greedy towards trying to remove any unnecessary calls. + // For each call in the sequence, the following removal strategies are used: + // 1) Plain removal (lower block/time gap between surrounding blocks, maintain properties of max delay) + // 2) Add block/time delay to previous call (retain original block/time, possibly exceed max delays) + // At worst, this costs `2 * len(callSequence)` shrink iterations. + fw.workerMetrics().shrinking = true + fw.fuzzer.logger.Info(fmt.Sprintf("[Worker %d] Shrinking call sequence with %d call(s)", fw.workerIndex, len(callSequence))) + + for removalStrategy := 0; removalStrategy < 2 && !shrinkingEnded(); removalStrategy++ { + for i := len(optimizedSequence) - 1; i >= 0 && !shrinkingEnded(); i-- { + // Recreate our current optimized sequence without the item at this index + possibleShrunkSequence, err := optimizedSequence.Clone() + removedCall := possibleShrunkSequence[i] + if err != nil { + return nil, err + } + possibleShrunkSequence = append(possibleShrunkSequence[:i], possibleShrunkSequence[i+1:]...) + + // Exercise the next removal strategy for this call. + if removalStrategy == 0 { + // Case 1: Plain removal. + } else if removalStrategy == 1 { + // Case 2: Add block/time delay to previous call. 
+ if i > 0 { + possibleShrunkSequence[i-1].BlockNumberDelay += removedCall.BlockNumberDelay + possibleShrunkSequence[i-1].BlockTimestampDelay += removedCall.BlockTimestampDelay + } + } + + // Test the shrunken sequence. + validShrunkSequence, err := fw.testShrunkenCallSequence(possibleShrunkSequence, shrinkRequest) + shrinkIteration++ + if err != nil { + return nil, err + } + + // If the current sequence satisfied our conditions, set it as our optimized sequence. + if validShrunkSequence { + optimizedSequence = possibleShrunkSequence + } } } - // After testing the sequence, we'll want to rollback changes to reset our testing state. - if err = fw.chain.RevertToBlockNumber(fw.testingBaseBlockNumber); err != nil { - return nil, err - } - - // If this current sequence satisfied our conditions, set it as our optimized sequence. - if validShrunkSequence { - optimizedSequence = testedPossibleShrunkSequence - } else { - // We didn't remove an item at this index, so we'll iterate to the next one. - i++ + // The second pass of shrinking attempts to shrink values for each call in our call sequence. + // This is performed exhaustively in a round-robin fashion for each call, until the shrink limit is hit. + for !shrinkingEnded() { + for i := len(optimizedSequence) - 1; i >= 0 && !shrinkingEnded(); i-- { + // Clone the optimized sequence. + possibleShrunkSequence, _ := optimizedSequence.Clone() + + // Loop for each argument in the currently indexed call to mutate it. 
+ abiValuesMsgData := possibleShrunkSequence[i].Call.DataAbiValues + for j := 0; j < len(abiValuesMsgData.InputValues); j++ { + mutatedInput, err := valuegeneration.MutateAbiValue(fw.sequenceGenerator.config.ValueGenerator, fw.shrinkingValueMutator, &abiValuesMsgData.Method.Inputs[j].Type, abiValuesMsgData.InputValues[j]) + if err != nil { + return nil, fmt.Errorf("error when shrinking call sequence input argument: %v", err) + } + abiValuesMsgData.InputValues[j] = mutatedInput + } + + // Re-encode the message's calldata + possibleShrunkSequence[i].Call.WithDataAbiValues(abiValuesMsgData) + + // Test the shrunken sequence. + validShrunkSequence, err := fw.testShrunkenCallSequence(possibleShrunkSequence, shrinkRequest) + shrinkIteration++ + if err != nil { + return nil, err + } + + // If this current sequence satisfied our conditions, set it as our optimized sequence. + if validShrunkSequence { + optimizedSequence = possibleShrunkSequence + } + } } + fw.workerMetrics().shrinking = false } // If the shrink request wanted the sequence recorded in the corpus, do so now. if shrinkRequest.RecordResultInCorpus { - err = fw.fuzzer.corpus.AddCallSequence(optimizedSequence, fw.getNewCorpusCallSequenceWeight(), true) + err := fw.fuzzer.corpus.AddTestResultCallSequence(optimizedSequence, fw.getNewCorpusCallSequenceWeight(), true) if err != nil { return nil, err } } - // We have a finalized call sequence, re-execute it, so our current chain state is representative of post-execution. - _, err = calls.ExecuteCallSequence(fw.chain, optimizedSequence) + // Reset our state before running tracing in FinishedCallback. + err := fw.chain.RevertToBlockNumber(fw.testingBaseBlockNumber) if err != nil { return nil, err } @@ -407,19 +519,15 @@ func (fw *FuzzerWorker) shrinkCallSequence(callSequence calls.CallSequence, shri // Shrinking is complete. If our config specified we want all result sequences to have execution traces attached, // attach them now to each element in the sequence. 
Otherwise, call sequences will only have traces that the // test providers choose to attach themselves. - if fw.fuzzer.config.Fuzzing.Testing.TraceAll { - err = optimizedSequence.AttachExecutionTraces(fw.chain, fw.fuzzer.contractDefinitions) - if err != nil { - return nil, err - } - } - - // After we finished shrinking, report our result and return it. - err = shrinkRequest.FinishedCallback(fw, optimizedSequence) + err = shrinkRequest.FinishedCallback(fw, optimizedSequence, fw.fuzzer.config.Fuzzing.Testing.TraceAll) if err != nil { return nil, err } + // After testing the sequence, we'll want to rollback changes to reset our testing state. + if err = fw.chain.RevertToBlockNumber(fw.testingBaseBlockNumber); err != nil { + return nil, err + } return optimizedSequence, err } @@ -448,7 +556,7 @@ func (fw *FuzzerWorker) run(baseTestChain *chain.TestChain) (bool, error) { // If we have coverage-guided fuzzing enabled, create a tracer to collect coverage and connect it to the chain. if fw.fuzzer.config.Fuzzing.CoverageEnabled { fw.coverageTracer = coverage.NewCoverageTracer() - initializedChain.AddTracer(fw.coverageTracer, true, false) + initializedChain.AddTracer(fw.coverageTracer.NativeTracer(), true, false) } return nil }) @@ -458,6 +566,9 @@ func (fw *FuzzerWorker) run(baseTestChain *chain.TestChain) (bool, error) { return false, err } + // Defer the closing of the test chain object + defer fw.chain.Close() + // Emit an event indicating the worker has setup its chain. 
err = fw.Events.FuzzerWorkerChainSetup.Publish(FuzzerWorkerChainSetupEvent{ Worker: fw, @@ -493,7 +604,7 @@ func (fw *FuzzerWorker) run(baseTestChain *chain.TestChain) (bool, error) { } // Test a new sequence - callSequence, shrinkVerifiers, err := fw.testCallSequence() + callSequence, shrinkVerifiers, err := fw.testNextCallSequence() if err != nil { return false, err } diff --git a/fuzzing/fuzzer_worker_sequence_generator.go b/fuzzing/fuzzer_worker_sequence_generator.go index d57d05fe..b0bd3557 100644 --- a/fuzzing/fuzzer_worker_sequence_generator.go +++ b/fuzzing/fuzzer_worker_sequence_generator.go @@ -2,11 +2,13 @@ package fuzzing import ( "fmt" + "math/big" + "github.com/crytic/medusa/fuzzing/calls" + "github.com/crytic/medusa/fuzzing/contracts" "github.com/crytic/medusa/fuzzing/valuegeneration" "github.com/crytic/medusa/utils" "github.com/crytic/medusa/utils/randomutils" - "math/big" ) // CallSequenceGenerator generates call sequences iteratively per element, for use in fuzzing campaigns. It is attached @@ -84,10 +86,13 @@ type CallSequenceGeneratorConfig struct { // number of calls from each. RandomMutatedInterleaveAtRandomWeight uint64 - // ValueGenerator defines the value provider to use when generating or mutating call sequences. This is used both + // ValueGenerator defines the value provider to use when generating new values for call sequences. This is used both // for ABI call data generation, and generation of additional values such as the "value" field of a // transaction/call. ValueGenerator valuegeneration.ValueGenerator + + // ValueMutator defines the value provider to use when mutating corpus call sequences. + ValueMutator valuegeneration.ValueMutator } // CallSequenceGeneratorFunc defines a method used to populate a provided call sequence with generated calls. 
@@ -189,7 +194,7 @@ func (g *CallSequenceGenerator) InitializeNextSequence() (bool, error) { g.fetchIndex = 0 g.prefetchModifyCallFunc = nil - // Check if there are any previously une-xecuted corpus call sequences. If there are, the fuzzer should execute + // Check if there are any previously un-executed corpus call sequences. If there are, the fuzzer should execute // those first. unexecutedSequence := g.worker.fuzzer.corpus.UnexecutedCallSequence() if unexecutedSequence != nil { @@ -202,7 +207,7 @@ func (g *CallSequenceGenerator) InitializeNextSequence() (bool, error) { // If this provider has no corpus mutation methods or corpus call sequences, we return a call sequence with // nil elements to signal that we want an entirely new sequence. - if g.mutationStrategyChooser.ChoiceCount() == 0 || g.worker.fuzzer.corpus.ActiveCallSequenceCount() == 0 { + if g.mutationStrategyChooser.ChoiceCount() == 0 || g.worker.fuzzer.corpus.ActiveMutableSequenceCount() == 0 { return true, nil } @@ -257,23 +262,40 @@ func (g *CallSequenceGenerator) PopSequenceElement() (*calls.CallSequenceElement } } + // Update the element with the current nonce for the associated chain. + element.Call.FillFromTestChainProperties(g.worker.chain) + // Update our base sequence, advance our position, and return the processed element from this round. g.baseSequence[g.fetchIndex] = element g.fetchIndex++ return element, nil } -// generateNewElement generates a new call sequence element which targets a state changing method in a contract +// generateNewElement generates a new call sequence element which targets a method in a contract // deployed to the CallSequenceGenerator's parent FuzzerWorker chain, with fuzzed call data. // Returns the call sequence element, or an error if one was encountered. 
func (g *CallSequenceGenerator) generateNewElement() (*calls.CallSequenceElement, error) { - // Verify we have state changing methods to call - if len(g.worker.stateChangingMethods) == 0 { - return nil, fmt.Errorf("cannot generate fuzzed tx as there are no state changing methods to call") + // Check to make sure that we have any functions to call + if len(g.worker.stateChangingMethods) == 0 && len(g.worker.pureMethods) == 0 { + return nil, fmt.Errorf("cannot generate fuzzed call as there are no methods to call") + } + + // Only call view functions if there are no state-changing methods + var callOnlyPureFunctions bool + if len(g.worker.stateChangingMethods) == 0 && len(g.worker.pureMethods) > 0 { + callOnlyPureFunctions = true } - // Select a random method and sender - selectedMethod := &g.worker.stateChangingMethods[g.worker.randomProvider.Intn(len(g.worker.stateChangingMethods))] + // Select a random method + // There is a 1/100 chance that a pure method will be invoked or if there are only pure functions that are callable + var selectedMethod *contracts.DeployedContractMethod + if (len(g.worker.pureMethods) > 0 && g.worker.randomProvider.Intn(100) == 0) || callOnlyPureFunctions { + selectedMethod = &g.worker.pureMethods[g.worker.randomProvider.Intn(len(g.worker.pureMethods))] + } else { + selectedMethod = &g.worker.stateChangingMethods[g.worker.randomProvider.Intn(len(g.worker.stateChangingMethods))] + } + + // Select a random sender selectedSender := g.worker.fuzzer.senders[g.worker.randomProvider.Intn(len(g.worker.fuzzer.senders))] // Generate fuzzed parameters for the function call @@ -298,7 +320,10 @@ func (g *CallSequenceGenerator) generateNewElement() (*calls.CallSequenceElement Method: &selectedMethod.Method, InputValues: args, }) - msg.FillFromTestChainProperties(g.worker.chain) + + if g.worker.fuzzer.config.Fuzzing.TestChainConfig.SkipAccountChecks { + msg.SkipAccountChecks = true + } // Determine our delay values for this element blockNumberDelay := 
uint64(0) @@ -329,12 +354,12 @@ func (g *CallSequenceGenerator) generateNewElement() (*calls.CallSequenceElement // Returns an error if one occurs. func callSeqGenFuncCorpusHead(sequenceGenerator *CallSequenceGenerator, sequence calls.CallSequence) error { // Obtain a call sequence from the corpus - corpusSequence, err := sequenceGenerator.worker.fuzzer.corpus.RandomCallSequence() + corpusSequence, err := sequenceGenerator.worker.fuzzer.corpus.RandomMutationTargetSequence() if err != nil { - return fmt.Errorf("could not obtain corpus call sequence for tail mutation: %v", err) + return fmt.Errorf("could not obtain corpus call sequence for head mutation: %v", err) } - // Determine a random position to slice the call sequence. + // Determine the length of the slice to be copied in the head. maxLength := utils.Min(len(sequence), len(corpusSequence)) copy(sequence, corpusSequence[:maxLength]) @@ -346,7 +371,7 @@ func callSeqGenFuncCorpusHead(sequenceGenerator *CallSequenceGenerator, sequence // Returns an error if one occurs. func callSeqGenFuncCorpusTail(sequenceGenerator *CallSequenceGenerator, sequence calls.CallSequence) error { // Obtain a call sequence from the corpus - corpusSequence, err := sequenceGenerator.worker.fuzzer.corpus.RandomCallSequence() + corpusSequence, err := sequenceGenerator.worker.fuzzer.corpus.RandomMutationTargetSequence() if err != nil { return fmt.Errorf("could not obtain corpus call sequence for tail mutation: %v", err) } @@ -365,11 +390,11 @@ func callSeqGenFuncCorpusTail(sequenceGenerator *CallSequenceGenerator, sequence // Returns an error if one occurs. 
func callSeqGenFuncSpliceAtRandom(sequenceGenerator *CallSequenceGenerator, sequence calls.CallSequence) error { // Obtain two corpus call sequence entries - headSequence, err := sequenceGenerator.worker.fuzzer.corpus.RandomCallSequence() + headSequence, err := sequenceGenerator.worker.fuzzer.corpus.RandomMutationTargetSequence() if err != nil { return fmt.Errorf("could not obtain head corpus call sequence for splice-at-random corpus mutation: %v", err) } - tailSequence, err := sequenceGenerator.worker.fuzzer.corpus.RandomCallSequence() + tailSequence, err := sequenceGenerator.worker.fuzzer.corpus.RandomMutationTargetSequence() if err != nil { return fmt.Errorf("could not obtain tail corpus call sequence for splice-at-random corpus mutation: %v", err) } @@ -397,11 +422,11 @@ func callSeqGenFuncSpliceAtRandom(sequenceGenerator *CallSequenceGenerator, sequ // Returns an error if one occurs. func callSeqGenFuncInterleaveAtRandom(sequenceGenerator *CallSequenceGenerator, sequence calls.CallSequence) error { // Obtain two corpus call sequence entries - firstSequence, err := sequenceGenerator.worker.fuzzer.corpus.RandomCallSequence() + firstSequence, err := sequenceGenerator.worker.fuzzer.corpus.RandomMutationTargetSequence() if err != nil { return fmt.Errorf("could not obtain first corpus call sequence for interleave-at-random corpus mutation: %v", err) } - secondSequence, err := sequenceGenerator.worker.fuzzer.corpus.RandomCallSequence() + secondSequence, err := sequenceGenerator.worker.fuzzer.corpus.RandomMutationTargetSequence() if err != nil { return fmt.Errorf("could not obtain second corpus call sequence for interleave-at-random corpus mutation: %v", err) } @@ -438,18 +463,21 @@ func callSeqGenFuncInterleaveAtRandom(sequenceGenerator *CallSequenceGenerator, // Returns an error if one occurs. 
func prefetchModifyCallFuncMutate(sequenceGenerator *CallSequenceGenerator, element *calls.CallSequenceElement) error { // If this element has no ABI value based call data, exit early. - if element.Call == nil || element.Call.MsgDataAbiValues == nil { + if element.Call == nil || element.Call.DataAbiValues == nil { return nil } // Loop for each input value and mutate it - abiValuesMsgData := element.Call.MsgDataAbiValues + abiValuesMsgData := element.Call.DataAbiValues for i := 0; i < len(abiValuesMsgData.InputValues); i++ { - mutatedInput, err := valuegeneration.MutateAbiValue(sequenceGenerator.config.ValueGenerator, &abiValuesMsgData.Method.Inputs[i].Type, abiValuesMsgData.InputValues[i]) + mutatedInput, err := valuegeneration.MutateAbiValue(sequenceGenerator.config.ValueGenerator, sequenceGenerator.config.ValueMutator, &abiValuesMsgData.Method.Inputs[i].Type, abiValuesMsgData.InputValues[i]) if err != nil { return fmt.Errorf("error when mutating call sequence input argument: %v", err) } abiValuesMsgData.InputValues[i] = mutatedInput } + // Re-encode the message's calldata + element.Call.WithDataAbiValues(abiValuesMsgData) + return nil } diff --git a/fuzzing/test_case.go b/fuzzing/test_case.go index d5e6a546..6be040e7 100644 --- a/fuzzing/test_case.go +++ b/fuzzing/test_case.go @@ -2,6 +2,7 @@ package fuzzing import ( "github.com/crytic/medusa/fuzzing/calls" + "github.com/crytic/medusa/logging" ) // TestCaseStatus defines the status of a TestCase as a string-represented enum. @@ -31,7 +32,11 @@ type TestCase interface { // Name describes the name of the test case. Name() string - // Message obtains a text-based printable message which describes the test result. + // LogMessage obtains a logging.LogBuffer that represents the result of the TestCase. This buffer can be passed to a logger for + // console or file logging. + LogMessage() *logging.LogBuffer + + // Message obtains a text-based printable message which describes the result of the AssertionTestCase. 
Message() string // ID obtains a unique identifier for a test result. If the same test fails, this ID should match for both diff --git a/fuzzing/test_case_assertion.go b/fuzzing/test_case_assertion.go index 8ea42eb0..7cdf3bb3 100644 --- a/fuzzing/test_case_assertion.go +++ b/fuzzing/test_case_assertion.go @@ -2,6 +2,8 @@ package fuzzing import ( "fmt" + "github.com/crytic/medusa/logging" + "github.com/crytic/medusa/logging/colors" "strings" "github.com/crytic/medusa/fuzzing/calls" @@ -11,10 +13,14 @@ import ( // AssertionTestCase describes a test being run by a AssertionTestCaseProvider. type AssertionTestCase struct { - status TestCaseStatus + // status describes the status of the test case + status TestCaseStatus + // targetContract describes the target contract where the test case was found targetContract *fuzzerTypes.Contract - targetMethod abi.Method - callSequence *calls.CallSequence + // targetMethod describes the target method for the test case + targetMethod abi.Method + // callSequence describes the call sequence that broke the assertion + callSequence *calls.CallSequence } // Status describes the TestCaseStatus used to define the current state of the test. @@ -33,18 +39,27 @@ func (t *AssertionTestCase) Name() string { return fmt.Sprintf("Assertion Test: %s.%s", t.targetContract.Name(), t.targetMethod.Sig) } -// Message obtains a text-based printable message which describes the test result. -func (t *AssertionTestCase) Message() string { +// LogMessage obtains a buffer that represents the result of the AssertionTestCase. This buffer can be passed to a logger for +// console or file logging. +func (t *AssertionTestCase) LogMessage() *logging.LogBuffer { // If the test failed, return a failure message. 
+ buffer := logging.NewLogBuffer() if t.Status() == TestCaseStatusFailed { - return fmt.Sprintf( - "Test for method \"%s.%s\" failed after the following call sequence resulted in an assertion:\n%s", - t.targetContract.Name(), - t.targetMethod.Sig, - t.CallSequence().String(), - ) + buffer.Append(colors.RedBold, fmt.Sprintf("[%s] ", t.Status()), colors.Bold, t.Name(), colors.Reset, "\n") + buffer.Append(fmt.Sprintf("Test for method \"%s.%s\" resulted in an assertion failure after the following call sequence:\n", t.targetContract.Name(), t.targetMethod.Sig)) + buffer.Append(colors.Bold, "[Call Sequence]", colors.Reset, "\n") + buffer.Append(t.CallSequence().Log().Elements()...) + return buffer } - return "" + + buffer.Append(colors.GreenBold, fmt.Sprintf("[%s] ", t.Status()), colors.Bold, t.Name(), colors.Reset) + return buffer +} + +// Message obtains a text-based printable message which describes the result of the AssertionTestCase. +func (t *AssertionTestCase) Message() string { + // Internally, we just call log message and convert it to a string. This can be useful for 3rd party apps + return t.LogMessage().String() } // ID obtains a unique identifier for a test result. 
diff --git a/fuzzing/test_case_assertion_provider.go b/fuzzing/test_case_assertion_provider.go index 35f0d7e6..f9b9978a 100644 --- a/fuzzing/test_case_assertion_provider.go +++ b/fuzzing/test_case_assertion_provider.go @@ -1,13 +1,15 @@ package fuzzing import ( - "github.com/crytic/medusa/compilation/abiutils" - "github.com/crytic/medusa/fuzzing/calls" - "golang.org/x/exp/slices" + "math/big" "sync" + "github.com/crytic/medusa/compilation/abiutils" + "github.com/crytic/medusa/fuzzing/calls" + "github.com/crytic/medusa/fuzzing/config" "github.com/crytic/medusa/fuzzing/contracts" - "github.com/ethereum/go-ethereum/accounts/abi" + + "golang.org/x/exp/slices" ) // AssertionTestCaseProvider is am AssertionTestCase provider which spawns test cases for every contract method and @@ -24,8 +26,6 @@ type AssertionTestCaseProvider struct { testCasesLock sync.Mutex } -// Define our ABI method - // attachAssertionTestCaseProvider attaches a new AssertionTestCaseProvider to the Fuzzer and returns it. func attachAssertionTestCaseProvider(fuzzer *Fuzzer) *AssertionTestCaseProvider { // Create a test case provider @@ -43,13 +43,6 @@ func attachAssertionTestCaseProvider(fuzzer *Fuzzer) *AssertionTestCaseProvider return t } -// isTestableMethod checks whether the method is configured by the attached fuzzer to be a target of assertion testing. -// Returns true if this target should be tested, false otherwise. -func (t *AssertionTestCaseProvider) isTestableMethod(method abi.Method) bool { - // Only test constant methods (pure/view) if we are configured to. - return !method.IsConstant() || t.fuzzer.config.Fuzzing.Testing.AssertionTesting.TestViewMethods -} - // checkAssertionFailures checks the results of the last call for assertion failures. // Returns the method ID, a boolean indicating if an assertion test failed, or an error if one occurs. 
func (t *AssertionTestCaseProvider) checkAssertionFailures(callSequence calls.CallSequence) (*contracts.ContractMethodID, bool, error) { @@ -66,16 +59,19 @@ func (t *AssertionTestCaseProvider) checkAssertionFailures(callSequence calls.Ca } methodId := contracts.GetContractMethodID(lastCall.Contract, lastCallMethod) - // Check if we encountered an assertion error. - // Try to unpack our error and return data for a panic code and verify it matches the "assert failed" panic code. + // Check if we encountered an enabled panic code. + // Try to unpack our error and return data for a panic code and verify that that panic code should be treated as a failing case. // Solidity >0.8.0 introduced asserts failing as reverts but with special return data. But we indicate we also // want to be backwards compatible with older Solidity which simply hit an invalid opcode and did not actually // have a panic code. lastExecutionResult := lastCall.ChainReference.MessageResults().ExecutionResult panicCode := abiutils.GetSolidityPanicCode(lastExecutionResult.Err, lastExecutionResult.ReturnData, true) - encounteredAssertionFailure := panicCode != nil && panicCode.Uint64() == abiutils.PanicCodeAssertFailed + failure := false + if panicCode != nil { + failure = encounteredAssertionFailure(panicCode.Uint64(), t.fuzzer.config.Fuzzing.Testing.AssertionTesting.PanicCodeConfig) + } - return &methodId, encounteredAssertionFailure, nil + return &methodId, failure, nil } // onFuzzerStarting is the event handler triggered when the Fuzzer is starting a fuzzing campaign. It creates test cases @@ -86,17 +82,12 @@ func (t *AssertionTestCaseProvider) onFuzzerStarting(event FuzzerStartingEvent) // Create a test case for every test method. for _, contract := range t.fuzzer.ContractDefinitions() { - // If we're not testing all contracts, verify the current contract is one we specified in our deployment order. 
- if !t.fuzzer.config.Fuzzing.Testing.TestAllContracts && !slices.Contains(t.fuzzer.config.Fuzzing.DeploymentOrder, contract.Name()) { + // If we're not testing all contracts, verify the current contract is one we specified in our target contracts + if !t.fuzzer.config.Fuzzing.Testing.TestAllContracts && !slices.Contains(t.fuzzer.config.Fuzzing.TargetContracts, contract.Name()) { continue } - for _, method := range contract.CompiledContract().Abi.Methods { - // Verify this method is an assertion testable method - if !t.isTestableMethod(method) { - continue - } - + for _, method := range contract.AssertionTestMethods { // Create local variables to avoid pointer types in the loop being overridden. contract := contract method := method @@ -118,7 +109,7 @@ func (t *AssertionTestCaseProvider) onFuzzerStarting(event FuzzerStartingEvent) return nil } -// onFuzzerStarting is the event handler triggered when the Fuzzer is stopping the fuzzing campaign and all workers +// onFuzzerStopping is the event handler triggered when the Fuzzer is stopping the fuzzing campaign and all workers // have been destroyed. It clears state tracked for each FuzzerWorker and sets test cases in "running" states to // "passed". func (t *AssertionTestCaseProvider) onFuzzerStopping(event FuzzerStoppingEvent) error { @@ -210,10 +201,10 @@ func (t *AssertionTestCaseProvider) callSequencePostCallTest(worker *FuzzerWorke // If we encountered assertion failures on the same method, this shrunk sequence is satisfactory. return shrunkSeqTestFailed && *methodId == *shrunkSeqMethodId, nil }, - FinishedCallback: func(worker *FuzzerWorker, shrunkenCallSequence calls.CallSequence) error { - // When we're finished shrinking, attach an execution trace to the last call + FinishedCallback: func(worker *FuzzerWorker, shrunkenCallSequence calls.CallSequence, verboseTracing bool) error { + // When we're finished shrinking, attach an execution trace to the last call. If verboseTracing is true, attach to all calls. 
if len(shrunkenCallSequence) > 0 {
- err = shrunkenCallSequence[len(shrunkenCallSequence)-1].AttachExecutionTrace(worker.chain, worker.fuzzer.contractDefinitions)
+ _, err = calls.ExecuteCallSequenceWithExecutionTracer(worker.chain, worker.fuzzer.contractDefinitions, shrunkenCallSequence, verboseTracing)
if err != nil {
return err
}
@@ -222,6 +213,7 @@ func (t *AssertionTestCaseProvide
// Update our test state and report it finalized.
testCase.status = TestCaseStatusFailed
testCase.callSequence = &shrunkenCallSequence
+ worker.workerMetrics().failedSequences.Add(worker.workerMetrics().failedSequences, big.NewInt(1))
worker.Fuzzer().ReportTestCaseFinished(testCase)
return nil
},
@@ -234,3 +226,37 @@
return shrinkRequests, nil
}
+
+// encounteredAssertionFailure takes in a panic code and a config.AssertionModesConfig and will determine whether the
+// panic code that was hit should be treated as a failing case - which will be determined by whether that panic
+// code was enabled in the config. Note that the panic codes are defined in the abiutils package and that this function
+// will ignore (return false for) any panic code that is not defined in the abiutils package.
+// TODO: This is a terrible design and a future PR should be made to maintain assertion and panic logic correctly +func encounteredAssertionFailure(panicCode uint64, conf config.PanicCodeConfig) bool { + // Switch on panic code + switch panicCode { + case abiutils.PanicCodeCompilerInserted: + return conf.FailOnCompilerInsertedPanic + case abiutils.PanicCodeAssertFailed: + return conf.FailOnAssertion + case abiutils.PanicCodeArithmeticUnderOverflow: + return conf.FailOnArithmeticUnderflow + case abiutils.PanicCodeDivideByZero: + return conf.FailOnDivideByZero + case abiutils.PanicCodeEnumTypeConversionOutOfBounds: + return conf.FailOnEnumTypeConversionOutOfBounds + case abiutils.PanicCodeIncorrectStorageAccess: + return conf.FailOnIncorrectStorageAccess + case abiutils.PanicCodePopEmptyArray: + return conf.FailOnPopEmptyArray + case abiutils.PanicCodeOutOfBoundsArrayAccess: + return conf.FailOnOutOfBoundsArrayAccess + case abiutils.PanicCodeAllocateTooMuchMemory: + return conf.FailOnAllocateTooMuchMemory + case abiutils.PanicCodeCallUninitializedVariable: + return conf.FailOnCallUninitializedVariable + default: + // If we encounter an unknown panic code, we ignore it + return false + } +} diff --git a/fuzzing/test_case_optimization.go b/fuzzing/test_case_optimization.go new file mode 100644 index 00000000..3c785047 --- /dev/null +++ b/fuzzing/test_case_optimization.go @@ -0,0 +1,86 @@ +package fuzzing + +import ( + "fmt" + "math/big" + "strings" + "sync" + + "github.com/crytic/medusa/fuzzing/calls" + "github.com/crytic/medusa/fuzzing/contracts" + "github.com/crytic/medusa/fuzzing/executiontracer" + "github.com/crytic/medusa/logging" + "github.com/crytic/medusa/logging/colors" + "github.com/ethereum/go-ethereum/accounts/abi" +) + +// OptimizationTestCase describes a test being run by a OptimizationTestCaseProvider. 
+type OptimizationTestCase struct { + // status describes the status of the test case + status TestCaseStatus + // targetContract describes the target contract where the test case was found + targetContract *contracts.Contract + // targetMethod describes the target method for the test case + targetMethod abi.Method + // callSequence describes the call sequence that maximized the value + callSequence *calls.CallSequence + // value is used to store the maximum value returned by the test method + value *big.Int + // valueLock is used for thread-synchronization when updating the value + valueLock sync.Mutex + // optimizationTestTrace describes the execution trace when running the callSequence + optimizationTestTrace *executiontracer.ExecutionTrace +} + +// Status describes the TestCaseStatus used to define the current state of the test. +func (t *OptimizationTestCase) Status() TestCaseStatus { + return t.status +} + +// CallSequence describes the calls.CallSequence of calls sent to the EVM which resulted in this TestCase result. +// This should be nil if the result is not related to the CallSequence. +func (t *OptimizationTestCase) CallSequence() *calls.CallSequence { + return t.callSequence +} + +// Name describes the name of the test case. +func (t *OptimizationTestCase) Name() string { + return fmt.Sprintf("Optimization Test: %s.%s", t.targetContract.Name(), t.targetMethod.Sig) +} + +// LogMessage obtains a buffer that represents the result of the OptimizationTestCase. This buffer can be passed to a logger for +// console or file logging. 
+func (t *OptimizationTestCase) LogMessage() *logging.LogBuffer { + buffer := logging.NewLogBuffer() + + // Note that optimization tests will always pass + buffer.Append(colors.GreenBold, fmt.Sprintf("[%s] ", t.Status()), colors.Bold, t.Name(), colors.Reset, "\n") + if t.Status() != TestCaseStatusNotStarted { + buffer.Append(fmt.Sprintf("Test for method \"%s.%s\" resulted in the maximum value: ", t.targetContract.Name(), t.targetMethod.Sig)) + buffer.Append(colors.Bold, t.value, colors.Reset, "\n") + buffer.Append(colors.Bold, "[Call Sequence]", colors.Reset, "\n") + buffer.Append(t.CallSequence().Log().Elements()...) + } + // If an execution trace is attached then add it to the message + if t.optimizationTestTrace != nil { + buffer.Append(colors.Bold, "[Optimization Test Execution Trace]", colors.Reset, "\n") + buffer.Append(t.optimizationTestTrace.Log().Elements()...) + } + return buffer +} + +// Message obtains a text-based printable message which describes the result of the OptimizationTestCase. +func (t *OptimizationTestCase) Message() string { + // Internally, we just call log message and convert it to a string. This can be useful for 3rd party apps + return t.LogMessage().String() +} + +// ID obtains a unique identifier for a test result. 
+func (t *OptimizationTestCase) ID() string {
+ return strings.Replace(fmt.Sprintf("OPTIMIZATION-%s-%s", t.targetContract.Name(), t.targetMethod.Sig), "_", "-", -1)
+}
+
+// Value obtains the maximum value returned by the test method found so far
+func (t *OptimizationTestCase) Value() *big.Int {
+ return t.value
+}
diff --git a/fuzzing/test_case_optimization_provider.go b/fuzzing/test_case_optimization_provider.go
new file mode 100644
index 00000000..e782c646
--- /dev/null
+++ b/fuzzing/test_case_optimization_provider.go
@@ -0,0 +1,357 @@
+package fuzzing
+
+import (
+ "fmt"
+ "math/big"
+ "sync"
+
+ "github.com/crytic/medusa/fuzzing/calls"
+ "github.com/crytic/medusa/fuzzing/contracts"
+ "github.com/crytic/medusa/fuzzing/executiontracer"
+ "github.com/ethereum/go-ethereum/core"
+ "golang.org/x/exp/slices"
+)
+
+const MIN_INT = "-8000000000000000000000000000000000000000000000000000000000000000"
+
+// OptimizationTestCaseProvider is a provider for on-chain optimization tests.
+// Optimization tests are represented as publicly-accessible functions which have a name prefix specified by a
+// config.FuzzingConfig. They take no input arguments and return an integer value that needs to be maximized.
+type OptimizationTestCaseProvider struct {
+ // fuzzer describes the Fuzzer which this provider is attached to.
+ fuzzer *Fuzzer
+
+ // testCases is a map of contract-method IDs to optimization test cases.
+ testCases map[contracts.ContractMethodID]*OptimizationTestCase
+
+ // testCasesLock is used for thread-synchronization when updating testCases
+ testCasesLock sync.Mutex
+
+ // workerStates is a slice where each element stores state for a given worker index.
+ workerStates []optimizationTestCaseProviderWorkerState
+}
+
+// optimizationTestCaseProviderWorkerState represents the state for an individual worker maintained by
+// OptimizationTestCaseProvider.
+type optimizationTestCaseProviderWorkerState struct { + // optimizationTestMethods a mapping from contract-method ID to deployed contract-method descriptors. + // Each deployed contract-method represents an optimization test method to call for evaluation. Optimization tests + // should be read-only functions which take no input parameters and return an integer variable. + optimizationTestMethods map[contracts.ContractMethodID]contracts.DeployedContractMethod + + // optimizationTestMethodsLock is used for thread-synchronization when updating optimizationTestMethods + optimizationTestMethodsLock sync.Mutex +} + +// attachOptimizationTestCaseProvider attaches a new OptimizationTestCaseProvider to the Fuzzer and returns it. +func attachOptimizationTestCaseProvider(fuzzer *Fuzzer) *OptimizationTestCaseProvider { + // If there are no testing prefixes, then there is no reason to attach a test case provider and subscribe to events + if len(fuzzer.config.Fuzzing.Testing.OptimizationTesting.TestPrefixes) == 0 { + return nil + } + + // Create a test case provider + t := &OptimizationTestCaseProvider{ + fuzzer: fuzzer, + } + + // Subscribe the provider to relevant events the fuzzer emits. + fuzzer.Events.FuzzerStarting.Subscribe(t.onFuzzerStarting) + fuzzer.Events.FuzzerStopping.Subscribe(t.onFuzzerStopping) + fuzzer.Events.WorkerCreated.Subscribe(t.onWorkerCreated) + + // Add the provider's call sequence test function to the fuzzer. + fuzzer.Hooks.CallSequenceTestFuncs = append(fuzzer.Hooks.CallSequenceTestFuncs, t.callSequencePostCallTest) + return t +} + +// runOptimizationTest executes a given optimization test method (w/ an optional execution trace) and returns the return value +// from the optimization test method. This is called after every call the Fuzzer makes when testing call sequences for each test case. 
+func (t *OptimizationTestCaseProvider) runOptimizationTest(worker *FuzzerWorker, optimizationTestMethod *contracts.DeployedContractMethod, trace bool) (*big.Int, *executiontracer.ExecutionTrace, error) { + // Generate our ABI input data for the call. In this case, optimization test methods take no arguments, so the + // variadic argument list here is empty. + data, err := optimizationTestMethod.Contract.CompiledContract().Abi.Pack(optimizationTestMethod.Method.Name) + if err != nil { + return nil, nil, err + } + + // Call the underlying contract + value := big.NewInt(0) + // TODO: Determine if we should use `Senders[0]` or have a separate funded account for the optimizations. + msg := calls.NewCallMessage(worker.Fuzzer().senders[0], &optimizationTestMethod.Address, 0, value, worker.fuzzer.config.Fuzzing.TransactionGasLimit, nil, nil, nil, data) + msg.FillFromTestChainProperties(worker.chain) + + // Execute the call. If we are tracing, we attach an execution tracer and obtain the result. 
+ var executionResult *core.ExecutionResult + var executionTrace *executiontracer.ExecutionTrace + if trace { + executionResult, executionTrace, err = executiontracer.CallWithExecutionTrace(worker.chain, worker.fuzzer.contractDefinitions, msg.ToCoreMessage(), nil) + } else { + executionResult, err = worker.Chain().CallContract(msg.ToCoreMessage(), nil) + } + if err != nil { + return nil, nil, fmt.Errorf("failed to call optimization test method: %v", err) + } + + // If the execution reverted, then we know that we do not have any valuable return data, so we return the smallest + // integer value + if executionResult.Failed() { + minInt256, _ := new(big.Int).SetString(MIN_INT, 16) + return minInt256, nil, nil + } + + // Decode our ABI outputs + retVals, err := optimizationTestMethod.Method.Outputs.Unpack(executionResult.Return()) + if err != nil { + return nil, nil, fmt.Errorf("failed to decode optimization test method return value: %v", err) + } + + // We should have one return value. + if len(retVals) != 1 { + return nil, nil, fmt.Errorf("detected an unexpected number of return values from optimization test '%s'", optimizationTestMethod.Method.Name) + } + + // Parse the return value and it should be an int256 + newValue, ok := retVals[0].(*big.Int) + if !ok { + return nil, nil, fmt.Errorf("failed to parse optimization test's: %s return value: %v", optimizationTestMethod.Method.Name, retVals[0]) + } + + return newValue, executionTrace, nil +} + +// onFuzzerStarting is the event handler triggered when the Fuzzer is starting a fuzzing campaign. It creates test cases +// in a "not started" state for every optimization test method discovered in the contract definitions known to the Fuzzer. 
+func (t *OptimizationTestCaseProvider) onFuzzerStarting(event FuzzerStartingEvent) error { + // Reset our state + t.testCases = make(map[contracts.ContractMethodID]*OptimizationTestCase) + t.workerStates = make([]optimizationTestCaseProviderWorkerState, t.fuzzer.Config().Fuzzing.Workers) + + // Create a test case for every optimization test method. + for _, contract := range t.fuzzer.ContractDefinitions() { + // If we're not testing all contracts, verify the current contract is one we specified in our target contracts + if !t.fuzzer.config.Fuzzing.Testing.TestAllContracts && !slices.Contains(t.fuzzer.config.Fuzzing.TargetContracts, contract.Name()) { + continue + } + + for _, method := range contract.OptimizationTestMethods { + // Create local variables to avoid pointer types in the loop being overridden. + contract := contract + method := method + minInt256, _ := new(big.Int).SetString(MIN_INT, 16) + + // Create our optimization test case + optimizationTestCase := &OptimizationTestCase{ + status: TestCaseStatusNotStarted, + targetContract: contract, + targetMethod: method, + callSequence: nil, + value: minInt256, + } + + // Add to our test cases and register them with the fuzzer + methodId := contracts.GetContractMethodID(contract, &method) + t.testCases[methodId] = optimizationTestCase + t.fuzzer.RegisterTestCase(optimizationTestCase) + } + } + return nil +} + +// onFuzzerStopping is the event handler triggered when the Fuzzer is stopping the fuzzing campaign and all workers +// have been destroyed. It clears state tracked for each FuzzerWorker and sets test cases in "running" states to +// "passed". +func (t *OptimizationTestCaseProvider) onFuzzerStopping(event FuzzerStoppingEvent) error { + // Clear our optimization test methods + t.workerStates = nil + + // Loop through each test case and set any tests with a running status to a passed status. 
+ for _, testCase := range t.testCases { + if testCase.status == TestCaseStatusRunning { + testCase.status = TestCaseStatusPassed + } + } + return nil +} + +// onWorkerCreated is the event handler triggered when a FuzzerWorker is created by the Fuzzer. It ensures state tracked +// for that worker index is refreshed and subscribes to relevant worker events. +func (t *OptimizationTestCaseProvider) onWorkerCreated(event FuzzerWorkerCreatedEvent) error { + // Create a new state for this worker. + t.workerStates[event.Worker.WorkerIndex()] = optimizationTestCaseProviderWorkerState{ + optimizationTestMethods: make(map[contracts.ContractMethodID]contracts.DeployedContractMethod), + optimizationTestMethodsLock: sync.Mutex{}, + } + + // Subscribe to relevant worker events. + event.Worker.Events.ContractAdded.Subscribe(t.onWorkerDeployedContractAdded) + event.Worker.Events.ContractDeleted.Subscribe(t.onWorkerDeployedContractDeleted) + return nil +} + +// onWorkerDeployedContractAdded is the event handler triggered when a FuzzerWorker detects a new contract deployment +// on its underlying chain. It ensures any optimization test methods which the deployed contract contains are tracked by the +// provider for testing. Any test cases previously made for these methods which are in a "not started" state are put +// into a "running" state, as they are now potentially reachable for testing. +func (t *OptimizationTestCaseProvider) onWorkerDeployedContractAdded(event FuzzerWorkerContractAddedEvent) error { + // If we don't have a contract definition, we can't run optimization tests against the contract. 
+ if event.ContractDefinition == nil { + return nil + } + + // Loop through all methods and find ones for which we have tests + for _, method := range event.ContractDefinition.CompiledContract().Abi.Methods { + // Obtain an identifier for this pair + methodId := contracts.GetContractMethodID(event.ContractDefinition, &method) + + // If we have a test case targeting this contract/method that has not failed, track this deployed method in + // our map for this worker. If we have any tests in a not-started state, we can signal a running state now. + t.testCasesLock.Lock() + optimizationTestCase, optimizationTestCaseExists := t.testCases[methodId] + t.testCasesLock.Unlock() + + if optimizationTestCaseExists { + if optimizationTestCase.Status() == TestCaseStatusNotStarted { + optimizationTestCase.status = TestCaseStatusRunning + } + if optimizationTestCase.Status() != TestCaseStatusFailed { + // Create our optimization test method reference. + workerState := &t.workerStates[event.Worker.WorkerIndex()] + workerState.optimizationTestMethodsLock.Lock() + workerState.optimizationTestMethods[methodId] = contracts.DeployedContractMethod{ + Address: event.ContractAddress, + Contract: event.ContractDefinition, + Method: method, + } + workerState.optimizationTestMethodsLock.Unlock() + } + } + } + return nil +} + +// onWorkerDeployedContractDeleted is the event handler triggered when a FuzzerWorker detects that a previously deployed +// contract no longer exists on its underlying chain. It ensures any optimization test methods which the deployed contract +// contained are no longer tracked by the provider for testing. +func (t *OptimizationTestCaseProvider) onWorkerDeployedContractDeleted(event FuzzerWorkerContractDeletedEvent) error { + // If we don't have a contract definition, there's nothing to do. 
+ if event.ContractDefinition == nil {
+ return nil
+ }
+
+ // Loop through all methods and find ones for which we have tests
+ for _, method := range event.ContractDefinition.CompiledContract().Abi.Methods {
+ // Obtain an identifier for this pair
+ methodId := contracts.GetContractMethodID(event.ContractDefinition, &method)
+
+ // If this identifier is in our test cases map, then we remove it from our optimization test method lookup for
+ // this worker index.
+ t.testCasesLock.Lock()
+ _, isOptimizationTestMethod := t.testCases[methodId]
+ t.testCasesLock.Unlock()
+
+ if isOptimizationTestMethod {
+ // Delete our optimization test method reference.
+ workerState := &t.workerStates[event.Worker.WorkerIndex()]
+ workerState.optimizationTestMethodsLock.Lock()
+ delete(workerState.optimizationTestMethods, methodId)
+ workerState.optimizationTestMethodsLock.Unlock()
+ }
+ }
+ return nil
+}
+
+// callSequencePostCallTest is a CallSequenceTestFunc that performs post-call testing logic for the attached Fuzzer
+// and any underlying FuzzerWorker. It is called after every call made in a call sequence. It checks whether any
+// optimization test's value has increased.
+func (t *OptimizationTestCaseProvider) callSequencePostCallTest(worker *FuzzerWorker, callSequence calls.CallSequence) ([]ShrinkCallSequenceRequest, error) {
+ // Create a list of shrink call sequence verifiers, which we populate for each maximized optimization test we want a call
+ // sequence shrunk for.
+ shrinkRequests := make([]ShrinkCallSequenceRequest, 0)
+
+ // Obtain the test provider state for this worker
+ workerState := &t.workerStates[worker.WorkerIndex()]
+
+ // Loop through all optimization test methods and test them.
+ for optimizationTestMethodId, workerOptimizationTestMethod := range workerState.optimizationTestMethods { + // Obtain the test case for this optimization test method + t.testCasesLock.Lock() + testCase := t.testCases[optimizationTestMethodId] + t.testCasesLock.Unlock() + + // Run our optimization test (create a local copy to avoid loop overwriting the method) + workerOptimizationTestMethod := workerOptimizationTestMethod + newValue, _, err := t.runOptimizationTest(worker, &workerOptimizationTestMethod, false) + if err != nil { + return nil, err + } + + // If we updated the test case's maximum value, we update our state immediately. We provide a shrink verifier which will update + // the call sequence for each shrunken sequence provided that still it maintains the maximum value. + // TODO: This is very inefficient since this runs every time a new max value is found. It would be ideal if we + // could perform a one-time shrink request. This code should be refactored when we introduce the high-level + // testing API. + if newValue.Cmp(testCase.value) == 1 { + // Create a request to shrink this call sequence. + shrinkRequest := ShrinkCallSequenceRequest{ + VerifierFunction: func(worker *FuzzerWorker, shrunkenCallSequence calls.CallSequence) (bool, error) { + // First verify the contract to the optimization test is still deployed to call upon. + _, optimizationTestContractDeployed := worker.deployedContracts[workerOptimizationTestMethod.Address] + if !optimizationTestContractDeployed { + // If the contract isn't available, this shrunk sequence likely messed up deployment, so we + // report it as an invalid solution. + return false, nil + } + + // Then the shrink verifier ensures that the maximum value has either stayed the same or, hopefully, + // increased. 
+ shrunkenSequenceNewValue, _, err := t.runOptimizationTest(worker, &workerOptimizationTestMethod, false) + + // If the shrunken value is greater than new value, then set new value to the shrunken one so that it + // can be tracked correctly in the finished callback + if err == nil && shrunkenSequenceNewValue.Cmp(newValue) == 1 { + newValue = new(big.Int).Set(shrunkenSequenceNewValue) + } + + return shrunkenSequenceNewValue.Cmp(newValue) >= 0, err + }, + FinishedCallback: func(worker *FuzzerWorker, shrunkenCallSequence calls.CallSequence, verboseTracing bool) error { + // When we're finished shrinking, attach an execution trace to the last call. If verboseTracing is true, attach to all calls. + if len(shrunkenCallSequence) > 0 { + _, err = calls.ExecuteCallSequenceWithExecutionTracer(worker.chain, worker.fuzzer.contractDefinitions, shrunkenCallSequence, verboseTracing) + if err != nil { + return err + } + } + + // Execute the property test a final time, this time obtaining an execution trace + shrunkenSequenceNewValue, executionTrace, err := t.runOptimizationTest(worker, &workerOptimizationTestMethod, true) + if err != nil { + return err + } + + // If, for some reason, the shrunken sequence lowers the new max value, do not save anything and exit + if shrunkenSequenceNewValue.Cmp(newValue) < 0 { + return fmt.Errorf("optimized call sequence failed to maximize value") + } + + // Update our value with lock + testCase.valueLock.Lock() + testCase.value = new(big.Int).Set(shrunkenSequenceNewValue) + testCase.valueLock.Unlock() + + // Update call sequence and trace + testCase.callSequence = &shrunkenCallSequence + testCase.optimizationTestTrace = executionTrace + return nil + }, + RecordResultInCorpus: true, + } + + // Add our shrink request to our list. 
+ shrinkRequests = append(shrinkRequests, shrinkRequest) + } + } + + return shrinkRequests, nil +} diff --git a/fuzzing/test_case_property.go b/fuzzing/test_case_property.go index 14b5e7de..f20d0769 100644 --- a/fuzzing/test_case_property.go +++ b/fuzzing/test_case_property.go @@ -5,16 +5,23 @@ import ( "github.com/crytic/medusa/fuzzing/calls" fuzzerTypes "github.com/crytic/medusa/fuzzing/contracts" "github.com/crytic/medusa/fuzzing/executiontracer" + "github.com/crytic/medusa/logging" + "github.com/crytic/medusa/logging/colors" "github.com/ethereum/go-ethereum/accounts/abi" "strings" ) // PropertyTestCase describes a test being run by a PropertyTestCaseProvider. type PropertyTestCase struct { - status TestCaseStatus - targetContract *fuzzerTypes.Contract - targetMethod abi.Method - callSequence *calls.CallSequence + // status describes the status of the test case + status TestCaseStatus + // targetContract describes the target contract where the test case was found + targetContract *fuzzerTypes.Contract + // targetMethod describes the target method for the test case + targetMethod abi.Method + // callSequence describes the call sequence that broke the property + callSequence *calls.CallSequence + // propertyTestTrace describes the execution trace when running the callSequence propertyTestTrace *executiontracer.ExecutionTrace } @@ -34,24 +41,33 @@ func (t *PropertyTestCase) Name() string { return fmt.Sprintf("Property Test: %s.%s", t.targetContract.Name(), t.targetMethod.Sig) } -// Message obtains a text-based printable message which describes the test result. -func (t *PropertyTestCase) Message() string { +// LogMessage obtains a buffer that represents the result of the PropertyTestCase. This buffer can be passed to a logger for +// console or file logging. +func (t *PropertyTestCase) LogMessage() *logging.LogBuffer { // If the test failed, return a failure message. 
+ buffer := logging.NewLogBuffer() if t.Status() == TestCaseStatusFailed { - msg := fmt.Sprintf( - "Property test \"%s.%s\" failed after the following call sequence:\n%s", - t.targetContract.Name(), - t.targetMethod.Sig, - t.CallSequence().String(), - ) + buffer.Append(colors.RedBold, fmt.Sprintf("[%s] ", t.Status()), colors.Bold, t.Name(), colors.Reset, "\n") + buffer.Append(fmt.Sprintf("Test for method \"%s.%s\" failed after the following call sequence:\n", t.targetContract.Name(), t.targetMethod.Sig)) + buffer.Append(colors.Bold, "[Call Sequence]", colors.Reset, "\n") + buffer.Append(t.CallSequence().Log().Elements()...) + // If an execution trace is attached then add it to the message if t.propertyTestTrace != nil { - // TODO: Improve formatting in logging PR - msg += fmt.Sprintf("\nProperty test execution trace:\n%s", t.propertyTestTrace.String()) + buffer.Append(colors.Bold, "[Property Test Execution Trace]", colors.Reset, "\n") + buffer.Append(t.propertyTestTrace.Log().Elements()...) } - return msg + return buffer } - return "" + + buffer.Append(colors.GreenBold, fmt.Sprintf("[%s] ", t.Status()), colors.Bold, t.Name(), colors.Reset) + return buffer +} + +// Message obtains a text-based printable message which describes the result of the PropertyTestCase. +func (t *PropertyTestCase) Message() string { + // Internally, we just call log message and convert it to a string. This can be useful for 3rd party apps + return t.LogMessage().String() } // ID obtains a unique identifier for a test result. 
diff --git a/fuzzing/test_case_property_provider.go b/fuzzing/test_case_property_provider.go index c509807b..6bb6d419 100644 --- a/fuzzing/test_case_property_provider.go +++ b/fuzzing/test_case_property_provider.go @@ -2,15 +2,14 @@ package fuzzing import ( "fmt" + "math/big" + "sync" + "github.com/crytic/medusa/fuzzing/calls" "github.com/crytic/medusa/fuzzing/contracts" "github.com/crytic/medusa/fuzzing/executiontracer" - "github.com/ethereum/go-ethereum/accounts/abi" "github.com/ethereum/go-ethereum/core" "golang.org/x/exp/slices" - "math/big" - "strings" - "sync" ) // PropertyTestCaseProvider is a provider for on-chain property tests. @@ -47,6 +46,11 @@ type propertyTestCaseProviderWorkerState struct { // attachPropertyTestCaseProvider attaches a new PropertyTestCaseProvider to the Fuzzer and returns it. func attachPropertyTestCaseProvider(fuzzer *Fuzzer) *PropertyTestCaseProvider { + // If there are no testing prefixes, then there is no reason to attach a test case provider and subscribe to events + if len(fuzzer.config.Fuzzing.Testing.PropertyTesting.TestPrefixes) == 0 { + return nil + } + // Create a test case provider t := &PropertyTestCaseProvider{ fuzzer: fuzzer, @@ -62,20 +66,6 @@ func attachPropertyTestCaseProvider(fuzzer *Fuzzer) *PropertyTestCaseProvider { return t } -// isPropertyTest check whether the method is a property test given potential naming prefixes it must conform to -// and its underlying input/output arguments. -func (t *PropertyTestCaseProvider) isPropertyTest(method abi.Method) bool { - // Loop through all enabled prefixes to find a match - for _, prefix := range t.fuzzer.Config().Fuzzing.Testing.PropertyTesting.TestPrefixes { - if strings.HasPrefix(method.Name, prefix) { - if len(method.Inputs) == 0 && len(method.Outputs) == 1 && method.Outputs[0].Type.T == abi.BoolTy { - return true - } - } - } - return false -} - // checkPropertyTestFailed executes a given property test method to see if it returns a failed status. 
This is used to // facilitate testing of property test methods after every call the Fuzzer makes when testing call sequences. // A boolean indicating whether an execution trace should be captured and returned is provided to the method. @@ -98,11 +88,9 @@ func (t *PropertyTestCaseProvider) checkPropertyTestFailed(worker *FuzzerWorker, var executionResult *core.ExecutionResult var executionTrace *executiontracer.ExecutionTrace if trace { - executionTracer := executiontracer.NewExecutionTracer(worker.fuzzer.contractDefinitions, worker.chain.CheatCodeContracts()) - executionResult, err = worker.Chain().CallContract(msg, nil, executionTracer) - executionTrace = executionTracer.Trace() + executionResult, executionTrace, err = executiontracer.CallWithExecutionTrace(worker.chain, worker.fuzzer.contractDefinitions, msg.ToCoreMessage(), nil) } else { - executionResult, err = worker.Chain().CallContract(msg, nil) + executionResult, err = worker.Chain().CallContract(msg.ToCoreMessage(), nil) } if err != nil { return false, nil, fmt.Errorf("failed to call property test method: %v", err) @@ -110,7 +98,7 @@ func (t *PropertyTestCaseProvider) checkPropertyTestFailed(worker *FuzzerWorker, // If our property test method call failed, we flag a failed test. if executionResult.Failed() { - return true, nil, nil + return true, executionTrace, nil } // Decode our ABI outputs @@ -143,17 +131,12 @@ func (t *PropertyTestCaseProvider) onFuzzerStarting(event FuzzerStartingEvent) e // Create a test case for every property test method. for _, contract := range t.fuzzer.ContractDefinitions() { - // If we're not testing all contracts, verify the current contract is one we specified in our deployment order. - if !t.fuzzer.config.Fuzzing.Testing.TestAllContracts && !slices.Contains(t.fuzzer.config.Fuzzing.DeploymentOrder, contract.Name()) { + // If we're not testing all contracts, verify the current contract is one we specified in our target contracts. 
+ if !t.fuzzer.config.Fuzzing.Testing.TestAllContracts && !slices.Contains(t.fuzzer.config.Fuzzing.TargetContracts, contract.Name()) { continue } - for _, method := range contract.CompiledContract().Abi.Methods { - // Verify this method is a property test method - if !t.isPropertyTest(method) { - continue - } - + for _, method := range contract.PropertyTestMethods { // Create local variables to avoid pointer types in the loop being overridden. contract := contract method := method @@ -175,7 +158,7 @@ func (t *PropertyTestCaseProvider) onFuzzerStarting(event FuzzerStartingEvent) e return nil } -// onFuzzerStarting is the event handler triggered when the Fuzzer is stopping the fuzzing campaign and all workers +// onFuzzerStopping is the event handler triggered when the Fuzzer is stopping the fuzzing campaign and all workers // have been destroyed. It clears state tracked for each FuzzerWorker and sets test cases in "running" states to // "passed". func (t *PropertyTestCaseProvider) onFuzzerStopping(event FuzzerStoppingEvent) error { @@ -247,7 +230,7 @@ func (t *PropertyTestCaseProvider) onWorkerDeployedContractAdded(event FuzzerWor return nil } -// onWorkerDeployedContractAdded is the event handler triggered when a FuzzerWorker detects that a previously deployed +// onWorkerDeployedContractDeleted is the event handler triggered when a FuzzerWorker detects that a previously deployed // contract no longer exists on its underlying chain. It ensures any property test methods which the deployed contract // contained are no longer tracked by the provider for testing. 
func (t *PropertyTestCaseProvider) onWorkerDeployedContractDeleted(event FuzzerWorkerContractDeletedEvent) error { @@ -327,10 +310,10 @@ func (t *PropertyTestCaseProvider) callSequencePostCallTest(worker *FuzzerWorker shrunkenSequenceFailedTest, _, err := t.checkPropertyTestFailed(worker, &workerPropertyTestMethod, false) return shrunkenSequenceFailedTest, err }, - FinishedCallback: func(worker *FuzzerWorker, shrunkenCallSequence calls.CallSequence) error { - // When we're finished shrinking, attach an execution trace to the last call + FinishedCallback: func(worker *FuzzerWorker, shrunkenCallSequence calls.CallSequence, verboseTracing bool) error { + // When we're finished shrinking, attach an execution trace to the last call. If verboseTracing is true, attach to all calls. if len(shrunkenCallSequence) > 0 { - err = shrunkenCallSequence[len(shrunkenCallSequence)-1].AttachExecutionTrace(worker.chain, worker.fuzzer.contractDefinitions) + _, err = calls.ExecuteCallSequenceWithExecutionTracer(worker.chain, worker.fuzzer.contractDefinitions, shrunkenCallSequence, verboseTracing) if err != nil { return err } @@ -349,6 +332,7 @@ func (t *PropertyTestCaseProvider) callSequencePostCallTest(worker *FuzzerWorker testCase.status = TestCaseStatusFailed testCase.callSequence = &shrunkenCallSequence testCase.propertyTestTrace = executionTrace + worker.workerMetrics().failedSequences.Add(worker.workerMetrics().failedSequences, big.NewInt(1)) worker.Fuzzer().ReportTestCaseFinished(testCase) return nil }, diff --git a/fuzzing/testdata/contracts/assertions/assert_allocate_too_much_memory.sol b/fuzzing/testdata/contracts/assertions/assert_allocate_too_much_memory.sol new file mode 100644 index 00000000..ff4be401 --- /dev/null +++ b/fuzzing/testdata/contracts/assertions/assert_allocate_too_much_memory.sol @@ -0,0 +1,9 @@ +// This contract attempts to allocate an excessive amount of memory by creating an array with a length of 2^64 causing a panic. 
+// PanicCodeAllocateTooMuchMemory = 0x41 + +contract TestContract { + function allocateTooMuchMemory() public { + uint256[] memory myArray = new uint256[](2**64); // Allocate too much memory + myArray[2**64 - 1] = 42; + } +} diff --git a/fuzzing/testdata/contracts/assertions/assert_and_property_test.sol b/fuzzing/testdata/contracts/assertions/assert_and_property_test.sol index 90051ba5..03100042 100644 --- a/fuzzing/testdata/contracts/assertions/assert_and_property_test.sol +++ b/fuzzing/testdata/contracts/assertions/assert_and_property_test.sol @@ -5,7 +5,7 @@ contract TestContract { assert(false); } - function fuzz_failing_property() public view returns (bool) { + function property_failing_property() public view returns (bool) { // ASSERTION: fail immediately. return false; } diff --git a/fuzzing/testdata/contracts/assertions/assert_arithmetic_underflow.sol b/fuzzing/testdata/contracts/assertions/assert_arithmetic_underflow.sol new file mode 100644 index 00000000..c4930d04 --- /dev/null +++ b/fuzzing/testdata/contracts/assertions/assert_arithmetic_underflow.sol @@ -0,0 +1,9 @@ +// A call to `arithmeticOverflow` function in this contract would trigger an arithmetic overflow panic +// PanicCodeArithmeticUnderOverflow = 0x11 +contract TestContract { + function arithmeticOverflow() public { + uint8 a = 255; + uint8 b = 1; + uint8 c = a + b; + } +} diff --git a/fuzzing/testdata/contracts/assertions/assert_call_uninitialized_variable.sol b/fuzzing/testdata/contracts/assertions/assert_call_uninitialized_variable.sol new file mode 100644 index 00000000..7a9b64e3 --- /dev/null +++ b/fuzzing/testdata/contracts/assertions/assert_call_uninitialized_variable.sol @@ -0,0 +1,15 @@ +// An attempt to call an uninitialized function pointer would cause a panic +// PanicCodeCallUninitializedVariable = 0x51 + +contract TestContract { + + function uninitializedVariableCall() public returns (int) + { + // Variable containing a function pointer + function (int, int) internal pure 
returns (int) funcPtr; + + // This call will fail because funcPtr is still a zero-initialized function pointer + return funcPtr(4, 5); + } + +} diff --git a/fuzzing/testdata/contracts/assertions/assert_constant_method.sol b/fuzzing/testdata/contracts/assertions/assert_constant_method.sol new file mode 100644 index 00000000..0ddd2fdf --- /dev/null +++ b/fuzzing/testdata/contracts/assertions/assert_constant_method.sol @@ -0,0 +1,148 @@ +// An assertion failure in the constant method should be detected +// Contract updated to solc > 0.8.0 and taken from https://github.com/crytic/echidna/blob/5757f8c3c07d0248cbe1728506ff0f8daccbef12/tests/solidity/assert/fullmath.sol +library FullMath { + /// @notice Calculates floor(a×b÷denominator) with full precision. Throws if result overflows a uint256 or denominator == 0 + /// @param a The multiplicand + /// @param b The multiplier + /// @param denominator The divisor + /// @return result The 256-bit result + /// @dev Credit to Remco Bloemen under MIT license https://xn--2-umb.com/21/muldiv + function mulDiv( + uint256 a, + uint256 b, + uint256 denominator + ) internal pure returns (uint256 result) { + unchecked { + + + // 512-bit multiply [prod1 prod0] = a * b + // Compute the product mod 2**256 and mod 2**256 - 1 + // then use the Chinese Remainder Theorem to reconstruct + // the 512 bit result. The result is stored in two 256 + // variables such that product = prod1 * 2**256 + prod0 + uint256 prod0; // Least significant 256 bits of the product + uint256 prod1; // Most significant 256 bits of the product + assembly { + let mm := mulmod(a, b, not(0)) + prod0 := mul(a, b) + prod1 := sub(sub(mm, prod0), lt(mm, prod0)) + } + + // Handle non-overflow cases, 256 by 256 division + if (prod1 == 0) { + require(denominator > 0); + assembly { + result := div(prod0, denominator) + } + return result; + } + + // Make sure the result is less than 2**256. 
+ // Also prevents denominator == 0 + require(denominator > prod1); + + /////////////////////////////////////////////// + // 512 by 256 division. + /////////////////////////////////////////////// + + // Make division exact by subtracting the remainder from [prod1 prod0] + // Compute remainder using mulmod + uint256 remainder; + assembly { + remainder := mulmod(a, b, denominator) + } + // Subtract 256 bit number from 512 bit number + assembly { + prod1 := sub(prod1, gt(remainder, prod0)) + prod0 := sub(prod0, remainder) + } + + // Factor powers of two out of denominator + // Compute largest power of two divisor of denominator. + // Always >= 1. + uint256 twos = (0 - denominator) & denominator; + // Divide denominator by power of two + assembly { + denominator := div(denominator, twos) + } + + // Divide [prod1 prod0] by the factors of two + assembly { + prod0 := div(prod0, twos) + } + // Shift in bits from prod1 into prod0. For this we need + // to flip `twos` such that it is 2**256 / twos. + // If twos is zero, then it becomes one + assembly { + twos := add(div(sub(0, twos), twos), 1) + } + prod0 |= prod1 * twos; + + // Invert denominator mod 2**256 + // Now that denominator is an odd number, it has an inverse + // modulo 2**256 such that denominator * inv = 1 mod 2**256. + // Compute the inverse by starting with a seed that is correct + // correct for four bits. That is, denominator * inv = 1 mod 2**4 + uint256 inv = (3 * denominator) ^ 2; + // Now use Newton-Raphson iteration to improve the precision. + // Thanks to Hensel's lifting lemma, this also works in modular + // arithmetic, doubling the correct bits in each step. 
+ inv *= 2 - denominator * inv; // inverse mod 2**8 + inv *= 2 - denominator * inv; // inverse mod 2**16 + inv *= 2 - denominator * inv; // inverse mod 2**32 + inv *= 2 - denominator * inv; // inverse mod 2**64 + inv *= 2 - denominator * inv; // inverse mod 2**128 + inv *= 2 - denominator * inv; // inverse mod 2**256 + + // Because the division is now exact we can divide by multiplying + // with the modular inverse of denominator. This will give us the + // correct result modulo 2**256. Since the precoditions guarantee + // that the outcome is less than 2**256, this is the final result. + // We don't need to compute the high bits of the result and prod1 + // is no longer required. + result = prod0 * inv; + return result; + } + } + + /// @notice Calculates ceil(a×b÷denominator) with full precision. Throws if result overflows a uint256 or denominator == 0 + /// @param a The multiplicand + /// @param b The multiplier + /// @param denominator The divisor + /// @return result The 256-bit result + function mulDivRoundingUp( + uint256 a, + uint256 b, + uint256 denominator + ) internal pure returns (uint256 result) { + // BUG + unchecked { + return mulDiv(a, b, denominator) + (mulmod(a, b, denominator) > 0 ? 
1 : 0); + } + } +} + +contract TestContract { + function checkMulDivRoundingUp( + uint256 x, + uint256 y, + uint256 d + ) external pure { + require(d > 0); + uint256 z = FullMath.mulDivRoundingUp(x, y, d); + if (x == 0 || y == 0) { + assert(z == 0); + return; + } + + // recompute x and y via mulDiv of the result of floor(x*y/d), should always be less than original inputs by < d + uint256 x2 = FullMath.mulDiv(z, d, y); + uint256 y2 = FullMath.mulDiv(z, d, x); + assert(x2 >= x); + assert(y2 >= y); + + assert(x2 - x < d); + assert(y2 - y < d); + } + fallback() external {} +} diff --git a/fuzzing/testdata/contracts/assertions/assert_divide_by_zero.sol b/fuzzing/testdata/contracts/assertions/assert_divide_by_zero.sol new file mode 100644 index 00000000..14c1736f --- /dev/null +++ b/fuzzing/testdata/contracts/assertions/assert_divide_by_zero.sol @@ -0,0 +1,10 @@ +// Division operation performed with a divisor of zero would cause a panic +// PanicCodeDivideByZero = 0x12 + +contract TestContract { + function divideByZero() public { + uint8 a = 42; + uint8 b = 0; + uint8 c = a / b; + } +} diff --git a/fuzzing/testdata/contracts/assertions/assert_enum_type_conversion_outofbounds.sol b/fuzzing/testdata/contracts/assertions/assert_enum_type_conversion_outofbounds.sol new file mode 100644 index 00000000..626a3e10 --- /dev/null +++ b/fuzzing/testdata/contracts/assertions/assert_enum_type_conversion_outofbounds.sol @@ -0,0 +1,10 @@ +// Enum type conversion out of bounds would cause a panic +// PanicCodeEnumTypeConversionOutOfBounds = 0x21 +contract TestContract { + enum MyEnum { A, B, C } + + function enumTypeConversionOutOfBounds() public { + uint8 value = 4; // Out of bounds for MyEnum + MyEnum myEnum = MyEnum(value); + } +} diff --git a/fuzzing/testdata/contracts/assertions/assert_incorrect_storage_access.sol b/fuzzing/testdata/contracts/assertions/assert_incorrect_storage_access.sol new file mode 100644 index 00000000..6ff0cabc --- /dev/null +++ 
b/fuzzing/testdata/contracts/assertions/assert_incorrect_storage_access.sol @@ -0,0 +1,11 @@ +// This contract triggers an incorrect storage access panic +// PanicCodeIncorrectStorageAccess = 0x22 + +contract TestContract { + uint256[] public myArray; + + function incorrectStorageAccess() public returns(uint256) { + uint256 index = 7; // Index out of bounds + return myArray[index]; // Incorrect storage access + } +} diff --git a/fuzzing/testdata/contracts/assertions/assert_outofbounds_array_access.sol b/fuzzing/testdata/contracts/assertions/assert_outofbounds_array_access.sol new file mode 100644 index 00000000..79feed60 --- /dev/null +++ b/fuzzing/testdata/contracts/assertions/assert_outofbounds_array_access.sol @@ -0,0 +1,9 @@ +// This contract triggers an out-of-bounds array access panic +// PanicCodeOutOfBoundsArrayAccess = 0x32 + +contract TestContract { + function outOfBoundsArrayAccess() public { + uint256[] memory myArray = new uint256[](5); + uint256 value = myArray[6]; // Out of bounds array access + } +} diff --git a/fuzzing/testdata/contracts/assertions/assert_pop_empty_array.sol b/fuzzing/testdata/contracts/assertions/assert_pop_empty_array.sol new file mode 100644 index 00000000..345cca45 --- /dev/null +++ b/fuzzing/testdata/contracts/assertions/assert_pop_empty_array.sol @@ -0,0 +1,9 @@ +// Popping from an empty array triggers a `PopEmptyArray` panic +// PanicCodePopEmptyArray = 0x31 + +contract TestContract { + uint256[] public myArray; + function popEmptyArray() public { + myArray.pop(); // Pop from empty array + } +} diff --git a/fuzzing/testdata/contracts/chain/tx_out_of_gas.sol b/fuzzing/testdata/contracts/chain/tx_out_of_gas.sol index 96fb55d0..6054a7d8 100644 --- a/fuzzing/testdata/contracts/chain/tx_out_of_gas.sol +++ b/fuzzing/testdata/contracts/chain/tx_out_of_gas.sol @@ -10,7 +10,7 @@ contract TestContract { } } - function fuzz_never_apply_state_when_oog() public view returns (bool) { + function property_never_apply_state_when_oog() public 
view returns (bool) { // ASSERTION: this state should never be applied, as our out of gas error should revert changes. return x == 0; } diff --git a/fuzzing/testdata/contracts/cheat_codes/console_log/console_log.sol b/fuzzing/testdata/contracts/cheat_codes/console_log/console_log.sol new file mode 100644 index 00000000..ddf5ffe4 --- /dev/null +++ b/fuzzing/testdata/contracts/cheat_codes/console_log/console_log.sol @@ -0,0 +1,1568 @@ +// Test console.log capabilities to make sure logging and string formatting are happening as expected +contract TestContract { + + function testConsoleLog() public { + // Log an int256 + int256 i = 2; + console.log(i); + + // Log bytes + bytes memory byteSlice = "hello world"; + console.logBytes(byteSlice); + + // Log fixed bytes + bytes4 fixedBytes = "byte"; + console.logBytes4(fixedBytes); + + // Log a string and int256 while testing string formatting + string memory str = "i is %d"; + console.log(str, i); + + // Test the permutation logic by logging a random permutation and also string formatting + bool b = true; + address addr = address(0); + uint256 u = 100; + str = "%% bool is %t, addr is %s, u is %d"; + console.log(str, b, addr, u); + assert(false); + } +} + +library console { + address constant CONSOLE_ADDRESS = address(0x000000000000000000636F6e736F6c652e6c6f67); + + function _sendLogPayload(bytes memory payload) private view { + uint256 payloadLength = payload.length; + address consoleAddress = CONSOLE_ADDRESS; + /// @solidity memory-safe-assembly + assembly { + let payloadStart := add(payload, 32) + let r := staticcall(gas(), consoleAddress, payloadStart, payloadLength, 0, 0) + } + } + + function log() internal view { + _sendLogPayload(abi.encodeWithSignature("log()")); + } + + function logInt(int256 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(int256)", p0)); + } + + function logUint(uint256 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256)", p0)); + } + + function 
logString(string memory p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string)", p0)); + } + + function logBool(bool p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool)", p0)); + } + + function logAddress(address p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address)", p0)); + } + + function logBytes(bytes memory p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes)", p0)); + } + + function logBytes1(bytes1 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes1)", p0)); + } + + function logBytes2(bytes2 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes2)", p0)); + } + + function logBytes3(bytes3 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes3)", p0)); + } + + function logBytes4(bytes4 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes4)", p0)); + } + + function logBytes5(bytes5 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes5)", p0)); + } + + function logBytes6(bytes6 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes6)", p0)); + } + + function logBytes7(bytes7 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes7)", p0)); + } + + function logBytes8(bytes8 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes8)", p0)); + } + + function logBytes9(bytes9 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes9)", p0)); + } + + function logBytes10(bytes10 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes10)", p0)); + } + + function logBytes11(bytes11 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes11)", p0)); + } + + function logBytes12(bytes12 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes12)", p0)); + } + + function logBytes13(bytes13 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes13)", p0)); + } + + 
function logBytes14(bytes14 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes14)", p0)); + } + + function logBytes15(bytes15 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes15)", p0)); + } + + function logBytes16(bytes16 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes16)", p0)); + } + + function logBytes17(bytes17 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes17)", p0)); + } + + function logBytes18(bytes18 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes18)", p0)); + } + + function logBytes19(bytes19 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes19)", p0)); + } + + function logBytes20(bytes20 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes20)", p0)); + } + + function logBytes21(bytes21 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes21)", p0)); + } + + function logBytes22(bytes22 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes22)", p0)); + } + + function logBytes23(bytes23 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes23)", p0)); + } + + function logBytes24(bytes24 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes24)", p0)); + } + + function logBytes25(bytes25 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes25)", p0)); + } + + function logBytes26(bytes26 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes26)", p0)); + } + + function logBytes27(bytes27 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes27)", p0)); + } + + function logBytes28(bytes28 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes28)", p0)); + } + + function logBytes29(bytes29 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes29)", p0)); + } + + function logBytes30(bytes30 p0) internal view { + 
_sendLogPayload(abi.encodeWithSignature("log(bytes30)", p0)); + } + + function logBytes31(bytes31 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes31)", p0)); + } + + function logBytes32(bytes32 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bytes32)", p0)); + } + + function log(uint256 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256)", p0)); + } + + function log(int256 p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(int256)", p0)); + } + + function log(string memory p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string)", p0)); + } + + function log(bool p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool)", p0)); + } + + function log(address p0) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address)", p0)); + } + + function log(uint256 p0, uint256 p1) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,uint256)", p0, p1)); + } + + function log(uint256 p0, string memory p1) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,string)", p0, p1)); + } + + function log(uint256 p0, bool p1) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool)", p0, p1)); + } + + function log(uint256 p0, address p1) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address)", p0, p1)); + } + + function log(string memory p0, uint256 p1) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,uint256)", p0, p1)); + } + + function log(string memory p0, int256 p1) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,int256)", p0, p1)); + } + + function log(string memory p0, string memory p1) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,string)", p0, p1)); + } + + function log(string memory p0, bool p1) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool)", p0, p1)); + } + + function 
log(string memory p0, address p1) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,address)", p0, p1)); + } + + function log(bool p0, uint256 p1) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256)", p0, p1)); + } + + function log(bool p0, string memory p1) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string)", p0, p1)); + } + + function log(bool p0, bool p1) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool)", p0, p1)); + } + + function log(bool p0, address p1) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address)", p0, p1)); + } + + function log(address p0, uint256 p1) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256)", p0, p1)); + } + + function log(address p0, string memory p1) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,string)", p0, p1)); + } + + function log(address p0, bool p1) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool)", p0, p1)); + } + + function log(address p0, address p1) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,address)", p0, p1)); + } + + function log(uint256 p0, uint256 p1, uint256 p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,uint256,uint256)", p0, p1, p2)); + } + + function log(uint256 p0, uint256 p1, string memory p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,uint256,string)", p0, p1, p2)); + } + + function log(uint256 p0, uint256 p1, bool p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,uint256,bool)", p0, p1, p2)); + } + + function log(uint256 p0, uint256 p1, address p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,uint256,address)", p0, p1, p2)); + } + + function log(uint256 p0, string memory p1, uint256 p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,string,uint256)", p0, p1, 
p2)); + } + + function log(uint256 p0, string memory p1, string memory p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,string,string)", p0, p1, p2)); + } + + function log(uint256 p0, string memory p1, bool p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,string,bool)", p0, p1, p2)); + } + + function log(uint256 p0, string memory p1, address p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,string,address)", p0, p1, p2)); + } + + function log(uint256 p0, bool p1, uint256 p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool,uint256)", p0, p1, p2)); + } + + function log(uint256 p0, bool p1, string memory p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool,string)", p0, p1, p2)); + } + + function log(uint256 p0, bool p1, bool p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool,bool)", p0, p1, p2)); + } + + function log(uint256 p0, bool p1, address p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool,address)", p0, p1, p2)); + } + + function log(uint256 p0, address p1, uint256 p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address,uint256)", p0, p1, p2)); + } + + function log(uint256 p0, address p1, string memory p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address,string)", p0, p1, p2)); + } + + function log(uint256 p0, address p1, bool p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address,bool)", p0, p1, p2)); + } + + function log(uint256 p0, address p1, address p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address,address)", p0, p1, p2)); + } + + function log(string memory p0, uint256 p1, uint256 p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,uint256,uint256)", p0, p1, p2)); + } + + function log(string memory p0, uint256 p1, string memory p2) 
internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,uint256,string)", p0, p1, p2)); + } + + function log(string memory p0, uint256 p1, bool p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,uint256,bool)", p0, p1, p2)); + } + + function log(string memory p0, uint256 p1, address p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,uint256,address)", p0, p1, p2)); + } + + function log(string memory p0, string memory p1, uint256 p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,string,uint256)", p0, p1, p2)); + } + + function log(string memory p0, string memory p1, string memory p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,string,string)", p0, p1, p2)); + } + + function log(string memory p0, string memory p1, bool p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,string,bool)", p0, p1, p2)); + } + + function log(string memory p0, string memory p1, address p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,string,address)", p0, p1, p2)); + } + + function log(string memory p0, bool p1, uint256 p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool,uint256)", p0, p1, p2)); + } + + function log(string memory p0, bool p1, string memory p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool,string)", p0, p1, p2)); + } + + function log(string memory p0, bool p1, bool p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool,bool)", p0, p1, p2)); + } + + function log(string memory p0, bool p1, address p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool,address)", p0, p1, p2)); + } + + function log(string memory p0, address p1, uint256 p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,address,uint256)", p0, p1, p2)); + } + + function log(string memory p0, address p1, string memory p2) internal view 
{ + _sendLogPayload(abi.encodeWithSignature("log(string,address,string)", p0, p1, p2)); + } + + function log(string memory p0, address p1, bool p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,address,bool)", p0, p1, p2)); + } + + function log(string memory p0, address p1, address p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,address,address)", p0, p1, p2)); + } + + function log(bool p0, uint256 p1, uint256 p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256,uint256)", p0, p1, p2)); + } + + function log(bool p0, uint256 p1, string memory p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256,string)", p0, p1, p2)); + } + + function log(bool p0, uint256 p1, bool p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256,bool)", p0, p1, p2)); + } + + function log(bool p0, uint256 p1, address p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256,address)", p0, p1, p2)); + } + + function log(bool p0, string memory p1, uint256 p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string,uint256)", p0, p1, p2)); + } + + function log(bool p0, string memory p1, string memory p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string,string)", p0, p1, p2)); + } + + function log(bool p0, string memory p1, bool p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string,bool)", p0, p1, p2)); + } + + function log(bool p0, string memory p1, address p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string,address)", p0, p1, p2)); + } + + function log(bool p0, bool p1, uint256 p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool,uint256)", p0, p1, p2)); + } + + function log(bool p0, bool p1, string memory p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool,string)", p0, p1, p2)); + } + + function 
log(bool p0, bool p1, bool p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool,bool)", p0, p1, p2)); + } + + function log(bool p0, bool p1, address p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool,address)", p0, p1, p2)); + } + + function log(bool p0, address p1, uint256 p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address,uint256)", p0, p1, p2)); + } + + function log(bool p0, address p1, string memory p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address,string)", p0, p1, p2)); + } + + function log(bool p0, address p1, bool p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address,bool)", p0, p1, p2)); + } + + function log(bool p0, address p1, address p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address,address)", p0, p1, p2)); + } + + function log(address p0, uint256 p1, uint256 p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256,uint256)", p0, p1, p2)); + } + + function log(address p0, uint256 p1, string memory p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256,string)", p0, p1, p2)); + } + + function log(address p0, uint256 p1, bool p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256,bool)", p0, p1, p2)); + } + + function log(address p0, uint256 p1, address p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256,address)", p0, p1, p2)); + } + + function log(address p0, string memory p1, uint256 p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,string,uint256)", p0, p1, p2)); + } + + function log(address p0, string memory p1, string memory p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,string,string)", p0, p1, p2)); + } + + function log(address p0, string memory p1, bool p2) internal view { + 
_sendLogPayload(abi.encodeWithSignature("log(address,string,bool)", p0, p1, p2)); + } + + function log(address p0, string memory p1, address p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,string,address)", p0, p1, p2)); + } + + function log(address p0, bool p1, uint256 p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool,uint256)", p0, p1, p2)); + } + + function log(address p0, bool p1, string memory p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool,string)", p0, p1, p2)); + } + + function log(address p0, bool p1, bool p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool,bool)", p0, p1, p2)); + } + + function log(address p0, bool p1, address p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool,address)", p0, p1, p2)); + } + + function log(address p0, address p1, uint256 p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,address,uint256)", p0, p1, p2)); + } + + function log(address p0, address p1, string memory p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,address,string)", p0, p1, p2)); + } + + function log(address p0, address p1, bool p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,address,bool)", p0, p1, p2)); + } + + function log(address p0, address p1, address p2) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,address,address)", p0, p1, p2)); + } + + function log(uint256 p0, uint256 p1, uint256 p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,uint256,uint256,uint256)", p0, p1, p2, p3)); + } + + function log(uint256 p0, uint256 p1, uint256 p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,uint256,uint256,string)", p0, p1, p2, p3)); + } + + function log(uint256 p0, uint256 p1, uint256 p2, bool p3) internal view { + 
_sendLogPayload(abi.encodeWithSignature("log(uint256,uint256,uint256,bool)", p0, p1, p2, p3)); + } + + function log(uint256 p0, uint256 p1, uint256 p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,uint256,uint256,address)", p0, p1, p2, p3)); + } + + function log(uint256 p0, uint256 p1, string memory p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,uint256,string,uint256)", p0, p1, p2, p3)); + } + + function log(uint256 p0, uint256 p1, string memory p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,uint256,string,string)", p0, p1, p2, p3)); + } + + function log(uint256 p0, uint256 p1, string memory p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,uint256,string,bool)", p0, p1, p2, p3)); + } + + function log(uint256 p0, uint256 p1, string memory p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,uint256,string,address)", p0, p1, p2, p3)); + } + + function log(uint256 p0, uint256 p1, bool p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,uint256,bool,uint256)", p0, p1, p2, p3)); + } + + function log(uint256 p0, uint256 p1, bool p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,uint256,bool,string)", p0, p1, p2, p3)); + } + + function log(uint256 p0, uint256 p1, bool p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,uint256,bool,bool)", p0, p1, p2, p3)); + } + + function log(uint256 p0, uint256 p1, bool p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,uint256,bool,address)", p0, p1, p2, p3)); + } + + function log(uint256 p0, uint256 p1, address p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,uint256,address,uint256)", p0, p1, p2, p3)); + } + + function log(uint256 p0, uint256 p1, address p2, 
string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,uint256,address,string)", p0, p1, p2, p3)); + } + + function log(uint256 p0, uint256 p1, address p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,uint256,address,bool)", p0, p1, p2, p3)); + } + + function log(uint256 p0, uint256 p1, address p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,uint256,address,address)", p0, p1, p2, p3)); + } + + function log(uint256 p0, string memory p1, uint256 p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,string,uint256,uint256)", p0, p1, p2, p3)); + } + + function log(uint256 p0, string memory p1, uint256 p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,string,uint256,string)", p0, p1, p2, p3)); + } + + function log(uint256 p0, string memory p1, uint256 p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,string,uint256,bool)", p0, p1, p2, p3)); + } + + function log(uint256 p0, string memory p1, uint256 p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,string,uint256,address)", p0, p1, p2, p3)); + } + + function log(uint256 p0, string memory p1, string memory p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,string,string,uint256)", p0, p1, p2, p3)); + } + + function log(uint256 p0, string memory p1, string memory p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,string,string,string)", p0, p1, p2, p3)); + } + + function log(uint256 p0, string memory p1, string memory p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,string,string,bool)", p0, p1, p2, p3)); + } + + function log(uint256 p0, string memory p1, string memory p2, address p3) internal view { + 
_sendLogPayload(abi.encodeWithSignature("log(uint256,string,string,address)", p0, p1, p2, p3)); + } + + function log(uint256 p0, string memory p1, bool p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,string,bool,uint256)", p0, p1, p2, p3)); + } + + function log(uint256 p0, string memory p1, bool p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,string,bool,string)", p0, p1, p2, p3)); + } + + function log(uint256 p0, string memory p1, bool p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,string,bool,bool)", p0, p1, p2, p3)); + } + + function log(uint256 p0, string memory p1, bool p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,string,bool,address)", p0, p1, p2, p3)); + } + + function log(uint256 p0, string memory p1, address p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,string,address,uint256)", p0, p1, p2, p3)); + } + + function log(uint256 p0, string memory p1, address p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,string,address,string)", p0, p1, p2, p3)); + } + + function log(uint256 p0, string memory p1, address p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,string,address,bool)", p0, p1, p2, p3)); + } + + function log(uint256 p0, string memory p1, address p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,string,address,address)", p0, p1, p2, p3)); + } + + function log(uint256 p0, bool p1, uint256 p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool,uint256,uint256)", p0, p1, p2, p3)); + } + + function log(uint256 p0, bool p1, uint256 p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool,uint256,string)", p0, p1, p2, p3)); + } + + function log(uint256 p0, bool p1, 
uint256 p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool,uint256,bool)", p0, p1, p2, p3)); + } + + function log(uint256 p0, bool p1, uint256 p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool,uint256,address)", p0, p1, p2, p3)); + } + + function log(uint256 p0, bool p1, string memory p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool,string,uint256)", p0, p1, p2, p3)); + } + + function log(uint256 p0, bool p1, string memory p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool,string,string)", p0, p1, p2, p3)); + } + + function log(uint256 p0, bool p1, string memory p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool,string,bool)", p0, p1, p2, p3)); + } + + function log(uint256 p0, bool p1, string memory p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool,string,address)", p0, p1, p2, p3)); + } + + function log(uint256 p0, bool p1, bool p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool,bool,uint256)", p0, p1, p2, p3)); + } + + function log(uint256 p0, bool p1, bool p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool,bool,string)", p0, p1, p2, p3)); + } + + function log(uint256 p0, bool p1, bool p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool,bool,bool)", p0, p1, p2, p3)); + } + + function log(uint256 p0, bool p1, bool p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool,bool,address)", p0, p1, p2, p3)); + } + + function log(uint256 p0, bool p1, address p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool,address,uint256)", p0, p1, p2, p3)); + } + + function log(uint256 p0, bool p1, address p2, string memory p3) internal 
view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool,address,string)", p0, p1, p2, p3)); + } + + function log(uint256 p0, bool p1, address p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool,address,bool)", p0, p1, p2, p3)); + } + + function log(uint256 p0, bool p1, address p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,bool,address,address)", p0, p1, p2, p3)); + } + + function log(uint256 p0, address p1, uint256 p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address,uint256,uint256)", p0, p1, p2, p3)); + } + + function log(uint256 p0, address p1, uint256 p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address,uint256,string)", p0, p1, p2, p3)); + } + + function log(uint256 p0, address p1, uint256 p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address,uint256,bool)", p0, p1, p2, p3)); + } + + function log(uint256 p0, address p1, uint256 p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address,uint256,address)", p0, p1, p2, p3)); + } + + function log(uint256 p0, address p1, string memory p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address,string,uint256)", p0, p1, p2, p3)); + } + + function log(uint256 p0, address p1, string memory p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address,string,string)", p0, p1, p2, p3)); + } + + function log(uint256 p0, address p1, string memory p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address,string,bool)", p0, p1, p2, p3)); + } + + function log(uint256 p0, address p1, string memory p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address,string,address)", p0, p1, p2, p3)); + } + + function log(uint256 p0, address p1, 
bool p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address,bool,uint256)", p0, p1, p2, p3)); + } + + function log(uint256 p0, address p1, bool p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address,bool,string)", p0, p1, p2, p3)); + } + + function log(uint256 p0, address p1, bool p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address,bool,bool)", p0, p1, p2, p3)); + } + + function log(uint256 p0, address p1, bool p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address,bool,address)", p0, p1, p2, p3)); + } + + function log(uint256 p0, address p1, address p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address,address,uint256)", p0, p1, p2, p3)); + } + + function log(uint256 p0, address p1, address p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address,address,string)", p0, p1, p2, p3)); + } + + function log(uint256 p0, address p1, address p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address,address,bool)", p0, p1, p2, p3)); + } + + function log(uint256 p0, address p1, address p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(uint256,address,address,address)", p0, p1, p2, p3)); + } + + function log(string memory p0, uint256 p1, uint256 p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,uint256,uint256,uint256)", p0, p1, p2, p3)); + } + + function log(string memory p0, uint256 p1, uint256 p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,uint256,uint256,string)", p0, p1, p2, p3)); + } + + function log(string memory p0, uint256 p1, uint256 p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,uint256,uint256,bool)", p0, p1, p2, p3)); + } + + function 
log(string memory p0, uint256 p1, uint256 p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,uint256,uint256,address)", p0, p1, p2, p3)); + } + + function log(string memory p0, uint256 p1, string memory p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,uint256,string,uint256)", p0, p1, p2, p3)); + } + + function log(string memory p0, uint256 p1, string memory p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,uint256,string,string)", p0, p1, p2, p3)); + } + + function log(string memory p0, uint256 p1, string memory p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,uint256,string,bool)", p0, p1, p2, p3)); + } + + function log(string memory p0, uint256 p1, string memory p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,uint256,string,address)", p0, p1, p2, p3)); + } + + function log(string memory p0, uint256 p1, bool p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,uint256,bool,uint256)", p0, p1, p2, p3)); + } + + function log(string memory p0, uint256 p1, bool p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,uint256,bool,string)", p0, p1, p2, p3)); + } + + function log(string memory p0, uint256 p1, bool p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,uint256,bool,bool)", p0, p1, p2, p3)); + } + + function log(string memory p0, uint256 p1, bool p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,uint256,bool,address)", p0, p1, p2, p3)); + } + + function log(string memory p0, uint256 p1, address p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,uint256,address,uint256)", p0, p1, p2, p3)); + } + + function log(string memory p0, uint256 p1, address p2, string memory p3) internal view { + 
_sendLogPayload(abi.encodeWithSignature("log(string,uint256,address,string)", p0, p1, p2, p3)); + } + + function log(string memory p0, uint256 p1, address p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,uint256,address,bool)", p0, p1, p2, p3)); + } + + function log(string memory p0, uint256 p1, address p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,uint256,address,address)", p0, p1, p2, p3)); + } + + function log(string memory p0, string memory p1, uint256 p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,string,uint256,uint256)", p0, p1, p2, p3)); + } + + function log(string memory p0, string memory p1, uint256 p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,string,uint256,string)", p0, p1, p2, p3)); + } + + function log(string memory p0, string memory p1, uint256 p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,string,uint256,bool)", p0, p1, p2, p3)); + } + + function log(string memory p0, string memory p1, uint256 p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,string,uint256,address)", p0, p1, p2, p3)); + } + + function log(string memory p0, string memory p1, string memory p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,string,string,uint256)", p0, p1, p2, p3)); + } + + function log(string memory p0, string memory p1, string memory p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,string,string,string)", p0, p1, p2, p3)); + } + + function log(string memory p0, string memory p1, string memory p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,string,string,bool)", p0, p1, p2, p3)); + } + + function log(string memory p0, string memory p1, string memory p2, address p3) internal view { + 
_sendLogPayload(abi.encodeWithSignature("log(string,string,string,address)", p0, p1, p2, p3)); + } + + function log(string memory p0, string memory p1, bool p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,string,bool,uint256)", p0, p1, p2, p3)); + } + + function log(string memory p0, string memory p1, bool p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,string,bool,string)", p0, p1, p2, p3)); + } + + function log(string memory p0, string memory p1, bool p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,string,bool,bool)", p0, p1, p2, p3)); + } + + function log(string memory p0, string memory p1, bool p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,string,bool,address)", p0, p1, p2, p3)); + } + + function log(string memory p0, string memory p1, address p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,string,address,uint256)", p0, p1, p2, p3)); + } + + function log(string memory p0, string memory p1, address p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,string,address,string)", p0, p1, p2, p3)); + } + + function log(string memory p0, string memory p1, address p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,string,address,bool)", p0, p1, p2, p3)); + } + + function log(string memory p0, string memory p1, address p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,string,address,address)", p0, p1, p2, p3)); + } + + function log(string memory p0, bool p1, uint256 p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool,uint256,uint256)", p0, p1, p2, p3)); + } + + function log(string memory p0, bool p1, uint256 p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool,uint256,string)", p0, p1, p2, 
p3)); + } + + function log(string memory p0, bool p1, uint256 p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool,uint256,bool)", p0, p1, p2, p3)); + } + + function log(string memory p0, bool p1, uint256 p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool,uint256,address)", p0, p1, p2, p3)); + } + + function log(string memory p0, bool p1, string memory p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool,string,uint256)", p0, p1, p2, p3)); + } + + function log(string memory p0, bool p1, string memory p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool,string,string)", p0, p1, p2, p3)); + } + + function log(string memory p0, bool p1, string memory p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool,string,bool)", p0, p1, p2, p3)); + } + + function log(string memory p0, bool p1, string memory p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool,string,address)", p0, p1, p2, p3)); + } + + function log(string memory p0, bool p1, bool p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool,bool,uint256)", p0, p1, p2, p3)); + } + + function log(string memory p0, bool p1, bool p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool,bool,string)", p0, p1, p2, p3)); + } + + function log(string memory p0, bool p1, bool p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool,bool,bool)", p0, p1, p2, p3)); + } + + function log(string memory p0, bool p1, bool p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool,bool,address)", p0, p1, p2, p3)); + } + + function log(string memory p0, bool p1, address p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool,address,uint256)", 
p0, p1, p2, p3)); + } + + function log(string memory p0, bool p1, address p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool,address,string)", p0, p1, p2, p3)); + } + + function log(string memory p0, bool p1, address p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool,address,bool)", p0, p1, p2, p3)); + } + + function log(string memory p0, bool p1, address p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,bool,address,address)", p0, p1, p2, p3)); + } + + function log(string memory p0, address p1, uint256 p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,address,uint256,uint256)", p0, p1, p2, p3)); + } + + function log(string memory p0, address p1, uint256 p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,address,uint256,string)", p0, p1, p2, p3)); + } + + function log(string memory p0, address p1, uint256 p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,address,uint256,bool)", p0, p1, p2, p3)); + } + + function log(string memory p0, address p1, uint256 p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,address,uint256,address)", p0, p1, p2, p3)); + } + + function log(string memory p0, address p1, string memory p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,address,string,uint256)", p0, p1, p2, p3)); + } + + function log(string memory p0, address p1, string memory p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,address,string,string)", p0, p1, p2, p3)); + } + + function log(string memory p0, address p1, string memory p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,address,string,bool)", p0, p1, p2, p3)); + } + + function log(string memory p0, address p1, string memory p2, address p3) 
internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,address,string,address)", p0, p1, p2, p3)); + } + + function log(string memory p0, address p1, bool p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,address,bool,uint256)", p0, p1, p2, p3)); + } + + function log(string memory p0, address p1, bool p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,address,bool,string)", p0, p1, p2, p3)); + } + + function log(string memory p0, address p1, bool p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,address,bool,bool)", p0, p1, p2, p3)); + } + + function log(string memory p0, address p1, bool p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,address,bool,address)", p0, p1, p2, p3)); + } + + function log(string memory p0, address p1, address p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,address,address,uint256)", p0, p1, p2, p3)); + } + + function log(string memory p0, address p1, address p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,address,address,string)", p0, p1, p2, p3)); + } + + function log(string memory p0, address p1, address p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,address,address,bool)", p0, p1, p2, p3)); + } + + function log(string memory p0, address p1, address p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(string,address,address,address)", p0, p1, p2, p3)); + } + + function log(bool p0, uint256 p1, uint256 p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256,uint256,uint256)", p0, p1, p2, p3)); + } + + function log(bool p0, uint256 p1, uint256 p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256,uint256,string)", p0, p1, p2, p3)); + } + + function log(bool 
p0, uint256 p1, uint256 p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256,uint256,bool)", p0, p1, p2, p3)); + } + + function log(bool p0, uint256 p1, uint256 p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256,uint256,address)", p0, p1, p2, p3)); + } + + function log(bool p0, uint256 p1, string memory p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256,string,uint256)", p0, p1, p2, p3)); + } + + function log(bool p0, uint256 p1, string memory p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256,string,string)", p0, p1, p2, p3)); + } + + function log(bool p0, uint256 p1, string memory p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256,string,bool)", p0, p1, p2, p3)); + } + + function log(bool p0, uint256 p1, string memory p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256,string,address)", p0, p1, p2, p3)); + } + + function log(bool p0, uint256 p1, bool p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256,bool,uint256)", p0, p1, p2, p3)); + } + + function log(bool p0, uint256 p1, bool p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256,bool,string)", p0, p1, p2, p3)); + } + + function log(bool p0, uint256 p1, bool p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256,bool,bool)", p0, p1, p2, p3)); + } + + function log(bool p0, uint256 p1, bool p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256,bool,address)", p0, p1, p2, p3)); + } + + function log(bool p0, uint256 p1, address p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256,address,uint256)", p0, p1, p2, p3)); + } + + function log(bool p0, uint256 p1, address p2, string memory 
p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256,address,string)", p0, p1, p2, p3)); + } + + function log(bool p0, uint256 p1, address p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256,address,bool)", p0, p1, p2, p3)); + } + + function log(bool p0, uint256 p1, address p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,uint256,address,address)", p0, p1, p2, p3)); + } + + function log(bool p0, string memory p1, uint256 p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string,uint256,uint256)", p0, p1, p2, p3)); + } + + function log(bool p0, string memory p1, uint256 p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string,uint256,string)", p0, p1, p2, p3)); + } + + function log(bool p0, string memory p1, uint256 p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string,uint256,bool)", p0, p1, p2, p3)); + } + + function log(bool p0, string memory p1, uint256 p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string,uint256,address)", p0, p1, p2, p3)); + } + + function log(bool p0, string memory p1, string memory p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string,string,uint256)", p0, p1, p2, p3)); + } + + function log(bool p0, string memory p1, string memory p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string,string,string)", p0, p1, p2, p3)); + } + + function log(bool p0, string memory p1, string memory p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string,string,bool)", p0, p1, p2, p3)); + } + + function log(bool p0, string memory p1, string memory p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string,string,address)", p0, p1, p2, p3)); + } + + function log(bool p0, string 
memory p1, bool p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string,bool,uint256)", p0, p1, p2, p3)); + } + + function log(bool p0, string memory p1, bool p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string,bool,string)", p0, p1, p2, p3)); + } + + function log(bool p0, string memory p1, bool p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string,bool,bool)", p0, p1, p2, p3)); + } + + function log(bool p0, string memory p1, bool p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string,bool,address)", p0, p1, p2, p3)); + } + + function log(bool p0, string memory p1, address p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string,address,uint256)", p0, p1, p2, p3)); + } + + function log(bool p0, string memory p1, address p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string,address,string)", p0, p1, p2, p3)); + } + + function log(bool p0, string memory p1, address p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string,address,bool)", p0, p1, p2, p3)); + } + + function log(bool p0, string memory p1, address p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,string,address,address)", p0, p1, p2, p3)); + } + + function log(bool p0, bool p1, uint256 p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool,uint256,uint256)", p0, p1, p2, p3)); + } + + function log(bool p0, bool p1, uint256 p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool,uint256,string)", p0, p1, p2, p3)); + } + + function log(bool p0, bool p1, uint256 p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool,uint256,bool)", p0, p1, p2, p3)); + } + + function log(bool p0, bool p1, uint256 p2, address p3) internal 
view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool,uint256,address)", p0, p1, p2, p3)); + } + + function log(bool p0, bool p1, string memory p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool,string,uint256)", p0, p1, p2, p3)); + } + + function log(bool p0, bool p1, string memory p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool,string,string)", p0, p1, p2, p3)); + } + + function log(bool p0, bool p1, string memory p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool,string,bool)", p0, p1, p2, p3)); + } + + function log(bool p0, bool p1, string memory p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool,string,address)", p0, p1, p2, p3)); + } + + function log(bool p0, bool p1, bool p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool,bool,uint256)", p0, p1, p2, p3)); + } + + function log(bool p0, bool p1, bool p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool,bool,string)", p0, p1, p2, p3)); + } + + function log(bool p0, bool p1, bool p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool,bool,bool)", p0, p1, p2, p3)); + } + + function log(bool p0, bool p1, bool p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool,bool,address)", p0, p1, p2, p3)); + } + + function log(bool p0, bool p1, address p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool,address,uint256)", p0, p1, p2, p3)); + } + + function log(bool p0, bool p1, address p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool,address,string)", p0, p1, p2, p3)); + } + + function log(bool p0, bool p1, address p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool,address,bool)", p0, p1, p2, p3)); + 
} + + function log(bool p0, bool p1, address p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,bool,address,address)", p0, p1, p2, p3)); + } + + function log(bool p0, address p1, uint256 p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address,uint256,uint256)", p0, p1, p2, p3)); + } + + function log(bool p0, address p1, uint256 p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address,uint256,string)", p0, p1, p2, p3)); + } + + function log(bool p0, address p1, uint256 p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address,uint256,bool)", p0, p1, p2, p3)); + } + + function log(bool p0, address p1, uint256 p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address,uint256,address)", p0, p1, p2, p3)); + } + + function log(bool p0, address p1, string memory p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address,string,uint256)", p0, p1, p2, p3)); + } + + function log(bool p0, address p1, string memory p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address,string,string)", p0, p1, p2, p3)); + } + + function log(bool p0, address p1, string memory p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address,string,bool)", p0, p1, p2, p3)); + } + + function log(bool p0, address p1, string memory p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address,string,address)", p0, p1, p2, p3)); + } + + function log(bool p0, address p1, bool p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address,bool,uint256)", p0, p1, p2, p3)); + } + + function log(bool p0, address p1, bool p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address,bool,string)", p0, p1, p2, p3)); + } + + function log(bool 
p0, address p1, bool p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address,bool,bool)", p0, p1, p2, p3)); + } + + function log(bool p0, address p1, bool p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address,bool,address)", p0, p1, p2, p3)); + } + + function log(bool p0, address p1, address p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address,address,uint256)", p0, p1, p2, p3)); + } + + function log(bool p0, address p1, address p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address,address,string)", p0, p1, p2, p3)); + } + + function log(bool p0, address p1, address p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address,address,bool)", p0, p1, p2, p3)); + } + + function log(bool p0, address p1, address p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(bool,address,address,address)", p0, p1, p2, p3)); + } + + function log(address p0, uint256 p1, uint256 p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256,uint256,uint256)", p0, p1, p2, p3)); + } + + function log(address p0, uint256 p1, uint256 p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256,uint256,string)", p0, p1, p2, p3)); + } + + function log(address p0, uint256 p1, uint256 p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256,uint256,bool)", p0, p1, p2, p3)); + } + + function log(address p0, uint256 p1, uint256 p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256,uint256,address)", p0, p1, p2, p3)); + } + + function log(address p0, uint256 p1, string memory p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256,string,uint256)", p0, p1, p2, p3)); + } + + function log(address p0, uint256 
p1, string memory p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256,string,string)", p0, p1, p2, p3)); + } + + function log(address p0, uint256 p1, string memory p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256,string,bool)", p0, p1, p2, p3)); + } + + function log(address p0, uint256 p1, string memory p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256,string,address)", p0, p1, p2, p3)); + } + + function log(address p0, uint256 p1, bool p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256,bool,uint256)", p0, p1, p2, p3)); + } + + function log(address p0, uint256 p1, bool p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256,bool,string)", p0, p1, p2, p3)); + } + + function log(address p0, uint256 p1, bool p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256,bool,bool)", p0, p1, p2, p3)); + } + + function log(address p0, uint256 p1, bool p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256,bool,address)", p0, p1, p2, p3)); + } + + function log(address p0, uint256 p1, address p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256,address,uint256)", p0, p1, p2, p3)); + } + + function log(address p0, uint256 p1, address p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256,address,string)", p0, p1, p2, p3)); + } + + function log(address p0, uint256 p1, address p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256,address,bool)", p0, p1, p2, p3)); + } + + function log(address p0, uint256 p1, address p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,uint256,address,address)", p0, p1, p2, p3)); + } + + function 
log(address p0, string memory p1, uint256 p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,string,uint256,uint256)", p0, p1, p2, p3)); + } + + function log(address p0, string memory p1, uint256 p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,string,uint256,string)", p0, p1, p2, p3)); + } + + function log(address p0, string memory p1, uint256 p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,string,uint256,bool)", p0, p1, p2, p3)); + } + + function log(address p0, string memory p1, uint256 p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,string,uint256,address)", p0, p1, p2, p3)); + } + + function log(address p0, string memory p1, string memory p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,string,string,uint256)", p0, p1, p2, p3)); + } + + function log(address p0, string memory p1, string memory p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,string,string,string)", p0, p1, p2, p3)); + } + + function log(address p0, string memory p1, string memory p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,string,string,bool)", p0, p1, p2, p3)); + } + + function log(address p0, string memory p1, string memory p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,string,string,address)", p0, p1, p2, p3)); + } + + function log(address p0, string memory p1, bool p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,string,bool,uint256)", p0, p1, p2, p3)); + } + + function log(address p0, string memory p1, bool p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,string,bool,string)", p0, p1, p2, p3)); + } + + function log(address p0, string memory p1, bool p2, bool p3) internal view { + 
_sendLogPayload(abi.encodeWithSignature("log(address,string,bool,bool)", p0, p1, p2, p3)); + } + + function log(address p0, string memory p1, bool p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,string,bool,address)", p0, p1, p2, p3)); + } + + function log(address p0, string memory p1, address p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,string,address,uint256)", p0, p1, p2, p3)); + } + + function log(address p0, string memory p1, address p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,string,address,string)", p0, p1, p2, p3)); + } + + function log(address p0, string memory p1, address p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,string,address,bool)", p0, p1, p2, p3)); + } + + function log(address p0, string memory p1, address p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,string,address,address)", p0, p1, p2, p3)); + } + + function log(address p0, bool p1, uint256 p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool,uint256,uint256)", p0, p1, p2, p3)); + } + + function log(address p0, bool p1, uint256 p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool,uint256,string)", p0, p1, p2, p3)); + } + + function log(address p0, bool p1, uint256 p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool,uint256,bool)", p0, p1, p2, p3)); + } + + function log(address p0, bool p1, uint256 p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool,uint256,address)", p0, p1, p2, p3)); + } + + function log(address p0, bool p1, string memory p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool,string,uint256)", p0, p1, p2, p3)); + } + + function log(address p0, bool p1, string memory p2, string 
memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool,string,string)", p0, p1, p2, p3)); + } + + function log(address p0, bool p1, string memory p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool,string,bool)", p0, p1, p2, p3)); + } + + function log(address p0, bool p1, string memory p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool,string,address)", p0, p1, p2, p3)); + } + + function log(address p0, bool p1, bool p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool,bool,uint256)", p0, p1, p2, p3)); + } + + function log(address p0, bool p1, bool p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool,bool,string)", p0, p1, p2, p3)); + } + + function log(address p0, bool p1, bool p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool,bool,bool)", p0, p1, p2, p3)); + } + + function log(address p0, bool p1, bool p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool,bool,address)", p0, p1, p2, p3)); + } + + function log(address p0, bool p1, address p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool,address,uint256)", p0, p1, p2, p3)); + } + + function log(address p0, bool p1, address p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool,address,string)", p0, p1, p2, p3)); + } + + function log(address p0, bool p1, address p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool,address,bool)", p0, p1, p2, p3)); + } + + function log(address p0, bool p1, address p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,bool,address,address)", p0, p1, p2, p3)); + } + + function log(address p0, address p1, uint256 p2, uint256 p3) internal view { + 
_sendLogPayload(abi.encodeWithSignature("log(address,address,uint256,uint256)", p0, p1, p2, p3)); + } + + function log(address p0, address p1, uint256 p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,address,uint256,string)", p0, p1, p2, p3)); + } + + function log(address p0, address p1, uint256 p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,address,uint256,bool)", p0, p1, p2, p3)); + } + + function log(address p0, address p1, uint256 p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,address,uint256,address)", p0, p1, p2, p3)); + } + + function log(address p0, address p1, string memory p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,address,string,uint256)", p0, p1, p2, p3)); + } + + function log(address p0, address p1, string memory p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,address,string,string)", p0, p1, p2, p3)); + } + + function log(address p0, address p1, string memory p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,address,string,bool)", p0, p1, p2, p3)); + } + + function log(address p0, address p1, string memory p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,address,string,address)", p0, p1, p2, p3)); + } + + function log(address p0, address p1, bool p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,address,bool,uint256)", p0, p1, p2, p3)); + } + + function log(address p0, address p1, bool p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,address,bool,string)", p0, p1, p2, p3)); + } + + function log(address p0, address p1, bool p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,address,bool,bool)", p0, p1, p2, p3)); + } + + function log(address p0, address p1, bool 
p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,address,bool,address)", p0, p1, p2, p3)); + } + + function log(address p0, address p1, address p2, uint256 p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,address,address,uint256)", p0, p1, p2, p3)); + } + + function log(address p0, address p1, address p2, string memory p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,address,address,string)", p0, p1, p2, p3)); + } + + function log(address p0, address p1, address p2, bool p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,address,address,bool)", p0, p1, p2, p3)); + } + + function log(address p0, address p1, address p2, address p3) internal view { + _sendLogPayload(abi.encodeWithSignature("log(address,address,address,address)", p0, p1, p2, p3)); + } + +} \ No newline at end of file diff --git a/fuzzing/testdata/contracts/cheat_codes/utils/addr.sol b/fuzzing/testdata/contracts/cheat_codes/utils/addr.sol index 6a4cd930..81aae1e4 100644 --- a/fuzzing/testdata/contracts/cheat_codes/utils/addr.sol +++ b/fuzzing/testdata/contracts/cheat_codes/utils/addr.sol @@ -8,11 +8,23 @@ contract TestContract { // Obtain our cheat code contract reference. 
CheatCodes cheats = CheatCodes(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D); - uint256 privateKey = 0x6df21769a2082e03f7e21f6395561279e9a7feb846b2bf740798c794ad196e00; - address expectedAddress = 0xdf8Ef652AdE0FA4790843a726164df8cf8649339; + // Test with random private key + uint256 pkOne = 0x6df21769a2082e03f7e21f6395561279e9a7feb846b2bf740798c794ad196e00; + address addrOne = 0xdf8Ef652AdE0FA4790843a726164df8cf8649339; + address result = cheats.addr(pkOne); + assert(result == addrOne); - // Call cheats.addr - address result = cheats.addr(privateKey); - assert(result == expectedAddress); + // Test with private key that requires padding + uint256 pkTwo = 1; + address addrTwo = 0x7E5F4552091A69125d5DfCb7b8C2659029395Bdf; + result = cheats.addr(pkTwo); + assert(result == addrTwo); + + // Test with zero + uint256 pkThree = 0; + cheats.addr(pkThree); + // A private key of zero is not allowed so if we hit this assertion, then cheats.addr() did not revert which + // is incorrect + assert(false); } } diff --git a/fuzzing/testdata/contracts/cheat_codes/vm/snapshot_and_revert_to.sol b/fuzzing/testdata/contracts/cheat_codes/vm/snapshot_and_revert_to.sol new file mode 100644 index 00000000..577ff194 --- /dev/null +++ b/fuzzing/testdata/contracts/cheat_codes/vm/snapshot_and_revert_to.sol @@ -0,0 +1,58 @@ +// This test ensures that we can take a snapshot of the current state of the testchain and revert to the state at that snapshot using the snapshot and revertTo cheatcodes +pragma solidity ^0.8.0; + +interface CheatCodes { + function warp(uint256) external; + + function deal(address, uint256) external; + + function snapshot() external returns (uint256); + + function revertTo(uint256) external returns (bool); +} + +struct Storage { + uint slot0; + uint slot1; +} + +contract TestContract { + Storage store; + uint256 timestamp; + + function test() public { + // Obtain our cheat code contract reference. 
+ CheatCodes cheats = CheatCodes( + 0x7109709ECfa91a80626fF3989D68f67F5b1DD12D + ); + + store.slot0 = 10; + store.slot1 = 20; + timestamp = block.timestamp; + cheats.deal(address(this), 5 ether); + + // Save state + uint256 snapshot = cheats.snapshot(); + + // Change state + store.slot0 = 300; + store.slot1 = 400; + cheats.deal(address(this), 500 ether); + cheats.warp(12345); + + // Assert that state has been changed + assert(store.slot0 == 300); + assert(store.slot1 == 400); + assert(address(this).balance == 500 ether); + assert(block.timestamp == 12345); + + // Revert to snapshot + cheats.revertTo(snapshot); + + // Ensure state has been reset + assert(store.slot0 == 10); + assert(store.slot1 == 20); + assert(address(this).balance == 5 ether); + assert(block.timestamp == timestamp); + } +} diff --git a/fuzzing/testdata/contracts/cheat_codes/vm/warp.sol b/fuzzing/testdata/contracts/cheat_codes/vm/warp.sol index c70dcf13..536a49ee 100644 --- a/fuzzing/testdata/contracts/cheat_codes/vm/warp.sol +++ b/fuzzing/testdata/contracts/cheat_codes/vm/warp.sol @@ -1,6 +1,6 @@ // This test ensures that the block timestamp can be set with cheat codes interface CheatCodes { - function warp(uint64) external; + function warp(uint256) external; } contract TestContract { @@ -15,5 +15,13 @@ contract TestContract { assert(block.timestamp == 7); cheats.warp(9); assert(block.timestamp == 9); + + // Ensure that a value greater than type(uint64).max will cause warp to revert + // This is not the best way to test it but gets the job done + try cheats.warp(type(uint64).max + 1) { + assert(false); + } catch { + assert(true); + } } } diff --git a/fuzzing/testdata/contracts/corpus_mutation/specific_call_sequence.sol b/fuzzing/testdata/contracts/corpus_mutation/specific_call_sequence.sol index e66c3771..b4356dac 100644 --- a/fuzzing/testdata/contracts/corpus_mutation/specific_call_sequence.sol +++ b/fuzzing/testdata/contracts/corpus_mutation/specific_call_sequence.sol @@ -29,7 +29,7 @@ contract 
TestContract { } } - function fuzz_solve_me() public view returns (bool) { + function property_solve_me() public view returns (bool) { // ASSERTION: The fuzzer should be able to fail this test case and solve all challenges. return index < 7; } diff --git a/fuzzing/testdata/contracts/deployments/deploy_payable_constructors.sol b/fuzzing/testdata/contracts/deployments/deploy_payable_constructors.sol new file mode 100644 index 00000000..f976e48f --- /dev/null +++ b/fuzzing/testdata/contracts/deployments/deploy_payable_constructors.sol @@ -0,0 +1,25 @@ +// This source file provides two contracts to test whether we are able to send ether to payable constructors. FirstContract +// should get no ether and while SecondContract should receive 1 ether. +contract FirstContract { + constructor() payable {} + + function property_contract_has_no_balance() public returns(bool) { + return address(this).balance == 0; + } + + // This exists so the fuzzer knows there are state changing methods to target, instead of quitting early. + function dummy() public {} +} + + +contract SecondContract { + constructor() payable {} + + function property_contract_has_balance() public returns(bool) { + return address(this).balance == 1 ether; + } + + // This exists so the fuzzer knows there are state changing methods to target, instead of quitting early. 
+ function dummy() public {} + +} diff --git a/fuzzing/testdata/contracts/deployments/deployment_order.sol b/fuzzing/testdata/contracts/deployments/deployment_order.sol index efdff8bc..f7c6f3ed 100644 --- a/fuzzing/testdata/contracts/deployments/deployment_order.sol +++ b/fuzzing/testdata/contracts/deployments/deployment_order.sol @@ -15,7 +15,7 @@ contract InheritedFirstContract is FirstContract { y = value + 9; } - function fuzz_never_specific_values() public view returns (bool) { + function property_never_specific_values() public view returns (bool) { // ASSERTION: x should never be 10 at the same time y is 80 return !(x == 10 && y == 80); } @@ -41,7 +41,7 @@ contract InheritedSecondContract is SecondContract { c = value + 7; } - function fuzz_never_specific_values() public view returns (bool) { + function property_never_specific_values() public view returns (bool) { // ASSERTION: a should never be 10 at the same time b is 80 at the same time c is 14 return !(a == 10 && b == 80 && c == 14); } diff --git a/fuzzing/testdata/contracts/deployments/deployment_with_args.sol b/fuzzing/testdata/contracts/deployments/deployment_with_args.sol index 11e62d12..05991266 100644 --- a/fuzzing/testdata/contracts/deployments/deployment_with_args.sol +++ b/fuzzing/testdata/contracts/deployments/deployment_with_args.sol @@ -15,15 +15,15 @@ contract DeploymentWithArgs { z = _z; } - function fuzz_checkX() public returns (bool) { + function property_checkX() public returns (bool) { return x != 123456789; } - function fuzz_checkY() public returns (bool) { + function property_checkY() public returns (bool) { return y != 0x5465; } - function fuzz_checkZ() public returns (bool) { + function property_checkZ() public returns (bool) { return z.a != 0x4d2; } @@ -40,7 +40,7 @@ contract Dependent { deployed = _deployed; } - function fuzz_checkDeployed() public returns (bool) { + function property_checkDeployed() public returns (bool) { return deployed == 
0x0000000000000000000000000000000000000000; } diff --git a/fuzzing/testdata/contracts/deployments/inner_deployment.sol b/fuzzing/testdata/contracts/deployments/inner_deployment.sol index d853687e..dc26b5b7 100644 --- a/fuzzing/testdata/contracts/deployments/inner_deployment.sol +++ b/fuzzing/testdata/contracts/deployments/inner_deployment.sol @@ -1,7 +1,7 @@ // InnerDeploymentFactory deploys InnerDeployment when a method is called after deployment, and verifies the fuzzer can // match bytecode and fail the test appropriately. contract InnerDeployment { - function fuzz_inner_deployment() public view returns (bool) { + function property_inner_deployment() public view returns (bool) { // ASSERTION: Fail immediately. return false; } diff --git a/fuzzing/testdata/contracts/deployments/inner_deployment_on_construction.sol b/fuzzing/testdata/contracts/deployments/inner_deployment_on_construction.sol index 15aec27c..4e08a111 100644 --- a/fuzzing/testdata/contracts/deployments/inner_deployment_on_construction.sol +++ b/fuzzing/testdata/contracts/deployments/inner_deployment_on_construction.sol @@ -6,7 +6,7 @@ contract InnerDeployment { x = 7; } - function fuzz_inner_deployment() public view returns (bool) { + function property_inner_deployment() public view returns (bool) { // ASSERTION: Fail immediately. return false; } diff --git a/fuzzing/testdata/contracts/deployments/inner_inner_deployment.sol b/fuzzing/testdata/contracts/deployments/inner_inner_deployment.sol index 8a1b1140..7718fc82 100644 --- a/fuzzing/testdata/contracts/deployments/inner_inner_deployment.sol +++ b/fuzzing/testdata/contracts/deployments/inner_inner_deployment.sol @@ -2,7 +2,7 @@ // deployed, a method can be used to deploy an InnerInnerDeployment. We verify we can violate an invariant // in a two-layer deep dynamic deployment. 
contract InnerInnerDeployment { - function fuzz_inner_inner_deployment() public view returns (bool) { + function property_inner_inner_deployment() public view returns (bool) { // ASSERTION: Fail immediately. return false; } diff --git a/fuzzing/testdata/contracts/deployments/internal_library.sol b/fuzzing/testdata/contracts/deployments/internal_library.sol index ba03a08a..27201d43 100644 --- a/fuzzing/testdata/contracts/deployments/internal_library.sol +++ b/fuzzing/testdata/contracts/deployments/internal_library.sol @@ -28,7 +28,7 @@ contract TestInternalLibrary { return a + b; } - function fuzz_library_linking_broken() public view returns (bool) { + function property_library_linking_broken() public view returns (bool) { // ASSERTION: We should always be able to compute correctly. return !failedTest; } diff --git a/fuzzing/testdata/contracts/deployments/predeploy_contract.sol b/fuzzing/testdata/contracts/deployments/predeploy_contract.sol new file mode 100644 index 00000000..bca678e4 --- /dev/null +++ b/fuzzing/testdata/contracts/deployments/predeploy_contract.sol @@ -0,0 +1,15 @@ +contract PredeployContract { + function triggerFailure() public { + assert(false); + } +} + +contract TestContract { + PredeployContract predeploy = PredeployContract(address(0x1234)); + + constructor() payable {} + + function testPredeploy() public { + predeploy.triggerFailure(); + } +} diff --git a/fuzzing/testdata/contracts/deployments/testing_scope.sol b/fuzzing/testdata/contracts/deployments/testing_scope.sol index af4f6605..e98aef38 100644 --- a/fuzzing/testdata/contracts/deployments/testing_scope.sol +++ b/fuzzing/testdata/contracts/deployments/testing_scope.sol @@ -6,7 +6,7 @@ contract TestContractChild { assert(false); } - function fuzz_failing_property_test_method_child() public view returns (bool) { + function property_failing_property_test_method_child() public view returns (bool) { return false; } } @@ -22,7 +22,7 @@ contract TestContract { assert(false); } - function 
fuzz_failing_property_test_method() public view returns (bool) { + function property_failing_property_test_method() public view returns (bool) { return false; } } diff --git a/fuzzing/testdata/contracts/execution_tracing/proxy_call.sol b/fuzzing/testdata/contracts/execution_tracing/proxy_call.sol index 20e90237..32ae472c 100644 --- a/fuzzing/testdata/contracts/execution_tracing/proxy_call.sol +++ b/fuzzing/testdata/contracts/execution_tracing/proxy_call.sol @@ -17,7 +17,7 @@ contract TestContract { i = new InnerDeploymentContract(); } - function testDelegateCall() public returns (address) { + function testDelegateCall() public { // Perform a delegate call to set our variables in this contract. (bool success, bytes memory data) = address(i).delegatecall(abi.encodeWithSignature("setXY(uint256,uint256,string)", 123, 321, "Hello from proxy call args!")); diff --git a/fuzzing/testdata/contracts/filtering/target_and_exclude.sol b/fuzzing/testdata/contracts/filtering/target_and_exclude.sol new file mode 100644 index 00000000..65c76dce --- /dev/null +++ b/fuzzing/testdata/contracts/filtering/target_and_exclude.sol @@ -0,0 +1,35 @@ +// This contract ensures that we can target or exclude functions +contract TestContract { + uint odd_counter = 1; + uint even_counter = 2; + event Counter(uint256 value); + function f() public { + odd_counter += 1; + emit Counter(odd_counter); + } + + function g() public { + even_counter += 2; + emit Counter(even_counter); + + } + + function h() public { + odd_counter += 3; + emit Counter(odd_counter); + + } + + function i() public { + even_counter += 4; + emit Counter(even_counter); + } + + function property_a() public view returns (bool) { + return (odd_counter != 100); + } + + function optimize_b() public view returns (int256) { + return -1; + } +} diff --git a/fuzzing/testdata/contracts/optimizations/optimize.sol b/fuzzing/testdata/contracts/optimizations/optimize.sol new file mode 100644 index 00000000..3be1e27a --- /dev/null +++ 
b/fuzzing/testdata/contracts/optimizations/optimize.sol @@ -0,0 +1,14 @@ +contract TestContract { + int256 input; + + function set(int256 _input) public { + input = _input; + } + + function optimize_opt_linear() public view returns (int256) { + if (input > -4242) + return -input; + else + return 0; + } +} diff --git a/fuzzing/testdata/contracts/value_generation/ast_value_extraction.sol b/fuzzing/testdata/contracts/value_generation/ast_value_extraction.sol new file mode 100644 index 00000000..02f2e99c --- /dev/null +++ b/fuzzing/testdata/contracts/value_generation/ast_value_extraction.sol @@ -0,0 +1,48 @@ +// This contract verifies the fuzzer can extract AST literals of different subdenominations from the file. +contract TestContract { + function addressValues() public { + address x = 0x7109709ECfa91a80626fF3989D68f67F5b1DD12D; + assert(x != address(0x1234567890123456789012345678901234567890)); + } + function uintValues() public { + // Use all integer denoms + uint x = 111; + x = 1 wei; + x = 2 gwei; + //x = 3 szabo; + //x = 4 finney; + x = 5 ether; + x = 6 seconds; + x = 7 minutes; + x = 8 hours; + x = 9 days; + x = 10 weeks; + //x = 11 years; + + // Dummy assertion that should always pass. + assert(x != 0); + } + function intValues() public { + // Use all integer denoms + int x = -111; + x = -1 wei; + x = -2 gwei; + //x = -3 szabo; + //x = -4 finney; + x = -5 ether; + x = -6 seconds; + x = -7 minutes; + x = -8 hours; + x = -9 days; + x = -10 weeks; + //x = -11 years; + + // Dummy assertion that should always pass. 
+ assert(x != 0); + } + function stringValues() public { + string memory s = "testString"; + s = "testString2"; + assert(true); + } +} diff --git a/fuzzing/testdata/contracts/value_generation/generate_all_types.sol b/fuzzing/testdata/contracts/value_generation/generate_all_types.sol index a4d0114c..e9a6fa75 100644 --- a/fuzzing/testdata/contracts/value_generation/generate_all_types.sol +++ b/fuzzing/testdata/contracts/value_generation/generate_all_types.sol @@ -24,7 +24,7 @@ contract GenerateAllTypes { s = ""; } - function fuzz_never_fail() public view returns (bool) { + function property_never_fail() public view returns (bool) { // ASSERTION: never fail, to keep testing value generation return true; } diff --git a/fuzzing/testdata/contracts/value_generation/match_addr_contract.sol b/fuzzing/testdata/contracts/value_generation/match_addr_contract.sol index 27654a16..9a5cdb7e 100644 --- a/fuzzing/testdata/contracts/value_generation/match_addr_contract.sol +++ b/fuzzing/testdata/contracts/value_generation/match_addr_contract.sol @@ -6,7 +6,7 @@ contract TestContract { a = value; } - function fuzz_never_specific_values() public view returns (bool) { + function property_never_specific_values() public view returns (bool) { // ASSERTION: a should not be the contract's address itself. return !(a == address(this)); } diff --git a/fuzzing/testdata/contracts/value_generation/match_addr_exact.sol b/fuzzing/testdata/contracts/value_generation/match_addr_exact.sol index 385e2bfb..b46d594e 100644 --- a/fuzzing/testdata/contracts/value_generation/match_addr_exact.sol +++ b/fuzzing/testdata/contracts/value_generation/match_addr_exact.sol @@ -12,7 +12,7 @@ contract TestContract { y = value; } - function fuzz_never_specific_values() public view returns (bool) { + function property_never_specific_values() public view returns (bool) { // ASSERTION: x and y should not equal the exact addresses below. 
return !(x == address(0x12345) && y == address(7)); } diff --git a/fuzzing/testdata/contracts/value_generation/match_addr_sender.sol b/fuzzing/testdata/contracts/value_generation/match_addr_sender.sol index 6be91398..b67d80e4 100644 --- a/fuzzing/testdata/contracts/value_generation/match_addr_sender.sol +++ b/fuzzing/testdata/contracts/value_generation/match_addr_sender.sol @@ -8,7 +8,7 @@ contract TestContract { sender = msg.sender; } - function fuzz_never_specific_values() public view returns (bool) { + function property_never_specific_values() public view returns (bool) { // ASSERTION: a should not be sender's address who set it. return a != sender; } diff --git a/fuzzing/testdata/contracts/value_generation/match_ints_xy.sol b/fuzzing/testdata/contracts/value_generation/match_ints_xy.sol index 2cb5ca2b..3aeff78e 100644 --- a/fuzzing/testdata/contracts/value_generation/match_ints_xy.sol +++ b/fuzzing/testdata/contracts/value_generation/match_ints_xy.sol @@ -12,7 +12,7 @@ contract TestContract { } - function fuzz_never_specific_values() public view returns (bool) { + function property_never_specific_values() public view returns (bool) { // ASSERTION: x should never be -10 at the same time y is -62 return !(x == -10 && y == -62); } diff --git a/fuzzing/testdata/contracts/value_generation/match_payable_xy.sol b/fuzzing/testdata/contracts/value_generation/match_payable_xy.sol index 68221d87..b885c42e 100644 --- a/fuzzing/testdata/contracts/value_generation/match_payable_xy.sol +++ b/fuzzing/testdata/contracts/value_generation/match_payable_xy.sol @@ -11,7 +11,7 @@ contract TestContract { paidAmount2 = msg.value; } - function fuzz_never_pay_exact_amounts() public view returns (bool) { + function property_never_pay_exact_amounts() public view returns (bool) { // ASSERTION: paid amounts should never equal the exact numbers below. 
return !(paidAmount == 7777 && paidAmount2 == 8888); } diff --git a/fuzzing/testdata/contracts/value_generation/match_string_exact.sol b/fuzzing/testdata/contracts/value_generation/match_string_exact.sol index 6dda6770..80f15fae 100644 --- a/fuzzing/testdata/contracts/value_generation/match_string_exact.sol +++ b/fuzzing/testdata/contracts/value_generation/match_string_exact.sol @@ -6,7 +6,7 @@ contract TestContract { s = value; } - function fuzz_never_specific_values() public view returns (bool) { + function property_never_specific_values() public view returns (bool) { // ASSERTION: s should not be the MAGIC_STRING return keccak256(abi.encodePacked((s))) != keccak256(abi.encodePacked((MAGIC_STRING))); } diff --git a/fuzzing/testdata/contracts/value_generation/match_structs_xy.sol b/fuzzing/testdata/contracts/value_generation/match_structs_xy.sol index cc6d8495..817e079b 100644 --- a/fuzzing/testdata/contracts/value_generation/match_structs_xy.sol +++ b/fuzzing/testdata/contracts/value_generation/match_structs_xy.sol @@ -23,7 +23,7 @@ contract TestContract { s = ts; } - function fuzz_never_specific_values() public view returns (bool) { + function property_never_specific_values() public view returns (bool) { // ASSERTION: x should never be 10 at the same time y is 80 return !(s.x == 10 && s.i.y == 80); } diff --git a/fuzzing/testdata/contracts/value_generation/match_uints_xy.sol b/fuzzing/testdata/contracts/value_generation/match_uints_xy.sol index a064f423..d465708a 100644 --- a/fuzzing/testdata/contracts/value_generation/match_uints_xy.sol +++ b/fuzzing/testdata/contracts/value_generation/match_uints_xy.sol @@ -12,7 +12,7 @@ contract TestContract { } - function fuzz_never_specific_values() public view returns (bool) { + function property_never_specific_values() public view returns (bool) { // ASSERTION: x should never be 10 at the same time y is 80 return !(x == 10 && y == 80); } diff --git a/fuzzing/testdata/contracts/vm_tests/block_hash_store_check.sol 
b/fuzzing/testdata/contracts/vm_tests/block_hash_store_check.sol index d9f3d8df..cbc757b8 100644 --- a/fuzzing/testdata/contracts/vm_tests/block_hash_store_check.sol +++ b/fuzzing/testdata/contracts/vm_tests/block_hash_store_check.sol @@ -49,7 +49,7 @@ contract TestContract { lastBlockNumber = block.number; } - function fuzz_violate_block_hash_continuity() public view returns (bool) { + function property_violate_block_hash_continuity() public view returns (bool) { // ASSERTION: we fail if our blockHash works as expected so our fuzzer will catch it. return !failedTest; } diff --git a/fuzzing/testdata/contracts/vm_tests/block_number_increasing.sol b/fuzzing/testdata/contracts/vm_tests/block_number_increasing.sol index cbcecab4..b8a6450c 100644 --- a/fuzzing/testdata/contracts/vm_tests/block_number_increasing.sol +++ b/fuzzing/testdata/contracts/vm_tests/block_number_increasing.sol @@ -10,7 +10,7 @@ contract TestContract { // This method does nothing but is left exposed so it can be called by the fuzzer to advance block.number } - function fuzz_increase_block_number_by_10() public view returns (bool) { + function property_increase_block_number_by_10() public view returns (bool) { // ASSERTION: block number should never increase more than 10 (we expect failure) return !(block.number - startingBlockNumber >= 10); } diff --git a/fuzzing/testdata/contracts/vm_tests/block_timestamp_increasing.sol b/fuzzing/testdata/contracts/vm_tests/block_timestamp_increasing.sol index f14bbaa7..7d6b0961 100644 --- a/fuzzing/testdata/contracts/vm_tests/block_timestamp_increasing.sol +++ b/fuzzing/testdata/contracts/vm_tests/block_timestamp_increasing.sol @@ -10,7 +10,7 @@ contract TestContract { // This method does nothing but is left exposed so it can be called by the fuzzer to advance blocks/timestamps. 
} - function fuzz_increase_block_timestamp() public view returns (bool) { + function property_increase_block_timestamp() public view returns (bool) { // ASSERTION: block timestamp should never increase more than 10 (we expect failure) return !(block.timestamp - startingBlockTimestamp >= 10); } diff --git a/fuzzing/utils/fuzz_method_utils.go b/fuzzing/utils/fuzz_method_utils.go new file mode 100644 index 00000000..70b77a12 --- /dev/null +++ b/fuzzing/utils/fuzz_method_utils.go @@ -0,0 +1,52 @@ +package utils + +import ( + "strings" + + compilationTypes "github.com/crytic/medusa/compilation/types" + "github.com/ethereum/go-ethereum/accounts/abi" +) + +// IsOptimizationTest checks whether the method is an optimization test given potential naming prefixes it must conform to +// and its underlying input/output arguments. +func IsOptimizationTest(method abi.Method, prefixes []string) bool { + // Loop through all enabled prefixes to find a match + for _, prefix := range prefixes { + if strings.HasPrefix(method.Name, prefix) { + // An optimization test must take no inputs and return an int256 + if len(method.Inputs) == 0 && len(method.Outputs) == 1 && method.Outputs[0].Type.T == abi.IntTy && method.Outputs[0].Type.Size == 256 { + return true + } + } + } + return false +} + +// IsPropertyTest checks whether the method is a property test given potential naming prefixes it must conform to +// and its underlying input/output arguments. 
+func IsPropertyTest(method abi.Method, prefixes []string) bool { + // Loop through all enabled prefixes to find a match + for _, prefix := range prefixes { + // The property test must simply have the right prefix and take no inputs and return a boolean + if strings.HasPrefix(method.Name, prefix) { + if len(method.Inputs) == 0 && len(method.Outputs) == 1 && method.Outputs[0].Type.T == abi.BoolTy { + return true + } + } + } + return false +} + +// BinTestByType sorts a contract's methods by whether they are assertion, property, or optimization tests. +func BinTestByType(contract *compilationTypes.CompiledContract, propertyTestPrefixes, optimizationTestPrefixes []string, testViewMethods bool) (assertionTests, propertyTests, optimizationTests []abi.Method) { + for _, method := range contract.Abi.Methods { + if IsPropertyTest(method, propertyTestPrefixes) { + propertyTests = append(propertyTests, method) + } else if IsOptimizationTest(method, optimizationTestPrefixes) { + optimizationTests = append(optimizationTests, method) + } else if !method.IsConstant() || testViewMethods { + assertionTests = append(assertionTests, method) + } + } + return assertionTests, propertyTests, optimizationTests +} diff --git a/fuzzing/valuegeneration/abi_values.go b/fuzzing/valuegeneration/abi_values.go index b7b91cc5..c0610efc 100644 --- a/fuzzing/valuegeneration/abi_values.go +++ b/fuzzing/valuegeneration/abi_values.go @@ -3,6 +3,7 @@ package valuegeneration import ( "encoding/hex" "fmt" + "github.com/crytic/medusa/logging" "math/big" "reflect" "strconv" @@ -93,13 +94,16 @@ func GenerateAbiValue(generator ValueGenerator, inputType *abi.Type) any { // - Mappings cannot be used in public/external methods and must reference storage, so we shouldn't ever // see cases of it unless Solidity was updated in the future. // - FixedPoint types are currently unsupported. 
- panic(fmt.Sprintf("attempt to generate function argument of unsupported type: '%s'", inputType.String())) + + err := fmt.Errorf("attempt to generate function argument of unsupported type: '%s'", inputType.String()) + logging.GlobalLogger.Panic("Failed to generate abi value", err) + return nil } } // MutateAbiValue takes an ABI packable input value, alongside its type definition and a value generator, to mutate // existing ABI input values. -func MutateAbiValue(generator ValueGenerator, inputType *abi.Type, value any) (any, error) { +func MutateAbiValue(generator ValueGenerator, mutator ValueMutator, inputType *abi.Type, value any) (any, error) { // Switch on the type of value and mutate it recursively. switch inputType.T { case abi.AddressTy: @@ -107,38 +111,38 @@ func MutateAbiValue(generator ValueGenerator, inputType *abi.Type, value any) (a if !ok { return nil, fmt.Errorf("could not mutate address input as the value provided is not an address type") } - return generator.MutateAddress(addr), nil + return mutator.MutateAddress(addr), nil case abi.UintTy: if inputType.Size == 64 { v, ok := value.(uint64) if !ok { return nil, fmt.Errorf("could not mutate uint%v input as the value provided is not of the correct type", inputType.Size) } - return generator.MutateInteger(new(big.Int).SetUint64(v), false, inputType.Size).Uint64(), nil + return mutator.MutateInteger(new(big.Int).SetUint64(v), false, inputType.Size).Uint64(), nil } else if inputType.Size == 32 { v, ok := value.(uint32) if !ok { return nil, fmt.Errorf("could not mutate uint%v input as the value provided is not of the correct type", inputType.Size) } - return uint32(generator.MutateInteger(new(big.Int).SetUint64(uint64(v)), false, inputType.Size).Uint64()), nil + return uint32(mutator.MutateInteger(new(big.Int).SetUint64(uint64(v)), false, inputType.Size).Uint64()), nil } else if inputType.Size == 16 { v, ok := value.(uint16) if !ok { return nil, fmt.Errorf("could not mutate uint%v input as the value 
provided is not of the correct type", inputType.Size) } - return uint16(generator.MutateInteger(new(big.Int).SetUint64(uint64(v)), false, inputType.Size).Uint64()), nil + return uint16(mutator.MutateInteger(new(big.Int).SetUint64(uint64(v)), false, inputType.Size).Uint64()), nil } else if inputType.Size == 8 { v, ok := value.(uint8) if !ok { return nil, fmt.Errorf("could not mutate uint%v input as the value provided is not of the correct type", inputType.Size) } - return uint8(generator.MutateInteger(new(big.Int).SetUint64(uint64(v)), false, inputType.Size).Uint64()), nil + return uint8(mutator.MutateInteger(new(big.Int).SetUint64(uint64(v)), false, inputType.Size).Uint64()), nil } else { v, ok := value.(*big.Int) if !ok { return nil, fmt.Errorf("could not mutate uint%v input as the value provided is not of the correct type", inputType.Size) } - return generator.MutateInteger(new(big.Int).Set(v), false, inputType.Size), nil + return mutator.MutateInteger(new(big.Int).Set(v), false, inputType.Size), nil } case abi.IntTy: if inputType.Size == 64 { @@ -146,55 +150,55 @@ func MutateAbiValue(generator ValueGenerator, inputType *abi.Type, value any) (a if !ok { return nil, fmt.Errorf("could not mutate int%v input as the value provided is not of the correct type", inputType.Size) } - return generator.MutateInteger(new(big.Int).SetInt64(v), true, inputType.Size).Int64(), nil + return mutator.MutateInteger(new(big.Int).SetInt64(v), true, inputType.Size).Int64(), nil } else if inputType.Size == 32 { v, ok := value.(int32) if !ok { return nil, fmt.Errorf("could not mutate int%v input as the value provided is not of the correct type", inputType.Size) } - return int32(generator.MutateInteger(new(big.Int).SetInt64(int64(v)), true, inputType.Size).Int64()), nil + return int32(mutator.MutateInteger(new(big.Int).SetInt64(int64(v)), true, inputType.Size).Int64()), nil } else if inputType.Size == 16 { v, ok := value.(int16) if !ok { return nil, fmt.Errorf("could not mutate int%v 
input as the value provided is not of the correct type", inputType.Size) } - return int16(generator.MutateInteger(new(big.Int).SetInt64(int64(v)), true, inputType.Size).Int64()), nil + return int16(mutator.MutateInteger(new(big.Int).SetInt64(int64(v)), true, inputType.Size).Int64()), nil } else if inputType.Size == 8 { v, ok := value.(int8) if !ok { return nil, fmt.Errorf("could not mutate int%v input as the value provided is not of the correct type", inputType.Size) } - return int8(generator.MutateInteger(new(big.Int).SetInt64(int64(v)), true, inputType.Size).Int64()), nil + return int8(mutator.MutateInteger(new(big.Int).SetInt64(int64(v)), true, inputType.Size).Int64()), nil } else { v, ok := value.(*big.Int) if !ok { return nil, fmt.Errorf("could not mutate int%v input as the value provided is not of the correct type", inputType.Size) } - return generator.MutateInteger(new(big.Int).Set(v), true, inputType.Size), nil + return mutator.MutateInteger(new(big.Int).Set(v), true, inputType.Size), nil } case abi.BoolTy: v, ok := value.(bool) if !ok { return nil, fmt.Errorf("could not mutate boolean input as the value provided is not a boolean type") } - return generator.MutateBool(v), nil + return mutator.MutateBool(v), nil case abi.StringTy: v, ok := value.(string) if !ok { return nil, fmt.Errorf("could not mutate string input as the value provided is not a string type") } - return generator.MutateString(v), nil + return mutator.MutateString(v), nil case abi.BytesTy: v, ok := value.([]byte) if !ok { return nil, fmt.Errorf("could not mutate dynamic-sized bytes input as the value provided is not a byte slice type") } - return generator.MutateBytes(v), nil + return mutator.MutateBytes(v), nil case abi.FixedBytesTy: // This needs to be an array type, not a slice. But arrays can't be dynamically defined without reflection. // We opt to keep our API for generators simple, creating the array here and copying elements from a slice. 
valueAsSlice := reflectionutils.ArrayToSlice(reflect.ValueOf(value)).([]byte) - mutatedValue := generator.MutateFixedBytes(valueAsSlice) + mutatedValue := mutator.MutateFixedBytes(valueAsSlice) mutatedValueAsArray := reflectionutils.SliceToArray(reflect.ValueOf(mutatedValue)) mutatedValueAsArrayLen := reflect.ValueOf(mutatedValueAsArray).Len() if mutatedValueAsArrayLen != inputType.Size { @@ -207,7 +211,7 @@ func MutateAbiValue(generator ValueGenerator, inputType *abi.Type, value any) (a array := reflectionutils.CopyReflectedType(reflect.ValueOf(value)) // Mutate our array structure first - mutatedValues := generator.MutateArray(reflectionutils.GetReflectedArrayValues(array), true) + mutatedValues := mutator.MutateArray(reflectionutils.GetReflectedArrayValues(array), true) // Create a new array of the appropriate size array = reflect.New(reflect.ArrayOf(array.Len(), array.Type().Elem())).Elem() @@ -222,7 +226,7 @@ func MutateAbiValue(generator ValueGenerator, inputType *abi.Type, value any) (a generatedElement := GenerateAbiValue(generator, inputType.Elem) reflectedElement.Set(reflect.ValueOf(generatedElement)) } else { - mutatedElement, err := MutateAbiValue(generator, inputType.Elem, mutatedValues[i]) + mutatedElement, err := MutateAbiValue(generator, mutator, inputType.Elem, mutatedValues[i]) if err != nil { return nil, fmt.Errorf("could not mutate array input as the value generator encountered an error: %v", err) } @@ -237,7 +241,7 @@ func MutateAbiValue(generator ValueGenerator, inputType *abi.Type, value any) (a slice := reflectionutils.CopyReflectedType(reflect.ValueOf(value)) // Mutate our slice structure first - mutatedValues := generator.MutateArray(reflectionutils.GetReflectedArrayValues(slice), false) + mutatedValues := mutator.MutateArray(reflectionutils.GetReflectedArrayValues(slice), false) // Create a new slice of the appropriate size slice = reflect.MakeSlice(reflect.SliceOf(slice.Type().Elem()), len(mutatedValues), len(mutatedValues)) @@ -252,7 
+256,7 @@ func MutateAbiValue(generator ValueGenerator, inputType *abi.Type, value any) (a generatedElement := GenerateAbiValue(generator, inputType.Elem) reflectedElement.Set(reflect.ValueOf(generatedElement)) } else { - mutatedElement, err := MutateAbiValue(generator, inputType.Elem, mutatedValues[i]) + mutatedElement, err := MutateAbiValue(generator, mutator, inputType.Elem, mutatedValues[i]) if err != nil { return nil, fmt.Errorf("could not mutate slice input as the value generator encountered an error: %v", err) } @@ -267,7 +271,7 @@ func MutateAbiValue(generator ValueGenerator, inputType *abi.Type, value any) (a for i := 0; i < len(inputType.TupleElems); i++ { field := tuple.Field(i) fieldValue := reflectionutils.GetField(field) - mutatedValue, err := MutateAbiValue(generator, inputType.TupleElems[i], fieldValue) + mutatedValue, err := MutateAbiValue(generator, mutator, inputType.TupleElems[i], fieldValue) if err != nil { return nil, fmt.Errorf("could not mutate struct/tuple input as the value generator encountered an error: %v", err) } @@ -439,21 +443,17 @@ func encodeABIArgumentToString(inputType *abi.Type, value any) (string, error) { } return strconv.QuoteToASCII(str), nil case abi.BytesTy: - // Prepare a byte array. Return as a string enclosed with "". The returned string uses Go escape - // sequences (\t, \n, \xFF, \u0100) for non-ASCII characters and non-printable characters. b, ok := value.([]byte) if !ok { return "", fmt.Errorf("could not encode dynamic-sized bytes as the value provided is not of the correct type") } - return strconv.QuoteToASCII(string(b)), nil + // Convert the dynamic-sized byte array to a hex string + return hex.EncodeToString(b), nil case abi.FixedBytesTy: - // Prepare a fixed-size byte array. Return as a string enclosed with "". The returned string uses Go escape - // sequences (\t, \n, \xFF, \u0100) for non-ASCII characters and non-printable characters. - // TODO: Error checking to ensure `value` is of the correct type.
b := reflectionutils.ArrayToSlice(reflect.ValueOf(value)).([]byte) - // Convert the byte array to a string and use the QuoteToASCII method to format the string with Go escape sequences. - return strconv.QuoteToASCII(string(b)), nil + // Convert the byte array to a hex string + return hex.EncodeToString(b), nil case abi.ArrayTy: // Prepare an array. Return as a string enclosed with [], where specific elements are comma-separated. reflectedArray := reflect.ValueOf(value) @@ -675,7 +675,7 @@ func DecodeJSONArgumentsFromMap(inputs abi.Arguments, values map[string]any, dep for i, input := range inputs { value, ok := values[input.Name] if !ok { - err := fmt.Errorf("constructor argument not provided for: name: %v", input.Name) + err := fmt.Errorf("value not provided for argument: name: %v", input.Name) return nil, err } arg, err := decodeJSONArgument(&input.Type, value, deployedContractAddr) diff --git a/fuzzing/valuegeneration/abi_values_test.go b/fuzzing/valuegeneration/abi_values_test.go index 8a926656..30c374ae 100644 --- a/fuzzing/valuegeneration/abi_values_test.go +++ b/fuzzing/valuegeneration/abi_values_test.go @@ -247,7 +247,7 @@ func TestABIRoundtripEncodingAllTypes(t *testing.T) { // re-encoded data matches the originally encoded data.
func TestABIGenerationAndMutation(t *testing.T) { // Create a value generator - valueGenConfig := &MutatingValueGeneratorConfig{ + mutationalGeneratorConfig := &MutationalValueGeneratorConfig{ MinMutationRounds: 0, MaxMutationRounds: 1, GenerateRandomAddressBias: 0.5, @@ -273,7 +273,7 @@ func TestABIGenerationAndMutation(t *testing.T) { GenerateRandomStringMaxSize: 100, }, } - valueGenerator := NewMutatingValueGenerator(valueGenConfig, NewValueSet(), rand.New(rand.NewSource(time.Now().UnixNano()))) + mutationalGenerator := NewMutationalValueGenerator(mutationalGeneratorConfig, NewValueSet(), rand.New(rand.NewSource(time.Now().UnixNano()))) // Obtain our test ABI arguments args := getTestABIArguments() @@ -283,10 +283,10 @@ func TestABIGenerationAndMutation(t *testing.T) { // Test each argument round trip serialization with different generated values (iterate a number of times). for i := 0; i < 5; i++ { // Generate a value for this argument - value := GenerateAbiValue(valueGenerator, &arg.Type) + value := GenerateAbiValue(mutationalGenerator, &arg.Type) // Mutate and ensure no error occurred. - mutatedValue, err := MutateAbiValue(valueGenerator, &arg.Type, value) + mutatedValue, err := MutateAbiValue(mutationalGenerator, mutationalGenerator, &arg.Type, value) assert.NoError(t, err) // Verify the types of the value and mutated value are the same diff --git a/fuzzing/valuegeneration/generator.go b/fuzzing/valuegeneration/generator.go new file mode 100644 index 00000000..78518977 --- /dev/null +++ b/fuzzing/valuegeneration/generator.go @@ -0,0 +1,31 @@ +package valuegeneration + +import ( + "github.com/ethereum/go-ethereum/common" + "math/big" +) + +// ValueGenerator represents an interface for a provider used to generate function inputs and call arguments for use +// in fuzzing campaigns. +type ValueGenerator interface { + // GenerateAddress generates/selects an address to use when populating inputs. 
+ GenerateAddress() common.Address + + // GenerateArrayOfLength generates/selects an array length to use when populating inputs. + GenerateArrayOfLength() int + + // GenerateBool generates/selects a bool to use when populating inputs. + GenerateBool() bool + + // GenerateBytes generates/selects a dynamic-sized byte array to use when populating inputs. + GenerateBytes() []byte + + // GenerateFixedBytes generates/selects a fixed-sized byte array to use when populating inputs. + GenerateFixedBytes(length int) []byte + + // GenerateString generates/selects a dynamic-sized string to use when populating inputs. + GenerateString() string + + // GenerateInteger generates/selects an integer to use when populating inputs. + GenerateInteger(signed bool, bitLength int) *big.Int +} diff --git a/fuzzing/valuegeneration/value_generator_mutating.go b/fuzzing/valuegeneration/generator_mutational.go similarity index 83% rename from fuzzing/valuegeneration/value_generator_mutating.go rename to fuzzing/valuegeneration/generator_mutational.go index baa80970..bf12ba8c 100644 --- a/fuzzing/valuegeneration/value_generator_mutating.go +++ b/fuzzing/valuegeneration/generator_mutational.go @@ -8,13 +8,13 @@ import ( "math/rand" ) -// MutatingValueGenerator is a provider used to generate function inputs and call arguments using mutation-based -// approaches against items within a base_value_set.ValueSet, such as AST literals. -type MutatingValueGenerator struct { - // config describes the configuration defining value generation parameters. - config *MutatingValueGeneratorConfig +// MutationalValueGenerator represents a ValueGenerator and ValueMutator for function inputs and call arguments. It +// leverages values from a ValueSet (e.g. AST literals) to generate new values or mutate existing ones. +type MutationalValueGenerator struct { + // config describes the configuration defining value generation and mutation parameters. 
+ config *MutationalValueGeneratorConfig - // ValueSet contains a set of values which the ValueGenerator may use to aid in value generation and mutation + // valueSet contains a set of values which the ValueGenerator may use to aid in value generation and mutation // operations. valueSet *ValueSet @@ -22,8 +22,8 @@ type MutatingValueGenerator struct { *RandomValueGenerator } -// MutatingValueGeneratorConfig defines the operating parameters for a MutatingValueGenerator. -type MutatingValueGeneratorConfig struct { +// MutationalValueGeneratorConfig defines the operating parameters for a MutationalValueGenerator. +type MutationalValueGeneratorConfig struct { // MinMutationRounds describes the minimum amount of mutations which should occur when generating a value. // This parameter is used when generating a new value by mutating a value in the value set, or when mutating // an existing value. @@ -34,7 +34,7 @@ type MutatingValueGeneratorConfig struct { MaxMutationRounds int // GenerateRandomIntegerBias defines the probability in which an address generated by the value generator is - // entirely random, rather than selected from the ValueSet provided by MutatingValueGenerator.SetValueSet. Value + // entirely random, rather than selected from the MutationalValueGenerator's ValueSet. // range is [0.0, 1.0]. GenerateRandomAddressBias float32 // GenerateRandomIntegerBias defines the probability in which an integer generated by the value generator is @@ -82,10 +82,11 @@ type MutatingValueGeneratorConfig struct { *RandomValueGeneratorConfig } -// NewMutatingValueGenerator creates a new MutatingValueGenerator using a provided base_value_set.ValueSet to seed base-values for mutation. -func NewMutatingValueGenerator(config *MutatingValueGeneratorConfig, valueSet *ValueSet, randomProvider *rand.Rand) *MutatingValueGenerator { +// NewMutationalValueGenerator creates a new MutationalValueGenerator using a provided ValueSet to seed base-values for +// mutation. 
+func NewMutationalValueGenerator(config *MutationalValueGeneratorConfig, valueSet *ValueSet, randomProvider *rand.Rand) *MutationalValueGenerator { // Create and return our generator - generator := &MutatingValueGenerator{ + generator := &MutationalValueGenerator{ config: config, valueSet: valueSet, RandomValueGenerator: NewRandomValueGenerator(config.RandomValueGeneratorConfig, randomProvider), @@ -101,8 +102,8 @@ func NewMutatingValueGenerator(config *MutatingValueGeneratorConfig, valueSet *V // getMutationParams takes a length of inputs and returns an initial input index to start with as a base value, as well // as a random number of mutations which should be performed (within the mutation range specified by the -// ValueGeneratorConfig). -func (g *MutatingValueGenerator) getMutationParams(inputsLen int) (int, int) { +// MutationalValueGeneratorConfig). +func (g *MutationalValueGenerator) getMutationParams(inputsLen int) (int, int) { inputIdx := g.randomProvider.Intn(inputsLen) mutationCount := g.randomProvider.Intn(((g.config.MaxMutationRounds - g.config.MinMutationRounds) + 1) + g.config.MinMutationRounds) return inputIdx, mutationCount @@ -111,20 +112,20 @@ func (g *MutatingValueGenerator) getMutationParams(inputsLen int) (int, int) { // integerMutationMethods define methods which take a big integer and a set of inputs and // transform the integer with a random input and operation. This is used in a loop to create // mutated integer values. 
-var integerMutationMethods = []func(*MutatingValueGenerator, *big.Int, ...*big.Int) *big.Int{ - func(g *MutatingValueGenerator, x *big.Int, inputs ...*big.Int) *big.Int { +var integerMutationMethods = []func(*MutationalValueGenerator, *big.Int, ...*big.Int) *big.Int{ + func(g *MutationalValueGenerator, x *big.Int, inputs ...*big.Int) *big.Int { // Add a random input return big.NewInt(0).Add(x, inputs[g.randomProvider.Intn(len(inputs))]) }, - func(g *MutatingValueGenerator, x *big.Int, inputs ...*big.Int) *big.Int { + func(g *MutationalValueGenerator, x *big.Int, inputs ...*big.Int) *big.Int { // Subtract a random input return big.NewInt(0).Sub(x, inputs[g.randomProvider.Intn(len(inputs))]) }, - func(g *MutatingValueGenerator, x *big.Int, inputs ...*big.Int) *big.Int { + func(g *MutationalValueGenerator, x *big.Int, inputs ...*big.Int) *big.Int { // Multiply a random input return big.NewInt(0).Mul(x, inputs[g.randomProvider.Intn(len(inputs))]) }, - func(g *MutatingValueGenerator, x *big.Int, inputs ...*big.Int) *big.Int { + func(g *MutationalValueGenerator, x *big.Int, inputs ...*big.Int) *big.Int { // Divide a random input divisor := inputs[g.randomProvider.Intn(len(inputs))] if divisor.Cmp(big.NewInt(0)) == 0 { @@ -132,7 +133,7 @@ var integerMutationMethods = []func(*MutatingValueGenerator, *big.Int, ...*big.I } return big.NewInt(0).Div(x, divisor) }, - func(g *MutatingValueGenerator, x *big.Int, inputs ...*big.Int) *big.Int { + func(g *MutationalValueGenerator, x *big.Int, inputs ...*big.Int) *big.Int { // Modulo divide a random input divisor := inputs[g.randomProvider.Intn(len(inputs))] if divisor.Cmp(big.NewInt(0)) == 0 { @@ -144,7 +145,7 @@ var integerMutationMethods = []func(*MutatingValueGenerator, *big.Int, ...*big.I // mutateIntegerInternal takes an integer input and returns either a random new integer, or a mutated value based off the input. 
// If a nil input is provided, this method uses an existing base value set value as the starting point for mutation. -func (g *MutatingValueGenerator) mutateIntegerInternal(i *big.Int, signed bool, bitLength int) *big.Int { +func (g *MutationalValueGenerator) mutateIntegerInternal(i *big.Int, signed bool, bitLength int) *big.Int { // If our bias directs us to, use the random generator instead randomGeneratorDecision := g.randomProvider.Float32() if randomGeneratorDecision < g.config.GenerateRandomIntegerBias { @@ -188,9 +189,9 @@ func (g *MutatingValueGenerator) mutateIntegerInternal(i *big.Int, signed bool, // bytesMutationMethods define methods which take an initial bytes and a set of inputs to transform the input. The // transformed input is returned. This is used in a loop to mutate byte slices. -var bytesMutationMethods = []func(*MutatingValueGenerator, []byte, ...[]byte) []byte{ +var bytesMutationMethods = []func(*MutationalValueGenerator, []byte, ...[]byte) []byte{ // Replace a random index with a random byte - func(g *MutatingValueGenerator, b []byte, inputs ...[]byte) []byte { + func(g *MutationalValueGenerator, b []byte, inputs ...[]byte) []byte { // Generate a random byte and replace an existing byte in our array with it. If our array has no bytes, we add // it. randomByteValue := byte(g.randomProvider.Intn(256)) @@ -202,7 +203,7 @@ var bytesMutationMethods = []func(*MutatingValueGenerator, []byte, ...[]byte) [] return b }, // Flip a random bit in it. - func(g *MutatingValueGenerator, b []byte, inputs ...[]byte) []byte { + func(g *MutationalValueGenerator, b []byte, inputs ...[]byte) []byte { // If we have bytes in our array, flip a random bit in a random byte. Otherwise, we add a random byte. 
if len(b) > 0 { i := g.randomProvider.Intn(len(b)) @@ -213,7 +214,7 @@ var bytesMutationMethods = []func(*MutatingValueGenerator, []byte, ...[]byte) [] return b }, // Add a random byte at a random position - func(g *MutatingValueGenerator, b []byte, inputs ...[]byte) []byte { + func(g *MutationalValueGenerator, b []byte, inputs ...[]byte) []byte { // Generate a random byte to insert by := byte(g.randomProvider.Intn(256)) @@ -232,7 +233,7 @@ var bytesMutationMethods = []func(*MutatingValueGenerator, []byte, ...[]byte) [] } }, // Remove a random byte - func(g *MutatingValueGenerator, b []byte, inputs ...[]byte) []byte { + func(g *MutationalValueGenerator, b []byte, inputs ...[]byte) []byte { // If we have no bytes to remove, do nothing. if len(b) == 0 { return b @@ -246,7 +247,7 @@ var bytesMutationMethods = []func(*MutatingValueGenerator, []byte, ...[]byte) [] // mutateBytesInternal takes a byte array and returns either a random new byte array, or a mutated value based off the // input. // If a nil input is provided, this method uses an existing base value set value as the starting point for mutation. -func (g *MutatingValueGenerator) mutateBytesInternal(b []byte) []byte { +func (g *MutationalValueGenerator) mutateBytesInternal(b []byte) []byte { // If we have no inputs or our bias directs us to, use the random generator instead inputs := g.valueSet.Bytes() randomGeneratorDecision := g.randomProvider.Float32() @@ -273,9 +274,9 @@ func (g *MutatingValueGenerator) mutateBytesInternal(b []byte) []byte { // stringMutationMethods define methods which take an initial string and a set of inputs to transform the input. The // transformed input is returned. This is used in a loop to mutate strings. 
-var stringMutationMethods = []func(*MutatingValueGenerator, string, ...string) string{ +var stringMutationMethods = []func(*MutationalValueGenerator, string, ...string) string{ // Replace a random index with a random character - func(g *MutatingValueGenerator, s string, inputs ...string) string { + func(g *MutationalValueGenerator, s string, inputs ...string) string { // Generate a random rune randomRune := rune(32 + g.randomProvider.Intn(95)) @@ -290,7 +291,7 @@ var stringMutationMethods = []func(*MutatingValueGenerator, string, ...string) s return string(r) }, // Flip a random bit - func(g *MutatingValueGenerator, s string, inputs ...string) string { + func(g *MutationalValueGenerator, s string, inputs ...string) string { // If the string is empty, simply return a new one with a randomly added character. r := []rune(s) if len(r) == 0 { @@ -303,7 +304,7 @@ var stringMutationMethods = []func(*MutatingValueGenerator, string, ...string) s return string(r) }, // Insert a random character at a random position - func(g *MutatingValueGenerator, s string, inputs ...string) string { + func(g *MutationalValueGenerator, s string, inputs ...string) string { // Create a random character. c := string(rune(32 + g.randomProvider.Intn(95))) @@ -317,7 +318,7 @@ var stringMutationMethods = []func(*MutatingValueGenerator, string, ...string) s return s[:i] + c + s[i+1:] }, // Remove a random character - func(g *MutatingValueGenerator, s string, inputs ...string) string { + func(g *MutationalValueGenerator, s string, inputs ...string) string { // If we have no characters to remove, do nothing if len(s) == 0 { return s @@ -331,7 +332,7 @@ var stringMutationMethods = []func(*MutatingValueGenerator, string, ...string) s // mutateStringInternal takes a string and returns either a random new string, or a mutated value based off the input. // If a nil input is provided, this method uses an existing base value set value as the starting point for mutation. 
-func (g *MutatingValueGenerator) mutateStringInternal(s *string) string { +func (g *MutationalValueGenerator) mutateStringInternal(s *string) string { // If we have no inputs or our bias directs us to, use the random generator instead inputs := g.valueSet.Strings() randomGeneratorDecision := g.randomProvider.Float32() @@ -357,7 +358,7 @@ func (g *MutatingValueGenerator) mutateStringInternal(s *string) string { } // GenerateAddress obtains an existing address from its underlying value set or generates a random one. -func (g *MutatingValueGenerator) GenerateAddress() common.Address { +func (g *MutationalValueGenerator) GenerateAddress() common.Address { // If our bias directs us to, use the random generator instead randomGeneratorDecision := g.randomProvider.Float32() if randomGeneratorDecision < g.config.GenerateRandomAddressBias { @@ -376,7 +377,7 @@ func (g *MutatingValueGenerator) GenerateAddress() common.Address { } // MutateAddress takes an address input and sometimes returns a mutated value based off the input. -func (g *MutatingValueGenerator) MutateAddress(addr common.Address) common.Address { +func (g *MutationalValueGenerator) MutateAddress(addr common.Address) common.Address { // Determine whether to perform mutations against this input or just return it as-is. randomGeneratorDecision := g.randomProvider.Float32() if randomGeneratorDecision < g.config.MutateAddressProbability { @@ -388,7 +389,7 @@ func (g *MutatingValueGenerator) MutateAddress(addr common.Address) common.Addre // MutateArray takes a dynamic or fixed sized array as input, and returns a mutated value based off of the input. // Returns the mutated value. If any element of the returned array is nil, the value generator will be called upon // to generate it new. 
-func (g *MutatingValueGenerator) MutateArray(value []any, fixedLength bool) []any { +func (g *MutationalValueGenerator) MutateArray(value []any, fixedLength bool) []any { // Determine whether to perform mutations against this input or just return it as-is. randomGeneratorDecision := g.randomProvider.Float32() if randomGeneratorDecision < g.config.MutateArrayStructureProbability { @@ -403,7 +404,7 @@ func (g *MutatingValueGenerator) MutateArray(value []any, fixedLength bool) []an } // MutateBool takes a boolean input and returns a mutated value based off the input. -func (g *MutatingValueGenerator) MutateBool(bl bool) bool { +func (g *MutationalValueGenerator) MutateBool(bl bool) bool { // Determine whether to perform mutations against this input or just return it as-is. randomGeneratorDecision := g.randomProvider.Float32() if randomGeneratorDecision < g.config.MutateBoolProbability { @@ -413,12 +414,12 @@ func (g *MutatingValueGenerator) MutateBool(bl bool) bool { } // GenerateBytes generates bytes and returns them. -func (g *MutatingValueGenerator) GenerateBytes() []byte { +func (g *MutationalValueGenerator) GenerateBytes() []byte { return g.mutateBytesInternal(nil) } // MutateBytes takes a dynamic-sized byte array input and returns a mutated value based off the input. -func (g *MutatingValueGenerator) MutateBytes(b []byte) []byte { +func (g *MutationalValueGenerator) MutateBytes(b []byte) []byte { // Determine whether to perform mutations against this input or just return it as-is. randomGeneratorDecision := g.randomProvider.Float32() if randomGeneratorDecision < g.config.MutateBytesProbability { @@ -434,7 +435,7 @@ func (g *MutatingValueGenerator) MutateBytes(b []byte) []byte { } // MutateFixedBytes takes a fixed-sized byte array input and returns a mutated value based off the input. 
-func (g *MutatingValueGenerator) MutateFixedBytes(b []byte) []byte { +func (g *MutationalValueGenerator) MutateFixedBytes(b []byte) []byte { // Determine whether to perform mutations against this input or just return it as-is. randomGeneratorDecision := g.randomProvider.Float32() if randomGeneratorDecision < g.config.MutateFixedBytesProbability { @@ -444,12 +445,12 @@ func (g *MutatingValueGenerator) MutateFixedBytes(b []byte) []byte { } // GenerateString generates strings and returns them. -func (g *MutatingValueGenerator) GenerateString() string { +func (g *MutationalValueGenerator) GenerateString() string { return g.mutateStringInternal(nil) } // MutateString takes a string input and returns a mutated value based off the input. -func (g *MutatingValueGenerator) MutateString(s string) string { +func (g *MutationalValueGenerator) MutateString(s string) string { // Determine whether to perform mutations against this input or just return it as-is. randomGeneratorDecision := g.randomProvider.Float32() if randomGeneratorDecision < g.config.MutateStringProbability { @@ -465,14 +466,14 @@ func (g *MutatingValueGenerator) MutateString(s string) string { } // GenerateInteger generates an integer of the provided properties and returns a big.Int representing it. -func (g *MutatingValueGenerator) GenerateInteger(signed bool, bitLength int) *big.Int { +func (g *MutationalValueGenerator) GenerateInteger(signed bool, bitLength int) *big.Int { // Call our internal mutation method with no starting input. This will generate a new input. return g.mutateIntegerInternal(nil, signed, bitLength) } // MutateInteger takes an integer input and applies optional mutations to the provided value. // Returns an optionally mutated copy of the input. 
-func (g *MutatingValueGenerator) MutateInteger(i *big.Int, signed bool, bitLength int) *big.Int { +func (g *MutationalValueGenerator) MutateInteger(i *big.Int, signed bool, bitLength int) *big.Int { // Determine whether to perform mutations against this input or just return it as-is. randomGeneratorDecision := g.randomProvider.Float32() if randomGeneratorDecision < g.config.MutateIntegerProbability { diff --git a/fuzzing/valuegeneration/value_generator_random.go b/fuzzing/valuegeneration/generator_random.go similarity index 90% rename from fuzzing/valuegeneration/value_generator_random.go rename to fuzzing/valuegeneration/generator_random.go index fe77cd88..eb9de140 100644 --- a/fuzzing/valuegeneration/value_generator_random.go +++ b/fuzzing/valuegeneration/generator_random.go @@ -7,9 +7,8 @@ import ( "math/rand" ) -// RandomValueGenerator represents an interface for a provider used to generate transaction fields and call arguments -// using a random provider. As such it may not be accurate in many test results with tightly-bound pre-conditions. -// This provider does not mutate existing values and will leave them unaltered. +// RandomValueGenerator represents a ValueGenerator used to generate transaction fields and call arguments with values +// provided by a random number generator. type RandomValueGenerator struct { // config describes the configuration defining value generation parameters. config *RandomValueGeneratorConfig @@ -34,7 +33,7 @@ type RandomValueGeneratorConfig struct { GenerateRandomStringMaxSize int } -// NewRandomValueGenerator creates a new RandomValueGenerator with a new random provider. +// NewRandomValueGenerator creates a new RandomValueGenerator. 
func NewRandomValueGenerator(config *RandomValueGeneratorConfig, randomProvider *rand.Rand) *RandomValueGenerator { // Create and return our generator generator := &RandomValueGenerator{ @@ -44,11 +43,6 @@ func NewRandomValueGenerator(config *RandomValueGeneratorConfig, randomProvider return generator } -// RandomProvider returns the internal random provider used for value generation. -func (g *RandomValueGenerator) RandomProvider() *rand.Rand { - return g.randomProvider -} - // GenerateAddress generates a random address to use when populating inputs. func (g *RandomValueGenerator) GenerateAddress() common.Address { // Generate random bytes of the address length, then convert it to an address. diff --git a/fuzzing/valuegeneration/value_generator_interface.go b/fuzzing/valuegeneration/mutator.go similarity index 51% rename from fuzzing/valuegeneration/value_generator_interface.go rename to fuzzing/valuegeneration/mutator.go index c230ae22..a045bc02 100644 --- a/fuzzing/valuegeneration/value_generator_interface.go +++ b/fuzzing/valuegeneration/mutator.go @@ -3,49 +3,31 @@ package valuegeneration import ( "github.com/ethereum/go-ethereum/common" "math/big" - "math/rand" ) -// ValueGenerator represents an interface for a provider used to generate function inputs and call arguments for use +// ValueMutator represents an interface for a provider used to mutate function inputs and call arguments for use // in fuzzing campaigns. -type ValueGenerator interface { - // RandomProvider returns the internal random provider used for value generation. - RandomProvider() *rand.Rand - - // GenerateAddress generates/selects an address to use when populating inputs. - GenerateAddress() common.Address +type ValueMutator interface { // MutateAddress takes an address input and returns a mutated value based off the input. MutateAddress(addr common.Address) common.Address - // GenerateArrayOfLength generates/selects an array length to use when populating inputs. 
- GenerateArrayOfLength() int // MutateArray takes a dynamic or fixed sized array as input, and returns a mutated value based off of the input. // Returns the mutated value. If any element of the returned array is nil, the value generator will be called upon - // to generate it new. + // to generate a new value in its place. MutateArray(value []any, fixedLength bool) []any - // GenerateBool generates/selects a bool to use when populating inputs. - GenerateBool() bool // MutateBool takes a boolean input and returns a mutated value based off the input. MutateBool(bl bool) bool - // GenerateBytes generates/selects a dynamic-sized byte array to use when populating inputs. - GenerateBytes() []byte // MutateBytes takes a dynamic-sized byte array input and returns a mutated value based off the input. MutateBytes(b []byte) []byte - // GenerateFixedBytes generates/selects a fixed-sized byte array to use when populating inputs. - GenerateFixedBytes(length int) []byte // MutateFixedBytes takes a fixed-sized byte array input and returns a mutated value based off the input. MutateFixedBytes(b []byte) []byte - // GenerateString generates/selects a dynamic-sized string to use when populating inputs. - GenerateString() string // MutateString takes a string input and returns a mutated value based off the input. MutateString(s string) string - // GenerateInteger generates/selects an integer to use when populating inputs. - GenerateInteger(signed bool, bitLength int) *big.Int // MutateInteger takes an integer input and returns a mutated value based off the input. 
MutateInteger(i *big.Int, signed bool, bitLength int) *big.Int } diff --git a/fuzzing/valuegeneration/mutator_shrinking.go b/fuzzing/valuegeneration/mutator_shrinking.go new file mode 100644 index 00000000..20b7ff49 --- /dev/null +++ b/fuzzing/valuegeneration/mutator_shrinking.go @@ -0,0 +1,195 @@ +package valuegeneration + +import ( + "github.com/crytic/medusa/utils" + "github.com/ethereum/go-ethereum/common" + "math/big" + "math/rand" +) + +// ShrinkingValueMutator represents a ValueMutator used to shrink function inputs and call arguments. +type ShrinkingValueMutator struct { + // config describes the configuration defining value mutation parameters. + config *ShrinkingValueMutatorConfig + + // valueSet contains a set of values which the ValueGenerator may use to aid in value generation and mutation + // operations. + valueSet *ValueSet + + // randomProvider offers a source of random data. + randomProvider *rand.Rand +} + +// ShrinkingValueMutatorConfig defines the operating parameters for a ShrinkingValueMutator. +type ShrinkingValueMutatorConfig struct { + // ShrinkValueProbability is the probability that any shrinkable value will be shrunk/mutated when a mutation + // method is invoked. + ShrinkValueProbability float32 +} + +// NewShrinkingValueMutator creates a new ShrinkingValueMutator using a ValueSet to seed base-values for mutation. +func NewShrinkingValueMutator(config *ShrinkingValueMutatorConfig, valueSet *ValueSet, randomProvider *rand.Rand) *ShrinkingValueMutator { + // Create and return our generator + generator := &ShrinkingValueMutator{ + config: config, + valueSet: valueSet, + randomProvider: randomProvider, + } + + // Ensure some initial values this mutator will depend on for basic mutations to the set. 
+ generator.valueSet.AddInteger(big.NewInt(0)) + generator.valueSet.AddInteger(big.NewInt(1)) + generator.valueSet.AddInteger(big.NewInt(2)) + return generator +} + +// MutateAddress takes an address input and sometimes returns a mutated value based off the input. +// This type is not mutated by the ShrinkingValueMutator. +func (g *ShrinkingValueMutator) MutateAddress(addr common.Address) common.Address { + return addr +} + +// MutateArray takes a dynamic or fixed sized array as input, and returns a mutated value based off of the input. +// Returns the mutated value. If any element of the returned array is nil, the value generator will be called upon +// to generate it new. +// This type is not mutated by the ShrinkingValueMutator. +func (g *ShrinkingValueMutator) MutateArray(value []any, fixedLength bool) []any { + return value +} + +// MutateBool takes a boolean input and returns a mutated value based off the input. +// This type is not mutated by the ShrinkingValueMutator. +func (g *ShrinkingValueMutator) MutateBool(bl bool) bool { + return bl +} + +// MutateFixedBytes takes a fixed-sized byte array input and returns a mutated value based off the input. +// This type is not mutated by the ShrinkingValueMutator. +func (g *ShrinkingValueMutator) MutateFixedBytes(b []byte) []byte { + return b +} + +// bytesShrinkingMethods define methods which take an initial bytes and a set of inputs to transform the input. The +// transformed input is returned. +var bytesShrinkingMethods = []func(*ShrinkingValueMutator, []byte) []byte{ + // Replace a random index with a zero byte + func(g *ShrinkingValueMutator, b []byte) []byte { + if len(b) > 0 { + b[g.randomProvider.Intn(len(b))] = 0 + } + return b + }, + // Remove a random byte + func(g *ShrinkingValueMutator, b []byte) []byte { + // If we have no bytes to remove, do nothing. + if len(b) == 0 { + return b + } + + i := g.randomProvider.Intn(len(b)) + return append(b[:i], b[i+1:]...) 
+ }, +} + +// MutateBytes takes a dynamic-sized byte array input and returns a mutated value based off the input. +func (g *ShrinkingValueMutator) MutateBytes(b []byte) []byte { + randomGeneratorDecision := g.randomProvider.Float32() + if randomGeneratorDecision < g.config.ShrinkValueProbability { + // Mutate the data for our desired number of rounds + input := bytesShrinkingMethods[g.randomProvider.Intn(len(bytesShrinkingMethods))](g, b) + return input + } + return b +} + +// integerShrinkingMethods define methods which take a big integer and a set of inputs and +// transform the integer with a random input and operation. +var integerShrinkingMethods = []func(*ShrinkingValueMutator, *big.Int, ...*big.Int) *big.Int{ + func(g *ShrinkingValueMutator, x *big.Int, inputs ...*big.Int) *big.Int { + // If our base value is positive, we subtract from it. If it's positive, we add to it. + // If it's zero, we leave it unchanged. + r := big.NewInt(0) + if x.Cmp(r) > 0 { + r = r.Sub(x, inputs[g.randomProvider.Intn(len(inputs))]) + } else if x.Cmp(r) < 0 { + r = r.Add(x, inputs[g.randomProvider.Intn(len(inputs))]) + } + return r + + }, + func(g *ShrinkingValueMutator, x *big.Int, inputs ...*big.Int) *big.Int { + // Divide by two + return big.NewInt(0).Div(x, big.NewInt(2)) + }, +} + +// MutateInteger takes an integer input and applies optional mutations to the provided value. +// Returns an optionally mutated copy of the input. +func (g *ShrinkingValueMutator) MutateInteger(i *big.Int, signed bool, bitLength int) *big.Int { + randomGeneratorDecision := g.randomProvider.Float32() + if randomGeneratorDecision < g.config.ShrinkValueProbability { + // Calculate our integer bounds + min, max := utils.GetIntegerConstraints(signed, bitLength) + + // Obtain our inputs. We also add our min/max values for this range to the list of inputs. + // Note: We exclude min being added if we're requesting an unsigned integer, as zero is already + // in our set, and we don't want duplicates. 
+ var inputs []*big.Int + inputs = append(inputs, g.valueSet.Integers()...) + if signed { + inputs = append(inputs, min, max) + } else { + inputs = append(inputs, max) + } + + // Set the input and ensure it is constrained to the value boundaries + input := new(big.Int).Set(i) + input = utils.ConstrainIntegerToBounds(input, min, max) + + // Shrink input + input = integerShrinkingMethods[g.randomProvider.Intn(len(integerShrinkingMethods))](g, input, inputs...) + + // Correct value boundaries (underflow/overflow) + input = utils.ConstrainIntegerToBounds(input, min, max) + return input + } + return i +} + +// stringShrinkingMethods define methods which take an initial string and a set of inputs to transform the input. The +// transformed input is returned. +var stringShrinkingMethods = []func(*ShrinkingValueMutator, string) string{ + // Replace a random index with a NULL char + func(g *ShrinkingValueMutator, s string) string { + // If the string is empty, we can simply return a new string with just the rune in it. + r := []rune(s) + if len(r) == 0 { + return string(r) + } + + // Otherwise, we replace a rune in it and return it. + r[g.randomProvider.Intn(len(r))] = 0 + return string(r) + }, + // Remove a random character + func(g *ShrinkingValueMutator, s string) string { + // If we have no characters to remove, do nothing + if len(s) == 0 { + return s + } + + // Otherwise, remove a random character. + i := g.randomProvider.Intn(len(s)) + return s[:i] + s[i+1:] + }, +} + +// MutateString takes a string input and returns a mutated value based off the input. 
+func (g *ShrinkingValueMutator) MutateString(s string) string { + randomGeneratorDecision := g.randomProvider.Float32() + if randomGeneratorDecision < g.config.ShrinkValueProbability { + input := stringShrinkingMethods[g.randomProvider.Intn(len(stringShrinkingMethods))](g, s) + return input + } + return s +} diff --git a/fuzzing/valuegeneration/value_set.go b/fuzzing/valuegeneration/value_set.go index 77e78b99..883aab73 100644 --- a/fuzzing/valuegeneration/value_set.go +++ b/fuzzing/valuegeneration/value_set.go @@ -64,6 +64,12 @@ func (vs *ValueSet) AddAddress(a common.Address) { vs.addresses[a] = nil } +// ContainsAddress checks if an address is contained in the ValueSet. +func (vs *ValueSet) ContainsAddress(a common.Address) bool { + _, contains := vs.addresses[a] + return contains +} + // RemoveAddress removes an address item from the ValueSet. func (vs *ValueSet) RemoveAddress(a common.Address) { delete(vs.addresses, a) @@ -85,6 +91,12 @@ func (vs *ValueSet) AddInteger(b *big.Int) { vs.integers[b.String()] = b } +// ContainsInteger checks if an integer is contained in the ValueSet. +func (vs *ValueSet) ContainsInteger(b *big.Int) bool { + _, contains := vs.integers[b.String()] + return contains +} + // RemoveInteger removes an integer item from the ValueSet. func (vs *ValueSet) RemoveInteger(b *big.Int) { delete(vs.integers, b.String()) @@ -106,6 +118,12 @@ func (vs *ValueSet) AddString(s string) { vs.strings[s] = nil } +// ContainsString checks if a string is contained in the ValueSet. +func (vs *ValueSet) ContainsString(s string) bool { + _, contains := vs.strings[s] + return contains +} + // RemoveString removes a string item from the ValueSet. func (vs *ValueSet) RemoveString(s string) { delete(vs.strings, s) @@ -133,6 +151,18 @@ func (vs *ValueSet) AddBytes(b []byte) { vs.bytes[hashStr] = b } +// ContainsBytes checks if a byte sequence is contained in the ValueSet. 
+func (vs *ValueSet) ContainsBytes(b []byte) bool { + // Calculate hash and reset our hash provider + vs.hashProvider.Write(b) + hashStr := hex.EncodeToString(vs.hashProvider.Sum(nil)) + vs.hashProvider.Reset() + + // Check if the key exists in our lookup + _, contains := vs.bytes[hashStr] + return contains +} + // RemoveBytes removes a byte sequence item from the ValueSet. func (vs *ValueSet) RemoveBytes(b []byte) { // Calculate hash and reset our hash provider diff --git a/fuzzing/valuegeneration/value_set_from_ast.go b/fuzzing/valuegeneration/value_set_from_ast.go index 11eb9c5a..0051956c 100644 --- a/fuzzing/valuegeneration/value_set_from_ast.go +++ b/fuzzing/valuegeneration/value_set_from_ast.go @@ -2,6 +2,7 @@ package valuegeneration import ( "github.com/ethereum/go-ethereum/common" + "github.com/shopspring/decimal" "math/big" "strings" ) @@ -20,8 +21,16 @@ func (vs *ValueSet) SeedFromAst(ast any) { return // fail silently to continue walking } + // Extract the subdenomination type + tempSubdenomination, obtainedSubdenomination := node["subdenomination"].(string) + var literalSubdenomination *string + if obtainedSubdenomination { + literalSubdenomination = &tempSubdenomination + } + // Seed ValueSet with literals if literalKind == "number" { + // If it has a 0x prefix, it won't have decimals if strings.HasPrefix(literalValue, "0x") { if b, ok := big.NewInt(0).SetString(literalValue[2:], 16); ok { vs.AddInteger(b) @@ -29,7 +38,8 @@ func (vs *ValueSet) SeedFromAst(ast any) { vs.AddAddress(common.BigToAddress(b)) } } else { - if b, ok := big.NewInt(0).SetString(literalValue, 10); ok { + if decValue, err := decimal.NewFromString(literalValue); err == nil { + b := getAbsoluteValueFromDenominatedValue(decValue, literalSubdenomination) vs.AddInteger(b) vs.AddInteger(new(big.Int).Neg(b)) vs.AddAddress(common.BigToAddress(b)) @@ -42,6 +52,50 @@ func (vs *ValueSet) SeedFromAst(ast any) { }) } +// getAbsoluteValueFromDenominatedValue converts a given decimal number in a 
provided denomination to a big.Int +// that represents its actual calculated value. +// Note: Decimals must be used as big.Float is prone to similar mantissa-related precision issues as float32/float64. +// Returns the calculated value given the floating point number in a given denomination. +func getAbsoluteValueFromDenominatedValue(number decimal.Decimal, denomination *string) *big.Int { + // If the denomination is nil, we do nothing + if denomination == nil { + return number.BigInt() + } + + // Otherwise, switch on the type and obtain a multiplier + var multiplier decimal.Decimal + switch *denomination { + case "wei": + multiplier = decimal.NewFromFloat32(1) + case "gwei": + multiplier = decimal.NewFromFloat32(1e9) + case "szabo": + multiplier = decimal.NewFromFloat32(1e12) + case "finney": + multiplier = decimal.NewFromFloat32(1e15) + case "ether": + multiplier = decimal.NewFromFloat32(1e18) + case "seconds": + multiplier = decimal.NewFromFloat32(1) + case "minutes": + multiplier = decimal.NewFromFloat32(60) + case "hours": + multiplier = decimal.NewFromFloat32(60 * 60) + case "days": + multiplier = decimal.NewFromFloat32(60 * 60 * 24) + case "weeks": + multiplier = decimal.NewFromFloat32(60 * 60 * 24 * 7) + case "years": + multiplier = decimal.NewFromFloat32(60 * 60 * 24 * 7 * 365) + default: + multiplier = decimal.NewFromFloat32(1) + } + + // Obtain the transformed number as an integer. + transformedValue := number.Mul(multiplier) + return transformedValue.BigInt() +} + // walkAstNodes walks/iterates across an AST for each node, calling the provided walk function with each discovered node // as an argument. 
func walkAstNodes(ast any, walkFunc func(node map[string]any)) { diff --git a/go.mod b/go.mod index efaec0f5..3bae89ac 100644 --- a/go.mod +++ b/go.mod @@ -1,70 +1,91 @@ module github.com/crytic/medusa -go 1.18 +go 1.22 require ( github.com/Masterminds/semver v1.5.0 - github.com/ethereum/go-ethereum v1.11.1 + github.com/ethereum/go-ethereum v1.14.6 github.com/fxamacker/cbor v1.5.1 - github.com/google/uuid v1.3.0 - github.com/spf13/cobra v1.7.0 - github.com/stretchr/testify v1.8.2 - golang.org/x/crypto v0.8.0 - golang.org/x/exp v0.0.0-20230206171751-46f607a40771 - golang.org/x/net v0.9.0 + github.com/google/uuid v1.6.0 + github.com/holiman/uint256 v1.3.0 + github.com/pkg/errors v0.9.1 + github.com/rs/zerolog v1.33.0 + github.com/shopspring/decimal v1.4.0 + github.com/spf13/cobra v1.8.1 + github.com/spf13/pflag v1.0.5 + github.com/stretchr/testify v1.9.0 + golang.org/x/crypto v0.25.0 + golang.org/x/exp v0.0.0-20240707233637-46b078467d37 + golang.org/x/net v0.27.0 + golang.org/x/sys v0.22.0 ) require ( - github.com/DataDog/zstd v1.5.2 // indirect - github.com/VictoriaMetrics/fastcache v1.12.0 // indirect + github.com/DataDog/zstd v1.5.5 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect + github.com/VictoriaMetrics/fastcache v1.12.2 // indirect github.com/beorn7/perks v1.0.1 // indirect - github.com/btcsuite/btcd/btcec/v2 v2.3.2 // indirect - github.com/cespare/xxhash/v2 v2.2.0 // indirect - github.com/cockroachdb/errors v1.9.1 // indirect + github.com/bits-and-blooms/bitset v1.13.0 // indirect + github.com/btcsuite/btcd/btcec/v2 v2.3.3 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/cockroachdb/errors v1.11.3 // indirect + github.com/cockroachdb/fifo v0.0.0-20240616162244-4768e80dfb9a // indirect github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b // indirect - github.com/cockroachdb/pebble v0.0.0-20230209160836-829675f94811 // indirect - github.com/cockroachdb/redact v1.1.3 // indirect + 
github.com/cockroachdb/pebble v1.1.1 // indirect + github.com/cockroachdb/redact v1.1.5 // indirect + github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 // indirect + github.com/consensys/bavard v0.1.13 // indirect + github.com/consensys/gnark-crypto v0.12.1 // indirect + github.com/crate-crypto/go-ipa v0.0.0-20240223125850-b1e8a79f509c // indirect + github.com/crate-crypto/go-kzg-4844 v1.0.0 // indirect github.com/davecgh/go-spew v1.1.1 // indirect - github.com/deckarep/golang-set/v2 v2.1.0 // indirect - github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0 // indirect - github.com/getsentry/sentry-go v0.18.0 // indirect - github.com/go-ole/go-ole v1.2.6 // indirect - github.com/go-stack/stack v1.8.1 // indirect + github.com/deckarep/golang-set/v2 v2.6.0 // indirect + github.com/decred/dcrd/dcrec/secp256k1/v4 v4.3.0 // indirect + github.com/ethereum/c-kzg-4844 v1.0.2 // indirect + github.com/ethereum/go-verkle v0.1.1-0.20240306133620-7d920df305f0 // indirect + github.com/fsnotify/fsnotify v1.7.0 // indirect + github.com/gballet/go-libpcsclite v0.0.0-20191108122812-4678299bea08 // indirect + github.com/getsentry/sentry-go v0.28.1 // indirect + github.com/go-ole/go-ole v1.3.0 // indirect + github.com/gofrs/flock v0.12.0 // indirect github.com/gogo/protobuf v1.3.2 // indirect - github.com/golang/protobuf v1.5.2 // indirect - github.com/golang/snappy v0.0.4 // indirect - github.com/gorilla/websocket v1.5.0 // indirect - github.com/holiman/big v0.0.0-20221017200358-a027dc42d04e // indirect + github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb // indirect + github.com/gorilla/websocket v1.5.3 // indirect github.com/holiman/bloomfilter/v2 v2.0.3 // indirect - github.com/holiman/uint256 v1.2.1 // indirect + github.com/huin/goupnp v1.3.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/klauspost/compress v1.15.15 // indirect + github.com/jackpal/go-nat-pmp v1.0.2 // indirect + github.com/klauspost/compress v1.17.9 // 
indirect github.com/kr/pretty v0.3.1 // indirect github.com/kr/text v0.2.0 // indirect - github.com/mattn/go-runewidth v0.0.14 // indirect - github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-runewidth v0.0.15 // indirect + github.com/mmcloughlin/addchain v0.4.0 // indirect + github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/olekukonko/tablewriter v0.0.5 // indirect - github.com/pkg/errors v0.9.1 // indirect + github.com/onsi/gomega v1.33.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/prometheus/client_golang v1.14.0 // indirect - github.com/prometheus/client_model v0.3.0 // indirect - github.com/prometheus/common v0.39.0 // indirect - github.com/prometheus/procfs v0.9.0 // indirect - github.com/prometheus/tsdb v0.10.0 // indirect - github.com/rivo/uniseg v0.4.3 // indirect - github.com/rogpeppe/go-internal v1.9.0 // indirect + github.com/prometheus/client_golang v1.19.1 // indirect + github.com/prometheus/client_model v0.6.1 // indirect + github.com/prometheus/common v0.55.0 // indirect + github.com/prometheus/procfs v0.15.1 // indirect + github.com/rivo/uniseg v0.4.7 // indirect + github.com/rogpeppe/go-internal v1.12.0 // indirect github.com/shirou/gopsutil v3.21.11+incompatible // indirect - github.com/spf13/pflag v1.0.5 // indirect + github.com/status-im/keycard-go v0.3.2 // indirect + github.com/supranational/blst v0.3.12 // indirect github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 // indirect - github.com/tklauser/go-sysconf v0.3.11 // indirect - github.com/tklauser/numcpus v0.6.0 // indirect + github.com/tklauser/go-sysconf v0.3.14 // indirect + github.com/tklauser/numcpus v0.8.0 // indirect + github.com/tyler-smith/go-bip39 v1.1.0 // indirect github.com/x448/float16 v0.8.4 // indirect - github.com/yusufpapurcu/wmi v1.2.2 // indirect - 
golang.org/x/sys v0.7.0 // indirect - golang.org/x/text v0.9.0 // indirect - google.golang.org/protobuf v1.28.1 // indirect - gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce // indirect + github.com/yusufpapurcu/wmi v1.2.4 // indirect + golang.org/x/sync v0.7.0 // indirect + golang.org/x/text v0.16.0 // indirect + google.golang.org/protobuf v1.34.2 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect + rsc.io/tmplfunc v0.0.3 // indirect ) -replace github.com/ethereum/go-ethereum v1.11.1 => github.com/crytic/medusa-geth v0.0.0-20230221190257-777a77b25150 +replace github.com/ethereum/go-ethereum => github.com/crytic/medusa-geth v0.0.0-20240919134035-0fd368c28419 diff --git a/go.sum b/go.sum index 6a0beced..6c8f950d 100644 --- a/go.sum +++ b/go.sum @@ -1,508 +1,288 @@ -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -github.com/AndreasBriese/bbloom v0.0.0-20190306092124-e2d15f34fcf9/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/CloudyKit/fastprinter v0.0.0-20200109182630-33d98a066a53/go.mod h1:+3IMCy2vIlbG1XG/0ggNQv0SvxCAIpPM5b1nCz56Xno= -github.com/CloudyKit/jet/v3 v3.0.0/go.mod h1:HKQPgSJmdK8hdoAbKUUWajkHyHo4RaU5rMdUywE7VMo= -github.com/DataDog/zstd v1.5.2 h1:vUG4lAyuPCXO0TLbXvPv7EB7cNK1QV/luu55UHLrrn8= -github.com/DataDog/zstd v1.5.2/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw= -github.com/Joker/hpp v1.0.0/go.mod h1:8x5n+M1Hp5hC0g8okX3sR3vFQwynaX/UgSOM9MeBKzY= +github.com/DataDog/zstd v1.5.5 h1:oWf5W7GtOLgp6bciQYDmhHHjdhYkALu6S/5Ni9ZgSvQ= +github.com/DataDog/zstd v1.5.5/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw= github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= -github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= 
-github.com/Shopify/goreferrer v0.0.0-20181106222321-ec9c9a553398/go.mod h1:a1uqRtAwp2Xwc6WNPJEufxJ7fx3npB4UV/JOLmbu5I0= -github.com/VictoriaMetrics/fastcache v1.12.0 h1:vnVi/y9yKDcD9akmc4NqAoqgQhJrOwUF+j9LTgn4QDE= -github.com/VictoriaMetrics/fastcache v1.12.0/go.mod h1:tjiYeEfYXCqacuvYw/7UoDIeJaNxq6132xHICNP77w8= -github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY= -github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/VictoriaMetrics/fastcache v1.12.2 h1:N0y9ASrJ0F6h0QaC3o6uJb3NIZ9VKLjCM7NQbSmF7WI= +github.com/VictoriaMetrics/fastcache v1.12.2/go.mod h1:AmC+Nzz1+3G2eCPapF6UcsnkThDcMsQicp4xDukwJYI= github.com/allegro/bigcache v1.2.1-0.20190218064605-e24eb225f156 h1:eMwmnE/GDgah4HI848JfFxHt+iPb26b4zyfspmqY0/8= github.com/allegro/bigcache v1.2.1-0.20190218064605-e24eb225f156/go.mod h1:Cb/ax3seSYIx7SuZdm2G2xzfwmv3TPSk2ucNfQESPXM= -github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= -github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible/go.mod h1:osfaiScAUVup+UC9Nfq76eWqDhXlp+4UYaA8uhTBO6g= -github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= -github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/btcsuite/btcd/btcec/v2 v2.3.2 h1:5n0X6hX0Zk+6omWcihdYvdAlGf2DfasC0GMf7DClJ3U= -github.com/btcsuite/btcd/btcec/v2 
v2.3.2/go.mod h1:zYzJ8etWJQIv1Ogk7OzpWjowwOdXY1W/17j2MW85J04= +github.com/bits-and-blooms/bitset v1.13.0 h1:bAQ9OPNFYbGHV6Nez0tmNI0RiEu7/hxlYJRUA0wFAVE= +github.com/bits-and-blooms/bitset v1.13.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= +github.com/btcsuite/btcd/btcec/v2 v2.3.3 h1:6+iXlDKE8RMtKsvK0gshlXIuPbyWM/h84Ensb7o3sC0= +github.com/btcsuite/btcd/btcec/v2 v2.3.3/go.mod h1:zYzJ8etWJQIv1Ogk7OzpWjowwOdXY1W/17j2MW85J04= github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1 h1:q0rUy8C/TYNBQS1+CGKw68tLOFYSNEs0TFnxxnS9+4U= -github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= -github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= +github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1/go.mod h1:7SFka0XMvUgj3hfZtydOrQY2mwhPclbT2snogU7SQQc= +github.com/cespare/cp v0.1.0 h1:SE+dxFebS7Iik5LK0tsi1k9ZCxEaFX4AjQmoyA+1dJk= +github.com/cespare/cp v0.1.0/go.mod h1:SOGHArjBr4JWaSDEVpWpo/hNg6RoKrls6Oh40hiwW+s= github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cockroachdb/datadriven v1.0.2 h1:H9MtNqVoVhvd9nCBwOyDjUEdZCREqbIdCJD93PBm/jA= -github.com/cockroachdb/datadriven v1.0.2/go.mod h1:a9RdTaap04u637JoCzcUoIcDmvwSUtcUFtT/C3kJlTU= -github.com/cockroachdb/errors v1.9.1 h1:yFVvsI0VxmRShfawbt/laCIDy/mtTqqnvoNgiy5bEV8= -github.com/cockroachdb/errors v1.9.1/go.mod h1:2sxOtL2WIc096WSZqZ5h8fa17rdDq9HZOZLBCor4mBk= -github.com/cockroachdb/logtags v0.0.0-20211118104740-dabe8e521a4f/go.mod h1:Vz9DsVWQQhf3vs21MhPMZpMGSht7O/2vFW2xusFUVOs= 
+github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cockroachdb/datadriven v1.0.3-0.20230413201302-be42291fc80f h1:otljaYPt5hWxV3MUfO5dFPFiOXg9CyG5/kCfayTqsJ4= +github.com/cockroachdb/datadriven v1.0.3-0.20230413201302-be42291fc80f/go.mod h1:a9RdTaap04u637JoCzcUoIcDmvwSUtcUFtT/C3kJlTU= +github.com/cockroachdb/errors v1.11.3 h1:5bA+k2Y6r+oz/6Z/RFlNeVCesGARKuC6YymtcDrbC/I= +github.com/cockroachdb/errors v1.11.3/go.mod h1:m4UIW4CDjx+R5cybPsNrRbreomiFqt8o1h1wUVazSd8= +github.com/cockroachdb/fifo v0.0.0-20240616162244-4768e80dfb9a h1:f52TdbU4D5nozMAhO9TvTJ2ZMCXtN4VIAmfrrZ0JXQ4= +github.com/cockroachdb/fifo v0.0.0-20240616162244-4768e80dfb9a/go.mod h1:9/y3cnZ5GKakj/H4y9r9GTjCvAFta7KLgSHPJJYc52M= github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b h1:r6VH0faHjZeQy818SGhaone5OnYfxFR/+AzdY3sf5aE= github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b/go.mod h1:Vz9DsVWQQhf3vs21MhPMZpMGSht7O/2vFW2xusFUVOs= -github.com/cockroachdb/pebble v0.0.0-20230209160836-829675f94811 h1:ytcWPaNPhNoGMWEhDvS3zToKcDpRsLuRolQJBVGdozk= -github.com/cockroachdb/pebble v0.0.0-20230209160836-829675f94811/go.mod h1:Nb5lgvnQ2+oGlE/EyZy4+2/CxRh9KfvCXnag1vtpxVM= -github.com/cockroachdb/redact v1.1.3 h1:AKZds10rFSIj7qADf0g46UixK8NNLwWTNdCIGS5wfSQ= -github.com/cockroachdb/redact v1.1.3/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg= -github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcjuz89kmFXt9morQgcfYZAYZ5n8WHjt81YYWIwtTM= -github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= -github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= -github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= -github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= 
-github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/cockroachdb/pebble v1.1.1 h1:XnKU22oiCLy2Xn8vp1re67cXg4SAasg/WDt1NtcRFaw= +github.com/cockroachdb/pebble v1.1.1/go.mod h1:4exszw1r40423ZsmkG/09AFEG83I0uDgfujJdbL6kYU= +github.com/cockroachdb/redact v1.1.5 h1:u1PMllDkdFfPWaNGMyLD1+so+aq3uUItthCFqzwPJ30= +github.com/cockroachdb/redact v1.1.5/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg= +github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 h1:zuQyyAKVxetITBuuhv3BI9cMrmStnpT18zmgmTxunpo= +github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06/go.mod h1:7nc4anLGjupUW/PeY5qiNYsdNXj7zopG+eqsS7To5IQ= +github.com/consensys/bavard v0.1.13 h1:oLhMLOFGTLdlda/kma4VOJazblc7IM5y5QPd2A/YjhQ= +github.com/consensys/bavard v0.1.13/go.mod h1:9ItSMtA/dXMAiL7BG6bqW2m3NdSEObYWoH223nGHukI= +github.com/consensys/gnark-crypto v0.12.1 h1:lHH39WuuFgVHONRl3J0LRBtuYdQTumFSDtJF7HpyG8M= +github.com/consensys/gnark-crypto v0.12.1/go.mod h1:v2Gy7L/4ZRosZ7Ivs+9SfUDr0f5UlG+EM5t7MPHiLuY= +github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/crate-crypto/go-ipa v0.0.0-20240223125850-b1e8a79f509c h1:uQYC5Z1mdLRPrZhHjHxufI8+2UG/i25QG92j0Er9p6I= +github.com/crate-crypto/go-ipa v0.0.0-20240223125850-b1e8a79f509c/go.mod h1:geZJZH3SzKCqnz5VT0q/DyIG/tvu/dZk+VIfXicupJs= +github.com/crate-crypto/go-kzg-4844 v1.0.0 h1:TsSgHwrkTKecKJ4kadtHi4b3xHW5dCFUDFnUp1TsawI= +github.com/crate-crypto/go-kzg-4844 v1.0.0/go.mod h1:1kMhvPgI0Ky3yIa+9lFySEBUBXkYxeOi8ZF1sYioxhc= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= -github.com/crytic/medusa-geth v0.0.0-20230221190257-777a77b25150 h1:Helt4ysP5N0cJzvhBsx4JcAOte5gD4whzamaXWpg37M= -github.com/crytic/medusa-geth v0.0.0-20230221190257-777a77b25150/go.mod 
h1:DuefStAgaxoaYGLR0FueVcVbehmn5n9QUcVrMCuOvuc= +github.com/crytic/medusa-geth v0.0.0-20240919134035-0fd368c28419 h1:MJXzWPObZtF0EMRqX64JkzJDj+GMLPxg3XK5xb12FFU= +github.com/crytic/medusa-geth v0.0.0-20240919134035-0fd368c28419/go.mod h1:ajGCVsk6ctffGwe9TSDQqj4HIUUQ1WdUit5tWFNl8Tw= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/deckarep/golang-set/v2 v2.1.0 h1:g47V4Or+DUdzbs8FxCCmgb6VYd+ptPAngjM6dtGktsI= -github.com/deckarep/golang-set/v2 v2.1.0/go.mod h1:VAky9rY/yGXJOLEDv3OMci+7wtDpOF4IN+y82NBOac4= -github.com/decred/dcrd/crypto/blake256 v1.0.0 h1:/8DMNYp9SGi5f0w7uCm6d6M4OU2rGFK09Y2A4Xv7EE0= -github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0 h1:HbphB4TFFXpv7MNrT52FGrrgVXF1owhMVTHFZIlnvd4= -github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0/go.mod h1:DZGJHZMqrU4JJqFAWUS2UO1+lbSKsdiOoYi9Zzey7Fc= -github.com/dgraph-io/badger v1.6.0/go.mod h1:zwt7syl517jmP8s94KqSxTlM6IMsdhYy6psNgSztDR4= -github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= -github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= -github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= -github.com/edsrzf/mmap-go v1.0.0 h1:CEBF7HpRnUCSJgGUb5h1Gm7e3VkmVDrR8lvWVLtrOFw= -github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385/go.mod h1:0vRUJqYpeSZifjYj7uP3BG/gKcuzL9xWVV/Y+cK33KM= -github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod 
h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/etcd-io/bbolt v1.3.3/go.mod h1:ZF2nL25h33cCyBtcyWeZ2/I3HQOfTP+0PIEvHjkjCrw= -github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8= -github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= +github.com/deckarep/golang-set/v2 v2.6.0 h1:XfcQbWM1LlMB8BsJ8N9vW5ehnnPVIw0je80NsVHagjM= +github.com/deckarep/golang-set/v2 v2.6.0/go.mod h1:VAky9rY/yGXJOLEDv3OMci+7wtDpOF4IN+y82NBOac4= +github.com/decred/dcrd/crypto/blake256 v1.0.1 h1:7PltbUIQB7u/FfZ39+DGa/ShuMyJ5ilcvdfma9wOH6Y= +github.com/decred/dcrd/crypto/blake256 v1.0.1/go.mod h1:2OfgNZ5wDpcsFmHmCK5gZTPcCXqlm2ArzUIkw9czNJo= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.3.0 h1:rpfIENRNNilwHwZeG5+P150SMrnNEcHYvcCuK6dPZSg= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.3.0/go.mod h1:v57UDF4pDQJcEfFUCRop3lJL149eHGSe9Jvczhzjo/0= +github.com/ethereum/c-kzg-4844 v1.0.2 h1:8tV84BCEiPeOkiVgW9mpYBeBUir2bkCNVqxPwwVeO+s= +github.com/ethereum/c-kzg-4844 v1.0.2/go.mod h1:VewdlzQmpT5QSrVhbBuGoCdFJkpaJlO1aQputP83wc0= +github.com/ethereum/go-verkle v0.1.1-0.20240306133620-7d920df305f0 h1:KrE8I4reeVvf7C1tm8elRjj4BdscTYzz/WAbYyf/JI4= +github.com/ethereum/go-verkle v0.1.1-0.20240306133620-7d920df305f0/go.mod h1:D9AJLVXSyZQXJQVk8oh1EwjISE+sJTn2duYIZC0dy3w= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= +github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= +github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= github.com/fxamacker/cbor v1.5.1 h1:XjQWBgdmQyqimslUh5r4tUGmoqzHmBFQOImkWGi2awg= 
github.com/fxamacker/cbor v1.5.1/go.mod h1:3aPGItF174ni7dDzd6JZ206H8cmr4GDNBGpPa971zsU= -github.com/gavv/httpexpect v2.0.0+incompatible/go.mod h1:x+9tiU1YnrOvnB725RkpoLv1M62hOWzwo5OXotisrKc= -github.com/getsentry/sentry-go v0.12.0/go.mod h1:NSap0JBYWzHND8oMbyi0+XZhUalc1TBdRL1M71JZW2c= -github.com/getsentry/sentry-go v0.18.0 h1:MtBW5H9QgdcJabtZcuJG80BMOwaBpkRDZkxRkNC1sN0= -github.com/getsentry/sentry-go v0.18.0/go.mod h1:Kgon4Mby+FJ7ZWHFUAZgVaIa8sxHtnRJRLTXZr51aKQ= -github.com/gin-contrib/sse v0.0.0-20190301062529-5545eab6dad3/go.mod h1:VJ0WA2NBN22VlZ2dKZQPAPnyWw5XTlK1KymzLKsr59s= -github.com/gin-gonic/gin v1.4.0/go.mod h1:OW2EZn3DO8Ln9oIKOvM++LBO+5UPHJJDH72/q/3rZdM= -github.com/go-check/check v0.0.0-20180628173108-788fd7840127/go.mod h1:9ES+weclKsC9YodN5RgxqK/VD9HM9JsCSh7rNhMZE98= -github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= +github.com/gballet/go-libpcsclite v0.0.0-20191108122812-4678299bea08 h1:f6D9Hr8xV8uYKlyuj8XIruxlh9WjVjdh1gIicAS7ays= +github.com/gballet/go-libpcsclite v0.0.0-20191108122812-4678299bea08/go.mod h1:x7DCsMOv1taUwEWCzT4cmDeAkigA5/QCwUodaVOe8Ww= +github.com/getsentry/sentry-go v0.28.1 h1:zzaSm/vHmGllRM6Tpx1492r0YDzauArdBfkJRtY6P5k= +github.com/getsentry/sentry-go v0.28.1/go.mod h1:1fQZ+7l7eeJ3wYi82q5Hg8GqAPgefRq+FP/QhafYVgg= github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= -github.com/go-kit/kit v0.8.0 h1:Wz+5lgoB0kkuqLEc6NVmwRknTKP6dTGbSqvhZtBI/j0= -github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= -github.com/go-logfmt/logfmt v0.5.1 h1:otpy5pqBCBZ1ng9RQ0dPu4PN7ba75Y/aA+UpowDyNVA= -github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8= -github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= +github.com/go-errors/errors v1.4.2/go.mod 
h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= -github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/go-stack/stack v1.8.1 h1:ntEHSVwIt7PNXNpgPmVfMrNhLtgjlmnZha2kOpuRiDw= -github.com/go-stack/stack v1.8.1/go.mod h1:dcoOX6HbPZSZptuspn9bctJ+N/CnF5gGygcUP3XYfe4= -github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= -github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= -github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= -github.com/gogo/googleapis v0.0.0-20180223154316-0cd9801be74a/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s= -github.com/gogo/googleapis v1.4.1/go.mod h1:2lpHqI5OcWCtVElxXnPt+s8oJvMpySlOyM6xDCrzib4= -github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= -github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= +github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gofrs/flock v0.12.0 h1:xHW8t8GPAiGtqz7KxiSqfOEXwpOaqhpYZrTE2MQBgXY= +github.com/gofrs/flock v0.12.0/go.mod h1:FirDy1Ing0mI2+kB6wk+vyyAH+e6xiE+EYA0jnzV9jc= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= -github.com/gogo/status v1.1.0/go.mod h1:BFv9nrluPLmrS0EmGVvLaPNmRosr9KapBYd5/hpY1WM= -github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/mock v1.1.1/go.mod 
h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= -github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= -github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/gomodule/redigo v1.7.1-0.20190724094224-574c33c3df38/go.mod h1:B4C85qUVwatsJoIUNIfCRsp7qO0iAmpGFZ4EELWSbC4= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb h1:PBC98N2aIaM3XXiurYmW7fx4GZkL8feAMVq7nEjURHk= +github.com/golang/snappy 
v0.0.5-0.20220116011046-fa5810519dcb/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= -github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= -github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= -github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= -github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= -github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/holiman/big v0.0.0-20221017200358-a027dc42d04e h1:pIYdhNkDh+YENVNi3gto8n9hAmRxKxoar0iE6BLucjw= -github.com/holiman/big v0.0.0-20221017200358-a027dc42d04e/go.mod h1:j9cQbcqHQujT0oKJ38PylVfqohClLr3CvDC+Qcg+lhU= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod 
h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= +github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg= +github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/holiman/bloomfilter/v2 v2.0.3 h1:73e0e/V0tCydx14a0SCYS/EWCxgwLZ18CZcZKVu0fao= github.com/holiman/bloomfilter/v2 v2.0.3/go.mod h1:zpoh+gs7qcpqrHr3dB55AMiJwo0iURXE7ZOP9L9hSkA= -github.com/holiman/uint256 v1.2.1 h1:XRtyuda/zw2l+Bq/38n5XUoEF72aSOu/77Thd9pPp2o= -github.com/holiman/uint256 v1.2.1/go.mod h1:y4ga/t+u+Xwd7CpDgZESaRcWy0I7XMlTMA25ApIH5Jw= +github.com/holiman/uint256 v1.3.0 h1:4wdcm/tnd0xXdu7iS3ruNvxkWwrb4aeBQv19ayYn8F4= +github.com/holiman/uint256 v1.3.0/go.mod h1:EOMSn4q6Nyt9P6efbI3bueV4e1b3dGlUCXeiRV4ng7E= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/hydrogen18/memlistener v0.0.0-20200120041712-dcc25e7acd91/go.mod h1:qEIFzExnS6016fRpRfxrExeVn2gbClQA99gQhnIcdhE= -github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA= -github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/huin/goupnp v1.3.0 h1:UvLUlWDNpoUdYzb2TCn+MuTWtcjXKSza2n6CBdQ0xXc= +github.com/huin/goupnp v1.3.0/go.mod h1:gnGPsThkYa7bFi/KWmEysQRf48l2dvR5bxr2OFckNX8= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= -github.com/iris-contrib/blackfriday 
v2.0.0+incompatible/go.mod h1:UzZ2bDEoaSGPbkg6SAB4att1aAwTmVIx/5gCVqeyUdI= -github.com/iris-contrib/go.uuid v2.0.0+incompatible/go.mod h1:iz2lgM/1UnEf1kP0L/+fafWORmlnuysV2EMP8MW+qe0= -github.com/iris-contrib/jade v1.1.3/go.mod h1:H/geBymxJhShH5kecoiOCSssPX7QWYH7UaeZTSWddIk= -github.com/iris-contrib/pongo2 v0.0.1/go.mod h1:Ssh+00+3GAZqSQb30AvBRNxBx7rf0GqwkjqxNd0u65g= -github.com/iris-contrib/schema v0.0.1/go.mod h1:urYA3uvUNG1TIIjOSCzHr9/LmbQo8LrOcOqfqxa4hXw= -github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= -github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= -github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k= -github.com/kataras/golog v0.0.10/go.mod h1:yJ8YKCmyL+nWjERB90Qwn+bdyBZsaQwU3bTVFgkFIp8= -github.com/kataras/iris/v12 v12.1.8/go.mod h1:LMYy4VlP67TQ3Zgriz8RE2h2kMZV2SgMYbq3UhfoFmE= -github.com/kataras/neffos v0.0.14/go.mod h1:8lqADm8PnbeFfL7CLXh1WHw53dG27MC3pgi2R1rmoTE= -github.com/kataras/pio v0.0.2/go.mod h1:hAoW0t9UmXi4R5Oyq5Z4irTbaTsOemSrDGUtaTl7Dro= -github.com/kataras/sitemap v0.0.5/go.mod h1:KY2eugMKiPwsJgx7+U103YZehfvNGOXURubcGyk0Bz8= +github.com/jackpal/go-nat-pmp v1.0.2 h1:KzKSgb7qkJvOUTqYl9/Hg/me3pWgBmERKrTGD7BdWus= +github.com/jackpal/go-nat-pmp v1.0.2/go.mod h1:QPH045xvCAeXUZOxsnwmrtiCoxIr9eob+4orBN1SBKc= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.8.2/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= -github.com/klauspost/compress v1.9.7/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= -github.com/klauspost/compress v1.15.15 
h1:EF27CXIuDsYJ6mmvtBRlEuB2UVOqHG1tAXgZ7yIO+lw= -github.com/klauspost/compress v1.15.15/go.mod h1:ZcK2JAFqKOpnBlxcLsJzYfrS9X1akm9fHZNnD9+Vo/4= -github.com/klauspost/cpuid v1.2.1/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= -github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA= +github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/labstack/echo/v4 v4.5.0/go.mod h1:czIriw4a0C1dFun+ObrXp7ok03xON0N1awStJ6ArI7Y= -github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= -github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= -github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= -github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.8/go.mod 
h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= -github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/leanovate/gopter v0.2.9 h1:fQjYxZaynp97ozCzfOyOuAGOU4aU/z37zf/tOujFk7c= +github.com/leanovate/gopter v0.2.9/go.mod h1:U2L/78B+KVFIx2VmW6onHJQzXtFb+p5y3y2Sh+Jxxv8= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= -github.com/mattn/go-runewidth v0.0.14 h1:+xnbZSEeDbOIg5/mE6JF0w6n9duR1l3/WmbinWVwUuU= -github.com/mattn/go-runewidth v0.0.14/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= -github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw= -github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= -github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= -github.com/mediocregopher/radix/v3 v3.4.2/go.mod 
h1:8FL3F6UQRXHXIBSPUs5h0RybMF8i4n7wVopoX3x7Bv8= -github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc= -github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/moul/http2curl v1.0.0/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ= -github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg= -github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzEE/Zbp4w= -github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= -github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= -github.com/nxadm/tail v1.4.4 h1:DQuhQpB1tVlglWS2hLQ5OV6B5r8aGxSrPc5Qo6uTN78= +github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U= +github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/mmcloughlin/addchain v0.4.0 h1:SobOdjm2xLj1KkXN5/n0xTIWyZA2+s99UCY1iPfkHRY= +github.com/mmcloughlin/addchain v0.4.0/go.mod h1:A86O+tHqZLMNO4w6ZZ4FlVQEadcoqkyU72HC5wJ4RlU= +github.com/mmcloughlin/profile v0.1.1/go.mod h1:IhHD7q1ooxgwTgjxQYkACGA77oFTDdFVejUS1/tS/qU= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 
h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= -github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= +github.com/nxadm/tail v1.4.11 h1:8feyoE3OzPrcshW5/MJ4sGESc5cqmGkGCWlco4l0bqY= +github.com/nxadm/tail v1.4.11/go.mod h1:OTaG3NK980DZzxbRq6lEuzgU+mug70nY11sMd4JXXHc= github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/ginkgo v1.14.0 h1:2mOpI4JVVPBN+WQRa0WKH2eXR+Ey+uK4n7Zj0aYpIQA= github.com/onsi/ginkgo v1.14.0/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= -github.com/onsi/gomega v1.10.1 h1:o0+MgICZLuZ7xjH7Vx6zS/zcu93/BEp1VwkIW1mEXCE= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= +github.com/onsi/gomega v1.33.1 h1:dsYjIxxSR755MDmKVsaFQTE22ChNBcuuTWgkUDSubOk= +github.com/onsi/gomega v1.33.1/go.mod h1:U4R44UsT+9eLIaYRB2a5qajjtQYn0hauxvRm16AVYg0= github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= -github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= 
-github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= -github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= -github.com/prometheus/client_golang v1.14.0 h1:nJdhIvne2eSX/XRAFV9PcvFFRbrjbcTUj0VP62TMhnw= -github.com/prometheus/client_golang v1.14.0/go.mod h1:8vpkKitgIVNcqrRBWh1C4TIUQgYNtG/XQE4E/Zae36Y= -github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= -github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.3.0 h1:UBgGFHqYdG/TPFD1B1ogZywDqEkwp3fBMvqdiQ7Xew4= -github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= -github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= -github.com/prometheus/common v0.39.0 h1:oOyhkDq05hPZKItWVBkJ6g6AtGxi+fy7F4JvUV8uhsI= -github.com/prometheus/common v0.39.0/go.mod h1:6XBZ7lYdLCbkAVhwRsWTZn+IN5AB9F/NXd5w0BbEX0Y= -github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= -github.com/prometheus/procfs v0.9.0 h1:wzCHvIvM5SxWqYvwgVL7yJY8Lz3PKn49KQtpgMYJfhI= -github.com/prometheus/procfs v0.9.0/go.mod 
h1:+pB4zwohETzFnmlpe6yd2lSc+0/46IYZRB/chUwxUZY= -github.com/prometheus/tsdb v0.10.0 h1:If5rVCMTp6W2SiRAQFlbpJNgVlgMEd+U2GZckwK38ic= -github.com/prometheus/tsdb v0.10.0/go.mod h1:oi49uRhEe9dPUTlS3JRZOwJuVi6tmh10QSgwXEyGCt4= +github.com/prometheus/client_golang v1.19.1 h1:wZWJDwK+NameRJuPGDhlnFgx8e8HN3XHQeLaYJFJBOE= +github.com/prometheus/client_golang v1.19.1/go.mod h1:mP78NwGzrVks5S2H6ab8+ZZGJLZUq1hoULYBAYBw1Ho= +github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E= +github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY= +github.com/prometheus/common v0.55.0 h1:KEi6DK7lXW/m7Ig5i47x0vRzuBsHuvJdi5ee6Y3G1dc= +github.com/prometheus/common v0.55.0/go.mod h1:2SECS4xJG1kd8XF9IcM1gMX6510RAEL65zxzNImwdc8= +github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc= +github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= -github.com/rivo/uniseg v0.4.3 h1:utMvzDsuh3suAEnhH0RdHmoPbU648o6CvXxTx4SBMOw= -github.com/rivo/uniseg v0.4.3/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= -github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o= -github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= +github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= +github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= -github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= +github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= +github.com/rogpeppe/go-internal v1.12.0/go.mod 
h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= +github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8= +github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= -github.com/schollz/closestmatch v2.1.0+incompatible/go.mod h1:RtP1ddjLong6gTkbtmuhtR2uUrrJOpYzYRvbcPAid+g= -github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI= github.com/shirou/gopsutil v3.21.11+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA= -github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= -github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= -github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= -github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= -github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= -github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= -github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= -github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I= -github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0= -github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= -github.com/spf13/pflag v1.0.3/go.mod 
h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k= +github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= +github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM= +github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= +github.com/status-im/keycard-go v0.3.2 h1:YusIF/bHx6YZis8UTOJrpZFnTs4IkRBdmJXqdiXkpFE= +github.com/status-im/keycard-go v0.3.2/go.mod h1:wlp8ZLbsmrF6g6WjugPAx+IzoLrkdf9+mHxBEeo3Hbg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8= -github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= 
+github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/supranational/blst v0.3.12 h1:Vfas2U2CFHhniv2QkUm2OVa1+pGTdqtpqm9NnhUUbZ8= +github.com/supranational/blst v0.3.12/go.mod h1:jZJtfjgudtNl4en1tzwPIV3KjUnQUvG3/j+w+fVonLw= github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 h1:epCh84lMvA70Z7CTTCmYQn2CKbY8j86K7/FAIr141uY= github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7/go.mod h1:q4W45IWZaF22tdD+VEXcAWRA037jwmWEB5VWYORlTpc= -github.com/tklauser/go-sysconf v0.3.11 h1:89WgdJhk5SNwJfu+GKyYveZ4IaJ7xAkecBo+KdJV0CM= -github.com/tklauser/go-sysconf v0.3.11/go.mod h1:GqXfhXY3kiPa0nAXPDIQIWzJbMCB7AmcWpGR8lSZfqI= -github.com/tklauser/numcpus v0.6.0 h1:kebhY2Qt+3U6RNK7UqpYNA+tJ23IBEGKkB7JQBfDYms= -github.com/tklauser/numcpus v0.6.0/go.mod h1:FEZLMke0lhOUG6w2JadTzp0a+Nl8PF/GFkQ5UVIcaL4= -github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= -github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= -github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= -github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= -github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= -github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= -github.com/valyala/fasthttp v1.6.0/go.mod h1:FstJa9V+Pj9vQ7OJie2qMHdwemEDaDiSdBnvPM1Su9w= -github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= -github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= -github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio= +github.com/tklauser/go-sysconf v0.3.14 h1:g5vzr9iPFFz24v2KZXs/pvpvh8/V9Fw6vQK5ZZb78yU= 
+github.com/tklauser/go-sysconf v0.3.14/go.mod h1:1ym4lWMLUOhuBOPGtRcJm7tEGX4SCYNEEEtghGG/8uY= +github.com/tklauser/numcpus v0.8.0 h1:Mx4Wwe/FjZLeQsK/6kt2EOepwwSl7SmJrK5bV/dXYgY= +github.com/tklauser/numcpus v0.8.0/go.mod h1:ZJZlAY+dmR4eut8epnzf0u/VwodKmryxR8txiloSqBE= +github.com/tyler-smith/go-bip39 v1.1.0 h1:5eUemwrMargf3BSLRRCalXT93Ns6pQJIjYQN2nyfOP8= +github.com/tyler-smith/go-bip39 v1.1.0/go.mod h1:gUYDtqQw1JS3ZJ8UWVcGTGqqr6YIN3CWg+kkNaLt55U= github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= -github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= -github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= -github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= -github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= -github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0/go.mod h1:/LWChgwKmvncFJFHJ7Gvn9wZArjbV5/FppcK2fKk/tI= -github.com/yudai/gojsondiff v1.0.0/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg= -github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM= -github.com/yudai/pp v2.0.1+incompatible/go.mod h1:PuxR/8QJ7cyCkFp/aUDS+JY727OFEZkTdatxwunjIkc= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -github.com/yusufpapurcu/wmi v1.2.2 h1:KBNDSne4vP5mbSWnJbO+51IMOXJB67QiYCSBrubbPRg= -github.com/yusufpapurcu/wmi v1.2.2/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= -golang.org/x/crypto 
v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0= +github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20191227163750-53104e6ec876/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.8.0 h1:pd9TJtTueMTVQXzk8E2XESSMQDj/U7OUu0PqJqPXQjQ= -golang.org/x/crypto v0.8.0/go.mod h1:mRqEX+O9/h5TFCrQhkgjo2yKi0yYA+9ecGkdQoHrywE= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20230206171751-46f607a40771 h1:xP7rWLUr1e1n2xkK5YB4LI0hPEy3LJC6Wk+D4pGlOJg= -golang.org/x/exp v0.0.0-20230206171751-46f607a40771/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc= -golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint 
v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/crypto v0.25.0 h1:ypSNr+bnYL2YhwoMt2zPxHFmbAN1KZs/njMG3hxUp30= +golang.org/x/crypto v0.25.0/go.mod h1:T+wALwcMOSE0kXgUAnPAHqTLW+XHgcELELW8VaDgm/M= +golang.org/x/exp v0.0.0-20240707233637-46b078467d37 h1:uLDX+AfeFCct3a2C7uIWBKMJIR3CJMhcgfrUAqjRK6w= +golang.org/x/exp v0.0.0-20240707233637-46b078467d37/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190327091125-710a502c58a2/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= 
-golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200813134508-3edf25e44fcc/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.0.0-20211008194852-3b03d305991f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.9.0 h1:aWJ/m6xSmxWBx+V0XRHTlrYrPG56jKsLdTFmsSsCzOM= -golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= -golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/net v0.27.0 h1:5K3Njcw06/l2y9vpGCSdcxWOYHOUk3dVNGDXN+FvAys= +golang.org/x/net v0.27.0/go.mod h1:dDi0PyhWNoiUOrAS8uXv/vnScO4wnHQO4mj9fn/RytE= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o= -golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
-golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200814200057-3d37ad5750ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220405052023-b1e9470b6e64/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.7.0 h1:3jlCCIQZPdOYu1h8BkNvLz8Kgwtae2cagcG/VamtZRU= 
-golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI= +golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE= -golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/time v0.0.0-20201208040808-7e3f01d25324/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4= +golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20181221001348-537d06c36207/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod 
h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190327201419-c70d86f8b7cf/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df h1:5Pf6pFKu98ODmgnpvkJ3kFUOQGGLIzLIkbzUHp47618= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/genproto v0.0.0-20180518175338-11a468237815/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto 
v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= -google.golang.org/grpc v1.12.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= -google.golang.org/protobuf v1.26.0-rc.1/go.mod 
h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= -google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= +google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/go-playground/assert.v1 v1.2.1/go.mod h1:9RXL0bg/zibRAgZUYszZSwO/z8Y/a8bDuhia5mkpMnE= -gopkg.in/go-playground/validator.v8 v8.18.2/go.mod h1:RX2a/7Ha8BgOhfk7j780h4/u/RRjR0eouCJSH80/M2Y= -gopkg.in/ini.v1 v1.51.1/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/mgo.v2 v2.0.0-20180705113604-9856a29383ce/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA= -gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce h1:+JknDZhAj8YMt7GC73Ei8pv4MzjDUNPHgQWJdtMAaDU= -gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce/go.mod h1:5AcXVHNjg+BDxry382+8OKon8SEWiKktQR07RKPsv1c= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= 
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= -gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v3 v3.0.0-20191120175047-4206685974f2/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +rsc.io/tmplfunc v0.0.3 h1:53XFQh69AfOa8Tw0Jm7t+GV7KZhOi6jzsCzTtKbMvzU= +rsc.io/tmplfunc v0.0.3/go.mod h1:AG3sTPzElb1Io3Yg4voV9AGZJuleGAwaVRxL9M49PhA= diff --git a/logging/colors/color_funcs.go b/logging/colors/color_funcs.go new file mode 100644 index 00000000..4a201dbb --- /dev/null +++ b/logging/colors/color_funcs.go @@ -0,0 +1,107 @@ +package colors + +import "fmt" + +// ColorFunc is an alias type for a coloring function that accepts anything and returns a colorized string +type ColorFunc = func(s any) string + +// Reset is a ColorFunc that simply returns the input as a string. It is basically a no-op and is used for resetting the +// color context during complex logging operations. 
+func Reset(s any) string { + return fmt.Sprintf("%v", s) +} + +// Black is a ColorFunc that returns a black-colorized string of the provided input +func Black(s any) string { + return Colorize(s, BLACK) +} + +// BlackBold is a ColorFunc that returns a black-bold-colorized string of the provided input +func BlackBold(s any) string { + return Colorize(Colorize(s, BLACK), BOLD) +} + +// Red is a ColorFunc that returns a red-colorized string of the provided input +func Red(s any) string { + return Colorize(s, RED) +} + +// RedBold is a ColorFunc that returns a red-bold-colorized string of the provided input +func RedBold(s any) string { + return Colorize(Colorize(s, RED), BOLD) +} + +// Green is a ColorFunc that returns a green-colorized string of the provided input +func Green(s any) string { + return Colorize(s, GREEN) +} + +// GreenBold is a ColorFunc that returns a green-bold-colorized string of the provided input +func GreenBold(s any) string { + return Colorize(Colorize(s, GREEN), BOLD) +} + +// Yellow is a ColorFunc that returns a yellow-colorized string of the provided input +func Yellow(s any) string { + return Colorize(s, YELLOW) +} + +// YellowBold is a ColorFunc that returns a yellow-bold-colorized string of the provided input +func YellowBold(s any) string { + return Colorize(Colorize(s, YELLOW), BOLD) +} + +// Blue is a ColorFunc that returns a blue-colorized string of the provided input +func Blue(s any) string { + return Colorize(s, BLUE) +} + +// BlueBold is a ColorFunc that returns a blue-bold-colorized string of the provided input +func BlueBold(s any) string { + return Colorize(Colorize(s, BLUE), BOLD) +} + +// Magenta is a ColorFunc that returns a magenta-colorized string of the provided input +func Magenta(s any) string { + return Colorize(s, MAGENTA) +} + +// MagentaBold is a ColorFunc that returns a magenta-bold-colorized string of the provided input +func MagentaBold(s any) string { + return Colorize(Colorize(s, MAGENTA), BOLD) +} + +// Cyan 
is a ColorFunc that returns a cyan-colorized string of the provided input +func Cyan(s any) string { + return Colorize(s, CYAN) +} + +// CyanBold is a ColorFunc that returns a cyan-bold-colorized string of the provided input +func CyanBold(s any) string { + return Colorize(Colorize(s, CYAN), BOLD) +} + +// White is a ColorFunc that returns a white-colorized string of the provided input +func White(s any) string { + return Colorize(s, WHITE) +} + +// WhiteBold is a ColorFunc that returns a white-bold-colorized string of the provided input +func WhiteBold(s any) string { + return Colorize(Colorize(s, WHITE), BOLD) +} + +// Bold is a ColorFunc that returns a bolded string of the provided input +func Bold(s any) string { + return Colorize(s, BOLD) +} + +// DarkGray is a ColorFunc that returns a dark-gray-colorized string of the provided input +func DarkGray(s any) string { + return Colorize(s, DARK_GRAY) +} + +// DarkGrayBold is a ColorFunc that returns a dark-gray-bold-colorized string of the provided input +func DarkGrayBold(s any) string { + return Colorize(Colorize(s, DARK_GRAY), BOLD) +} diff --git a/logging/colors/colorize_unix.go b/logging/colors/colorize_unix.go new file mode 100644 index 00000000..ba404662 --- /dev/null +++ b/logging/colors/colorize_unix.go @@ -0,0 +1,25 @@ +//go:build !windows +// +build !windows + +package colors + +import "fmt" + +var enabled = true + +// EnableColor enables the use of colors for non-windows systems. +func EnableColor() { enabled = true } + +// DisableColor disables the use of colors for non-windows systems. 
+func DisableColor() { enabled = false } + +// Colorize returns the string s wrapped in ANSI code c for non-windows systems +// Source: https://github.com/rs/zerolog/blob/4fff5db29c3403bc26dee9895e12a108aacc0203/console.go +func Colorize(s any, c Color) string { + // Return original string if explicitly disabled + if !enabled { + return fmt.Sprintf("%v", s) + } + + return fmt.Sprintf("\x1b[%dm%v\x1b[0m", c, s) +} diff --git a/logging/colors/colorize_windows.go b/logging/colors/colorize_windows.go new file mode 100644 index 00000000..4e57c269 --- /dev/null +++ b/logging/colors/colorize_windows.go @@ -0,0 +1,70 @@ +//go:build windows +// +build windows + +package colors + +import ( + "fmt" + "os" + + "golang.org/x/sys/windows" +) + +var enabled bool + +// EnableColor will make a kernel call to enable ANSI escape codes on both stdout and stderr. Note that if enablement +// on either stream fails, then coloring will not be enabled. +func EnableColor() { + var mode uint32 + fds := []uintptr{os.Stdout.Fd(), os.Stderr.Fd()} + + // Iterate across each file descriptor and enable coloring + for _, fd := range fds { + // Obtain our current console mode. + consoleHandle := windows.Handle(fd) + err := windows.GetConsoleMode(consoleHandle, &mode) + if err != nil { + enabled = false + return + } + + // If color is not enabled, try to enable it. + if mode&windows.ENABLE_VIRTUAL_TERMINAL_PROCESSING == 0 { + err = windows.SetConsoleMode(consoleHandle, mode|windows.ENABLE_VIRTUAL_TERMINAL_PROCESSING) + if err != nil { + enabled = false + return + } + } + + // Fetch the console mode once more + err = windows.GetConsoleMode(consoleHandle, &mode) + if err != nil { + enabled = false + return + } + + // Set our enabled status after trying to enable it. 
+ enabled = mode&windows.ENABLE_VIRTUAL_TERMINAL_PROCESSING != 0 + + // If we failed to enable on this file descriptor, exit out + if !enabled { + return + } + } +} + +// DisableColor will disable colors +func DisableColor() { enabled = false } + +// Colorize returns the string s wrapped in ANSI code c assuming that ANSI is supported on the Windows version +// Source: https://github.com/rs/zerolog/blob/4fff5db29c3403bc26dee9895e12a108aacc0203/console.go +func Colorize(s any, c Color) string { + // If ANSI is not supported then just return the original string + if !enabled { + return fmt.Sprintf("%v", s) + } + + // Otherwise, returned an ANSI-wrapped string + return fmt.Sprintf("\x1b[%dm%v\x1b[0m", c, s) +} diff --git a/logging/colors/constants.go b/logging/colors/constants.go new file mode 100644 index 00000000..4f3f555a --- /dev/null +++ b/logging/colors/constants.go @@ -0,0 +1,37 @@ +package colors + +type Color int + +// This is taken from zerolog's repo and will be used to colorize log output +// Source: https://github.com/rs/zerolog/blob/4fff5db29c3403bc26dee9895e12a108aacc0203/console.go +const ( + // BLACK is the ANSI code for black + BLACK Color = iota + 30 + // COLOR_RED is the ANSI code for red + RED + // GREEN is the ANSI code for green + GREEN + // YELLOW is the ANSI code for yellow + YELLOW + // BLUE is the ANSI code for blue + BLUE + // MAGENTA is the ANSI code for magenta + MAGENTA + // CYAN is the ANSI code for cyan + CYAN + // WHITE is the ANSI code for white + WHITE + // BOLD is the ANSI code for bold tet + BOLD = 1 + // DARK_GRAY is the ANSI code for dark gray + DARK_GRAY = 90 +) + +// This enum is to identify special unicode characters that will be used for pretty console output +const ( + // LEFT_ARROW is the unicode string for a left arrow glyph + LEFT_ARROW = "\u21fe" + + // BULLET_POINT is the unicode string for a triangular bullet point + BULLET_POINT = "\u2023" +) diff --git a/logging/colors/init.go b/logging/colors/init.go new file mode 
100644 index 00000000..758c7d4f --- /dev/null +++ b/logging/colors/init.go @@ -0,0 +1,7 @@ +package colors + +// init will ensure that ANSI coloring is enabled on Windows and Unix systems. Note that ANSI coloring is enabled by +// default on Unix system and Windows needs specific kernel calls for enablement +func init() { + EnableColor() +} diff --git a/logging/init.go b/logging/init.go new file mode 100644 index 00000000..0f47cf81 --- /dev/null +++ b/logging/init.go @@ -0,0 +1,16 @@ +package logging + +import ( + "github.com/rs/zerolog" + "github.com/rs/zerolog/pkgerrors" +) + +// init will instantiate the global logger and set up some global parameters from the zerolog package. +func init() { + // Instantiate the global logger + GlobalLogger = NewLogger(zerolog.Disabled) + + // Setup stack trace support and set the timestamp format to UNIX + zerolog.ErrorStackMarshaler = pkgerrors.MarshalStack + zerolog.TimeFieldFormat = zerolog.TimeFormatUnix +} diff --git a/logging/log_buffer.go b/logging/log_buffer.go new file mode 100644 index 00000000..761d7ab5 --- /dev/null +++ b/logging/log_buffer.go @@ -0,0 +1,39 @@ +package logging + +// LogBuffer is a helper object that can be used to buffer log messages. A log buffer is effectively a list of arguments +// of any type. This object is especially useful when attempting to log complex objects (e.g. execution trace) that have +// complex coloring schemes and formatting. The LogBuffer can then be passed on to a Logger object to then log the buffer +// to console and any other writers (e.g. file). 
+type LogBuffer struct { + // elements describes the list of elements that eventually need to be concatenated together in the Logger + elements []any +} + +// NewLogBuffer creates a new LogBuffer object +func NewLogBuffer() *LogBuffer { + return &LogBuffer{ + elements: make([]any, 0), + } +} + +// Append appends a variadic set of elements to the list of elements +func (l *LogBuffer) Append(newElements ...any) { + l.elements = append(l.elements, newElements...) +} + +// Elements returns the list of elements stored in this LogBuffer +func (l *LogBuffer) Elements() []any { + return l.elements +} + +// String provides the non-colorized string representation of the LogBuffer +func (l LogBuffer) String() string { + _, msg, _, _ := buildMsgs(l.elements...) + return msg +} + +// ColorString provides the colorized string representation of the LogBuffer +func (l LogBuffer) ColorString() string { + msg, _, _, _ := buildMsgs(l.elements...) + return msg +} diff --git a/logging/logger.go b/logging/logger.go new file mode 100644 index 00000000..e52f4414 --- /dev/null +++ b/logging/logger.go @@ -0,0 +1,464 @@ +package logging + +import ( + "fmt" + "github.com/crytic/medusa/logging/colors" + "github.com/rs/zerolog" + "io" + "strings" +) + +// GlobalLogger describes a Logger that is disabled by default and is instantiated when the fuzzer is created. Each module/package +// should create its own sub-logger. This allows to create unique logging instances depending on the use case. +var GlobalLogger *Logger + +// Logger describes a custom logging object that can log events to any arbitrary channel in structured, unstructured with colors, +// and unstructured formats. +type Logger struct { + // level describes the log level + level zerolog.Level + + // structuredLogger describes a logger that will be used to output structured logs to any arbitrary channel. 
+ structuredLogger zerolog.Logger + + // structuredWriters describes the various channels that the output from the structuredLogger will go to. + structuredWriters []io.Writer + + // unstructuredLogger describes a logger that will be used to stream un-colorized, unstructured output to any arbitrary channel. + unstructuredLogger zerolog.Logger + + // unstructuredWriters describes the various channels that the output from the unstructuredLogger will go to. + unstructuredWriters []io.Writer + + // unstructuredColorLogger describes a logger that will be used to stream colorized, unstructured output to any arbitrary channel. + unstructuredColorLogger zerolog.Logger + + // unstructuredColorWriters describes the various channels that the output from the unstructuredColoredLogger will go to. + unstructuredColorWriters []io.Writer +} + +// LogFormat describes what format to log in +type LogFormat string + +const ( + // STRUCTURED describes that logging should be done in structured JSON format + STRUCTURED LogFormat = "structured" + // UNSTRUCTRED describes that logging should be done in an unstructured format + UNSTRUCTURED LogFormat = "unstructured" +) + +// StructuredLogInfo describes a key-value mapping that can be used to log structured data +type StructuredLogInfo map[string]any + +// NewLogger will create a new Logger object with a specific log level. By default, a logger that is instantiated +// with this function is not usable until a log channel is added. To add or remove channels that the logger +// streams logs to, call the Logger.AddWriter and Logger.RemoveWriter functions. 
+func NewLogger(level zerolog.Level) *Logger { + return &Logger{ + level: level, + structuredLogger: zerolog.New(nil).Level(level), + structuredWriters: make([]io.Writer, 0), + unstructuredLogger: zerolog.New(nil).Level(level), + unstructuredWriters: make([]io.Writer, 0), + unstructuredColorLogger: zerolog.New(nil).Level(level), + unstructuredColorWriters: make([]io.Writer, 0), + } +} + +// NewSubLogger will create a new Logger with unique context in the form of a key-value pair. The expected use of this +// is for each module or component of the system to create their own contextualized logs. The key can be used to search +// for logs from a specific module or component. +func (l *Logger) NewSubLogger(key string, value string) *Logger { + // Create the sub-loggers with the new key-value context + subStructuredLogger := l.structuredLogger.With().Str(key, value).Logger() + subUnstructuredColoredLogger := l.unstructuredColorLogger.With().Str(key, value).Logger() + subUnstructuredLogger := l.unstructuredLogger.With().Str(key, value).Logger() + + // Create new slices for the writers since we want to make a deep copy for each one + subStructuredWriters := make([]io.Writer, len(l.structuredWriters)) + copy(subStructuredWriters, l.structuredWriters) + + subUnstructuredColorWriters := make([]io.Writer, len(l.unstructuredColorWriters)) + copy(subUnstructuredColorWriters, l.unstructuredColorWriters) + + subUnstructuredWriters := make([]io.Writer, len(l.unstructuredWriters)) + copy(subUnstructuredWriters, l.unstructuredWriters) + + // Return a new logger + return &Logger{ + level: l.level, + structuredLogger: subStructuredLogger, + structuredWriters: subStructuredWriters, + unstructuredColorLogger: subUnstructuredColoredLogger, + unstructuredColorWriters: subUnstructuredColorWriters, + unstructuredLogger: subUnstructuredLogger, + unstructuredWriters: subUnstructuredWriters, + } +} + +// AddWriter will add a writer to which log output will go to. 
If the format is structured then the writer will get +// structured output. If the writer is unstructured, then the writer has the choice to either receive colored or un-colored +// output. Note that unstructured writers will be converted into a zerolog.ConsoleWriter to maintain the same format +// across all unstructured output streams. +func (l *Logger) AddWriter(writer io.Writer, format LogFormat, colored bool) { + // First, try to add the writer to the list of channels that want structured logs + if format == STRUCTURED { + for _, w := range l.structuredWriters { + if w == writer { + // Writer already exists, return + return + } + } + // Add the writer and recreate the logger + l.structuredWriters = append(l.structuredWriters, writer) + l.structuredLogger = zerolog.New(zerolog.MultiLevelWriter(l.structuredWriters...)).Level(l.level).With().Timestamp().Logger() + return + } + + // Now that we know we are going to create an unstructured writer, we will create an unstructured writer with(out) coloring + // using zerolog's console writer object. 
+ unstructuredWriter := formatUnstructuredWriter(writer, l.level, colored) + + // Now, try to add the writer to the list of channels that want unstructured, colored logs + if format == UNSTRUCTURED && colored { + for _, w := range l.unstructuredColorWriters { + // We must convert the writer to a console writer to correctly check for existence within the list + if w.(zerolog.ConsoleWriter).Out == writer { + // Writer already exists, return + return + } + } + // Add the unstructured writer and recreate the logger + l.unstructuredColorWriters = append(l.unstructuredColorWriters, unstructuredWriter) + l.unstructuredColorLogger = zerolog.New(zerolog.MultiLevelWriter(l.unstructuredColorWriters...)).Level(l.level).With().Timestamp().Logger() + } + + // Otherwise, try to add the writer to the list of channels that want unstructured, un-colored logs + if format == UNSTRUCTURED && !colored { + for _, w := range l.unstructuredWriters { + // We must convert the writer to a console writer to correctly check for existence within the list + if w.(zerolog.ConsoleWriter).Out == writer { + // Writer already exists, return + return + } + } + // Add the unstructured writer and recreate the logger + l.unstructuredWriters = append(l.unstructuredWriters, unstructuredWriter) + l.unstructuredLogger = zerolog.New(zerolog.MultiLevelWriter(l.unstructuredWriters...)).Level(l.level).With().Timestamp().Logger() + } +} + +// RemoveWriter will remove a writer from the list of writers that the logger manages. The writer will be either removed +// from the list of structured, unstructured and colored, or unstructured and un-colored writers. If the same writer +// is receiving multiple types of log output (e.g. structured and unstructured with color) then this function must be called +// multiple times. If the writer does not exist in any list, then this function is a no-op. 
+func (l *Logger) RemoveWriter(writer io.Writer, format LogFormat, colored bool) { + // First, try to remove the writer from the list of structured writers + if format == STRUCTURED { + // Check for writer existence + for i, w := range l.structuredWriters { + if w == writer { + // Remove the writer and recreate the logger + l.structuredWriters = append(l.structuredWriters[:i], l.structuredWriters[i+1:]...) + l.structuredLogger = zerolog.New(zerolog.MultiLevelWriter(l.structuredWriters...)).Level(l.level).With().Timestamp().Logger() + } + } + } + + // Now, try to remove the writer from the list of unstructured, colored writers + if format == UNSTRUCTURED && colored { + // Check for writer existence + for i, w := range l.unstructuredColorWriters { + // We must convert the writer to a console writer to correctly check for existence within the list + if w.(zerolog.ConsoleWriter).Out == writer { + // Remove the writer and recreate the logger + l.unstructuredColorWriters = append(l.unstructuredColorWriters[:i], l.unstructuredColorWriters[i+1:]...) + l.unstructuredColorLogger = zerolog.New(zerolog.MultiLevelWriter(l.unstructuredColorWriters...)).Level(l.level).With().Timestamp().Logger() + } + } + } + + // Otherwise, try to remove the writer from the list of unstructured, un-colored writers + if format == UNSTRUCTURED && !colored { + // Check for writer existence + for i, w := range l.unstructuredWriters { + // We must convert the writer to a console writer to correctly check for existence within the list + if w.(zerolog.ConsoleWriter).Out == writer { + // Remove the writer and recreate the logger + l.unstructuredWriters = append(l.unstructuredWriters[:i], l.unstructuredWriters[i+1:]...) 
+ l.unstructuredLogger = zerolog.New(zerolog.MultiLevelWriter(l.unstructuredWriters...)).Level(l.level).With().Timestamp().Logger() + } + } + } +} + +// Level will get the log level of the Logger +func (l *Logger) Level() zerolog.Level { + return l.level +} + +// SetLevel will update the log level of the Logger +func (l *Logger) SetLevel(level zerolog.Level) { + l.level = level + + // Update the level of each underlying logger + l.structuredLogger = l.structuredLogger.Level(level) + l.unstructuredColorLogger = l.unstructuredColorLogger.Level(level) + l.unstructuredLogger = l.unstructuredLogger.Level(level) + +} + +// Trace is a wrapper function that will log a trace event +func (l *Logger) Trace(args ...any) { + // Build the messages and retrieve any errors or associated structured log info + colorMsg, noColorMsg, errs, info := buildMsgs(args...) + + // Instantiate log events + structuredLog := l.structuredLogger.Trace() + unstructuredColoredLog := l.unstructuredColorLogger.Trace() + unstructuredLog := l.unstructuredLogger.Trace() + + // Chain the structured log info, errors, and messages and send off the logs + chainStructuredLogInfoErrorsAndMsgs(structuredLog, unstructuredColoredLog, unstructuredLog, info, errs, colorMsg, noColorMsg) +} + +// Debug is a wrapper function that will log a debug event +func (l *Logger) Debug(args ...any) { + // Build the messages and retrieve any errors or associated structured log info + colorMsg, noColorMsg, errs, info := buildMsgs(args...) 
+ + // Instantiate log events + structuredLog := l.structuredLogger.Debug() + unstructuredColoredLog := l.unstructuredColorLogger.Debug() + unstructuredLog := l.unstructuredLogger.Debug() + + // Chain the structured log info, errors, and messages and send off the logs + chainStructuredLogInfoErrorsAndMsgs(structuredLog, unstructuredColoredLog, unstructuredLog, info, errs, colorMsg, noColorMsg) +} + +// Info is a wrapper function that will log an info event +func (l *Logger) Info(args ...any) { + // Build the messages and retrieve any errors or associated structured log info + colorMsg, noColorMsg, errs, info := buildMsgs(args...) + + // Instantiate log events + structuredLog := l.structuredLogger.Info() + unstructuredColoredLog := l.unstructuredColorLogger.Info() + unstructuredLog := l.unstructuredLogger.Info() + + // Chain the structured log info, errors, and messages and send off the logs + chainStructuredLogInfoErrorsAndMsgs(structuredLog, unstructuredColoredLog, unstructuredLog, info, errs, colorMsg, noColorMsg) +} + +// Warn is a wrapper function that will log a warning event both on console +func (l *Logger) Warn(args ...any) { + // Build the messages and retrieve any errors or associated structured log info + colorMsg, noColorMsg, errs, info := buildMsgs(args...) + + // Instantiate log events + structuredLog := l.structuredLogger.Warn() + unstructuredColoredLog := l.unstructuredColorLogger.Warn() + unstructuredLog := l.unstructuredLogger.Warn() + + // Chain the structured log info, errors, and messages and send off the logs + chainStructuredLogInfoErrorsAndMsgs(structuredLog, unstructuredColoredLog, unstructuredLog, info, errs, colorMsg, noColorMsg) +} + +// Error is a wrapper function that will log an error event. +func (l *Logger) Error(args ...any) { + // Build the messages and retrieve any errors or associated structured log info + colorMsg, noColorMsg, errs, info := buildMsgs(args...) 
+ + // Instantiate log events + structuredLog := l.structuredLogger.Error() + unstructuredColoredLog := l.unstructuredColorLogger.Error() + unstructuredLog := l.unstructuredLogger.Error() + + // Chain the structured log info, errors, and messages and send off the logs + chainStructuredLogInfoErrorsAndMsgs(structuredLog, unstructuredColoredLog, unstructuredLog, info, errs, colorMsg, noColorMsg) +} + +// Panic is a wrapper function that will log a panic event +func (l *Logger) Panic(args ...any) { + // Build the messages and retrieve any errors or associated structured log info + colorMsg, noColorMsg, errs, info := buildMsgs(args...) + + // Instantiate log events + structuredLog := l.structuredLogger.Panic() + unstructuredColoredLog := l.unstructuredColorLogger.Panic() + unstructuredLog := l.unstructuredLogger.Panic() + + // Chain the structured log info, errors, and messages and send off the logs + chainStructuredLogInfoErrorsAndMsgs(structuredLog, unstructuredColoredLog, unstructuredLog, info, errs, colorMsg, noColorMsg) +} + +// buildMsgs describes a function that takes in a variadic list of arguments of any type and returns two strings and, +// optionally, a list of errors and a StructuredLogInfo object. The first string will be a colorized-message while the +// second string will be a non-colorized one. Colors are applied if one or more of the input arguments are of type +// colors.ColorFunc. The colorized message can be used for channels that request unstructured, colorized log output +// while the non-colorized one can be used for structured streams and unstructured streams that don't want color. The +// errors and the StructuredLogInfo can be used to add additional context to log messages. 
+func buildMsgs(args ...any) (string, string, []error, StructuredLogInfo) { + // Guard clause + if len(args) == 0 { + return "", "", nil, nil + } + + // Initialize the base color context, the string buffers and the structured log info object + colorCtx := colors.Reset + colorMsg := make([]string, 0) + noColorMsg := make([]string, 0) + errs := make([]error, 0) + var info StructuredLogInfo + + // Iterate through each argument in the list and switch on type + for _, arg := range args { + switch t := arg.(type) { + case colors.ColorFunc: + // If the argument is a color function, switch the current color context + colorCtx = t + case StructuredLogInfo: + // Note that only one structured log info can be provided for each log message + info = t + case error: + // Append error to the list of errors + errs = append(errs, t) + default: + // In the base case, append the object to the two string buffers. The colored string buffer will have the + // current color context applied to it. + colorMsg = append(colorMsg, colorCtx(t)) + noColorMsg = append(noColorMsg, fmt.Sprintf("%v", t)) + } + } + + return strings.Join(colorMsg, ""), strings.Join(noColorMsg, ""), errs, info +} + +// chainStructuredLogInfoErrorsAndMsgs describes a function that takes in a *zerolog.Event for the structured, unstructured +// with color, and unstructured without colors log streams, chains any StructuredLogInfo and errors provided to it, +// adds the associated messages, and sends out the logs to their respective channels. Note that the StructuredLogInfo object +// is only appended to the structured log event and not to the unstructured ones. Additionally, note that errors are appended as a +// formatted bulleted list for unstructured logging while for the structured logger they get appended as a key-value pair. 
+func chainStructuredLogInfoErrorsAndMsgs(structuredLog *zerolog.Event, unstructuredColoredLog *zerolog.Event, unstructuredLog *zerolog.Event, info StructuredLogInfo, errs []error, colorMsg string, noColorMsg string) { + // First, we need to create a formatted error string for unstructured output + var errStr string + for _, err := range errs { + // Append a bullet point and the formatted error to the error string + errStr += "\n" + colors.BULLET_POINT + " " + err.Error() + } + + // Add structured error element to the multi-log output and append the error string to the console message + // TODO: Add support for stack traces in the future + if len(errs) != 0 { + structuredLog.Errs("errors", errs) + } + + // The structured message will be the one without any potential errors appended to it since the errors will be provided + // as a key-value pair + structuredMsg := noColorMsg + + // Add the colorized and non-colorized version of the error string to the colorized and non-colorized messages, respectively. + if len(errStr) > 0 { + colorMsg += colors.Red(errStr) + noColorMsg += errStr + } + + // If we are provided a structured log info object, add that as a key-value pair to the structured log event + if info != nil { + structuredLog.Any("info", info) + } + + // Append the messages to each event. This will also result in the log events being sent out to their respective + // streams. Note that we are deferring the message to two of the three loggers multi logger in case we are logging a panic + // and want to make sure that all channels receive the panic log. + defer func() { + structuredLog.Msg(structuredMsg) + unstructuredLog.Msg(noColorMsg) + }() + unstructuredColoredLog.Msg(colorMsg) +} + +// formatUnstructuredWriter will create a custom-formatted zerolog.ConsoleWriter from an arbitrary io.Writer. A zerolog.ConsoleWriter is +// what is used under-the-hood to support unstructured log output. 
Custom formatting is applied to specific fields, +// timestamps, and the log level strings. If requested, coloring may be applied to the log level strings. +func formatUnstructuredWriter(writer io.Writer, level zerolog.Level, colored bool) zerolog.ConsoleWriter { + // Create the console writer + consoleWriter := zerolog.ConsoleWriter{Out: writer, NoColor: !colored} + + // Get rid of the timestamp for unstructured output + consoleWriter.FormatTimestamp = func(i interface{}) string { + return "" + } + + // If we are above debug level, we want to get rid of the `module` component when logging to unstructured streams + if level > zerolog.DebugLevel { + consoleWriter.FieldsExclude = []string{"module"} + } + + // If coloring is enabled, we will return a custom, colored string for each log severity level + // Otherwise, we will just return a non-colorized string for each log severity level + consoleWriter.FormatLevel = func(i any) string { + // Create a level object for better switch logic + level, err := zerolog.ParseLevel(i.(string)) + if err != nil { + panic(fmt.Sprintf("unable to parse the log level: %v", err)) + } + + // Switch on the level + switch level { + case zerolog.TraceLevel: + if !colored { + // No coloring for "trace" string + return zerolog.LevelTraceValue + } + // Return a bold, cyan "trace" string + return colors.CyanBold(zerolog.LevelTraceValue) + case zerolog.DebugLevel: + if !colored { + // No coloring for "debug" string + return zerolog.LevelDebugValue + } + // Return a bold, blue "debug" string + return colors.BlueBold(zerolog.LevelDebugValue) + case zerolog.InfoLevel: + if !colored { + // Return a left arrow without any coloring + return colors.LEFT_ARROW + } + // Return a bold, green left arrow + return colors.GreenBold(colors.LEFT_ARROW) + case zerolog.WarnLevel: + if !colored { + // No coloring for "warn" string + return zerolog.LevelWarnValue + } + // Return a bold, yellow "warn" string + return colors.YellowBold(zerolog.LevelWarnValue) + case 
zerolog.ErrorLevel: + if !colored { + // No coloring for "err" string + return zerolog.LevelErrorValue + } + // Return a bold, red "err" string + return colors.RedBold(zerolog.LevelErrorValue) + case zerolog.FatalLevel: + if !colored { + // No coloring for "fatal" string + return zerolog.LevelFatalValue + } + // Return a bold, red "fatal" string + return colors.RedBold(zerolog.LevelFatalValue) + case zerolog.PanicLevel: + if !colored { + // No coloring for "panic" string + return zerolog.LevelPanicValue + } + // Return a bold, red "panic" string + return colors.RedBold(zerolog.LevelPanicValue) + default: + return i.(string) + } + } + + return consoleWriter +} diff --git a/logging/logger_test.go b/logging/logger_test.go new file mode 100644 index 00000000..f613fcdf --- /dev/null +++ b/logging/logger_test.go @@ -0,0 +1,76 @@ +package logging + +import ( + "bytes" + "fmt" + "os" + "strings" + "testing" + + "github.com/crytic/medusa/logging/colors" + "github.com/rs/zerolog" + "github.com/stretchr/testify/assert" +) + +// TestAddAndRemoveWriter will test to Logger.AddWriter and Logger.RemoveWriter functions to ensure that they work as expected. +func TestAddAndRemoveWriter(t *testing.T) { + // Create a base logger + logger := NewLogger(zerolog.InfoLevel) + + // Add three types of writers + // 1. Unstructured and colorized output to stdout + logger.AddWriter(os.Stdout, UNSTRUCTURED, true) + // 2. Unstructured and non-colorized output to stderr + logger.AddWriter(os.Stderr, UNSTRUCTURED, false) + // 3. 
Structured output to stdin + logger.AddWriter(os.Stdin, STRUCTURED, false) + + // We should expect the underlying data structures are correctly updated + assert.Equal(t, len(logger.unstructuredWriters), 1) + assert.Equal(t, len(logger.unstructuredColorWriters), 1) + assert.Equal(t, len(logger.structuredWriters), 1) + + // Try to add duplicate writers + logger.AddWriter(os.Stdout, UNSTRUCTURED, true) + logger.AddWriter(os.Stderr, UNSTRUCTURED, false) + logger.AddWriter(os.Stdin, STRUCTURED, false) + + // Ensure that the lengths of the lists have not changed + assert.Equal(t, len(logger.unstructuredWriters), 1) + assert.Equal(t, len(logger.unstructuredColorWriters), 1) + assert.Equal(t, len(logger.structuredWriters), 1) + + // Remove each writer + logger.RemoveWriter(os.Stdout, UNSTRUCTURED, true) + logger.RemoveWriter(os.Stderr, UNSTRUCTURED, false) + logger.RemoveWriter(os.Stdin, STRUCTURED, false) + + // We should expect the underlying data structures are correctly updated + assert.Equal(t, len(logger.unstructuredWriters), 0) + assert.Equal(t, len(logger.unstructuredColorWriters), 0) + assert.Equal(t, len(logger.structuredWriters), 0) +} + +// TestDisabledColors verifies the behavior of the unstructured colored logger when colors are disabled, +// ensuring that it does not output colors when the color feature is turned off. 
+func TestDisabledColors(t *testing.T) { + // Create a base logger + logger := NewLogger(zerolog.InfoLevel) + + // Add colorized logger + var buf bytes.Buffer + logger.AddWriter(&buf, UNSTRUCTURED, true) + + // We should expect the underlying data structures are correctly updated + assert.Equal(t, len(logger.unstructuredColorWriters), 1) + + // Disable colors and log msg + colors.DisableColor() + logger.Info("foo") + + // Ensure that msg doesn't include colors afterwards (it is bolded) + prefix := fmt.Sprintf("%s \033[1m%s\033[0m", colors.LEFT_ARROW, "foo") + + _, _, ok := strings.Cut(buf.String(), prefix) + assert.True(t, ok) +} diff --git a/main.go b/main.go index 2c8c7295..b0bb8eaf 100644 --- a/main.go +++ b/main.go @@ -3,6 +3,7 @@ package main import ( "fmt" "github.com/crytic/medusa/cmd" + "github.com/crytic/medusa/cmd/exitcodes" "os" ) @@ -10,10 +11,17 @@ func main() { // Run our root CLI command, which contains all underlying command logic and will handle parsing/invocation. err := cmd.Execute() - // Print any error we encountered - if err != nil { - // TODO: Replace this when we have an appropriate logger in place. - fmt.Printf("ERROR:\n%s", err.Error()) - os.Exit(1) + // Obtain the actual error and exit code from the error, if any. + var exitCode int + err, exitCode = exitcodes.GetInnerErrorAndExitCode(err) + + // If we have an error, print it. + if err != nil && exitCode != exitcodes.ExitCodeHandledError { + fmt.Println(err) + } + + // If we have a non-success exit code, exit with it. 
+ if exitCode != exitcodes.ExitCodeSuccess { + os.Exit(exitCode) } } diff --git a/scripts/corpus_diff.py b/scripts/corpus_diff.py new file mode 100644 index 00000000..b622f212 --- /dev/null +++ b/scripts/corpus_diff.py @@ -0,0 +1,63 @@ +import os +import json +import sys + +def load_json_files_from_subdirectory(subdirectory): + json_data = [] + for root, _, files in os.walk(subdirectory): + for file in files: + if file.endswith('.json'): + with open(os.path.join(root, file), 'r') as f: + data = json.load(f) + json_data.extend(data) + return json_data + +def extract_unique_methods(transactions): + unique_methods = set() + for tx in transactions: + call_data = tx.get('call', {}) + data_abi_values = call_data.get('dataAbiValues', {}) + method_signature = data_abi_values.get('methodSignature', '') + if method_signature: + unique_methods.add(method_signature) + return unique_methods + +def compare_methods(subdirectory1, subdirectory2): + transactions1 = load_json_files_from_subdirectory(subdirectory1) + transactions2 = load_json_files_from_subdirectory(subdirectory2) + + unique_methods1 = extract_unique_methods(transactions1) + unique_methods2 = extract_unique_methods(transactions2) + + only_in_subdir1 = unique_methods1 - unique_methods2 + only_in_subdir2 = unique_methods2 - unique_methods1 + + return only_in_subdir1, only_in_subdir2 + +def main(subdirectory1, subdirectory2): + + only_in_subdir1, only_in_subdir2 = compare_methods(subdirectory1, subdirectory2) + + print(f"Methods only in {subdirectory1}:") + if len(only_in_subdir1) == 0: + print(" ") + else: + for method in only_in_subdir1: + print(f"- {method}") + print("\n") + + + print(f"Methods only in {subdirectory2}:") + if len(only_in_subdir2) == 0: + print(" ") + else: + for method in only_in_subdir2: + print(f"- {method}") + print("\n") + +if __name__ == '__main__': + if len(sys.argv) != 3: + print("Usage: python3 unique.py ") + print("Compares the unique methods in the two given corpora.") + sys.exit(1) + 
main(sys.argv[1], sys.argv[2]) diff --git a/scripts/corpus_stats.py b/scripts/corpus_stats.py new file mode 100644 index 00000000..a5c818f8 --- /dev/null +++ b/scripts/corpus_stats.py @@ -0,0 +1,57 @@ +import os +import json +from collections import Counter +import sys + +def load_json_files_from_subdirectory(subdirectory): + json_data = [] + for root, _, files in os.walk(subdirectory): + for file in files: + if file.endswith('.json'): + with open(os.path.join(root, file), 'r') as f: + data = json.load(f) + json_data.append(data) + return json_data + + +def analyze_transactions(transactions, method_counter): + + for tx in transactions: + call_data = tx.get('call', {}) + data_abi_values = call_data.get('dataAbiValues', {}) + method_signature = data_abi_values.get('methodSignature', '') + + method_counter[method_signature] += 1 + + + +def main(subdirectory): + transaction_seqs = load_json_files_from_subdirectory(subdirectory) + + method_counter = Counter() + total_length = 0 + + for seq in transaction_seqs: + analyze_transactions(seq, method_counter) + total_length += len(seq) + + average_length = total_length // len(transaction_seqs) + + print(f"Number of Sequences in {subdirectory}: {len(transaction_seqs)}") + print("\n") + + print(f"Average Length of Transactions List: {average_length}") + print("\n") + print("Frequency of Methods Called:") + for method, count in method_counter.most_common(): + print(f"- {method}: {count}") + print("\n") + print(f"Number of Unique Methods: {len(method_counter)}") + print("\n") + +if __name__ == '__main__': + if len(sys.argv) != 2: + print("Usage: python3 corpus_stats.py ") + print("Computes statistics on the transactions in the given corpus.") + sys.exit(1) + main(sys.argv[1]) diff --git a/utils/combinatorial_utils.go b/utils/combinatorial_utils.go new file mode 100644 index 00000000..7eea59d4 --- /dev/null +++ b/utils/combinatorial_utils.go @@ -0,0 +1,46 @@ +package utils + +// PermutationsWithRepetition will take in an array and 
an integer, n, where n represents how many items need to +// be selected from the array. The function returns an array of all permutations of size n +func PermutationsWithRepetition[T any](choices []T, n int) [][]T { + numChoices := len(choices) + + // At each iteration of the for loop below, one of the indices in counter + // increments by one. Here is what selector looks like over a few iterations + // [0, 0, 0, 0] -> [1, 0, 0, 0] -> ... -> [2, 1, 0, 0] -> ... -> [4, 3, 1, 0] and so on until we reach back to + // [0, 0, 0, 0] which means all permutations have been enumerated. + counter := make([]int, n) + permutations := make([][]T, 0) + for { + // The counter will determine the order of the current permutation. The i-th value of the permutation is equal to + // the x-th index in the choices array. + permutation := make([]T, n) + for i, x := range counter { + permutation[i] = choices[x] + } + + // Add the permutation to the list of permutations + permutations = append(permutations, permutation) + + // This for loop will determine the next value of the counter array + for i := 0; ; { + // Increment the i-th index + counter[i]++ + // If we haven't updated the i-th index of counter up to numChoices - 1, we increment that index + if counter[i] < numChoices { + break + } + + // Once the i-th index is equal to numChoices, we reset counter[i] back to 0 and move on to the next index + // with i++ + counter[i] = 0 + i++ + + // Once we reach the length of the counter array, we are done with enumerating all permutations since all + // indices in the counter array have been reset back to 0 + if i == n { + return permutations + } + } + } +} diff --git a/utils/crypto_utils.go b/utils/crypto_utils.go new file mode 100644 index 00000000..0144e51d --- /dev/null +++ b/utils/crypto_utils.go @@ -0,0 +1,24 @@ +package utils + +import ( + "crypto/ecdsa" + "github.com/ethereum/go-ethereum/crypto" + "github.com/pkg/errors" +) + +// GetPrivateKey will return a private key object given a 
byte slice. Only slices between lengths 1 and 32 (inclusive) +// are valid. +func GetPrivateKey(b []byte) (*ecdsa.PrivateKey, error) { + // Make sure that private key is not zero + if len(b) < 1 || len(b) > 32 { + return nil, errors.New("invalid private key") + } + + // Then pad the private key slice to a fixed 32-byte array + paddedPrivateKey := make([]byte, 32) + copy(paddedPrivateKey[32-len(b):], b) + + // Next we will actually retrieve the private key object + privateKey, err := crypto.ToECDSA(paddedPrivateKey[:]) + return privateKey, errors.WithStack(err) +} diff --git a/utils/fs_utils.go b/utils/fs_utils.go index 129b76ef..5a68150e 100644 --- a/utils/fs_utils.go +++ b/utils/fs_utils.go @@ -2,11 +2,38 @@ package utils import ( "fmt" + "github.com/pkg/errors" "io" "os" "path/filepath" ) +// CreateFile will create a file at the given path and file name combination. If the path is the empty string, the +// file will be created in the current working directory +func CreateFile(path string, fileName string) (*os.File, error) { + // By default, the path will be the name of the file + filePath := fileName + + // Check to see if the file needs to be created in another directory or the working directory + if path != "" { + // Make the directory, if it does not exist already + err := MakeDirectory(path) + if err != nil { + return nil, err + } + // Since the path is non-empty, concatenate the path with the name of the file + filePath = filepath.Join(path, fileName) + } + + // Create the file + file, err := os.Create(filePath) + if err != nil { + return nil, errors.WithStack(err) + } + + return file, nil +} + // CopyFile copies a file from a source path to a destination path. File permissions are retained. Returns an error // if one occurs. 
func CopyFile(sourcePath string, targetPath string) error { @@ -52,6 +79,35 @@ func CopyFile(sourcePath string, targetPath string) error { return os.Chmod(targetPath, sourceInfo.Mode()) } +// MoveFile will move a given file from the source path to the target path. Returns an error if one occured. +func MoveFile(sourcePath string, targetPath string) error { + // Obtain file info for the source file + sourceInfo, err := os.Stat(sourcePath) + if err != nil { + return err + } + + // If the path refers to a directory, return an error + if sourceInfo.IsDir() { + return fmt.Errorf("could not copy file from '%s' to '%s' because the source path refers to a directory", sourcePath, targetPath) + } + + // Ensure the existence of the directory we wish to copy to. + targetDirectory := filepath.Dir(targetPath) + err = os.MkdirAll(targetDirectory, 0777) + if err != nil { + return err + } + + // Move the file from the source path to the target path + err = os.Rename(sourcePath, targetPath) + if err != nil { + return err + } + + return nil +} + // GetFileNameWithoutExtension obtains a filename without the extension. This does not contain any preceding directory // paths. func GetFileNameWithoutExtension(filePath string) string { @@ -63,14 +119,15 @@ func GetFilePathWithoutExtension(filePath string) string { return filePath[:len(filePath)-len(filepath.Ext(filePath))] } -// MakeDirectory creates a directory at the given path +// MakeDirectory creates a directory at the given path, including any parent directories which do not exist. +// Returns an error, if one occurred. func MakeDirectory(dirToMake string) error { dirInfo, err := os.Stat(dirToMake) if err != nil { // Directory does not exist, as expected. 
if os.IsNotExist(err) { // TODO: Permissions are way too much but even 666 is not working - err = os.Mkdir(dirToMake, 0777) + err = os.MkdirAll(dirToMake, 0777) if err != nil { return err } diff --git a/utils/integer_utils.go b/utils/integer_utils.go index 8b097901..ef1cf5b8 100644 --- a/utils/integer_utils.go +++ b/utils/integer_utils.go @@ -1,8 +1,9 @@ package utils import ( - "golang.org/x/exp/constraints" "math/big" + + "golang.org/x/exp/constraints" ) // ConstrainIntegerToBounds takes a provided big integer and minimum/maximum bounds (inclusive) and ensures diff --git a/utils/message_transaction_utils.go b/utils/message_transaction_utils.go index cfd54b25..57fd68ed 100644 --- a/utils/message_transaction_utils.go +++ b/utils/message_transaction_utils.go @@ -6,13 +6,13 @@ import ( ) // MessageToTransaction derives a types.Transaction from a types.Message. -func MessageToTransaction(msg core.Message) *types.Transaction { +func MessageToTransaction(msg *core.Message) *types.Transaction { return types.NewTx(&types.LegacyTx{ - Nonce: msg.Nonce(), - GasPrice: msg.GasPrice(), - Gas: msg.Gas(), - To: msg.To(), - Value: msg.Value(), - Data: msg.Data(), + Nonce: msg.Nonce, + GasPrice: msg.GasPrice, + Gas: msg.GasLimit, + To: msg.To, + Value: msg.Value, + Data: msg.Data, }) } diff --git a/utils/randomutils/fork_random.go b/utils/randomutils/fork_random.go index 071da2d3..192b68d6 100644 --- a/utils/randomutils/fork_random.go +++ b/utils/randomutils/fork_random.go @@ -2,6 +2,7 @@ package randomutils import ( "encoding/binary" + "github.com/crytic/medusa/logging" "math/rand" ) @@ -13,7 +14,7 @@ func ForkRandomProvider(randomProvider *rand.Rand) *rand.Rand { b := make([]byte, 8) _, err := randomProvider.Read(b) if err != nil { - panic(err) + logging.GlobalLogger.Panic("Failed to fork random child provider", err) } // Return a new random provider with our derived seed. 
diff --git a/utils/reflectionutils/reflected_type_utils.go b/utils/reflectionutils/reflected_type_utils.go index a3ffd48e..34e277b5 100644 --- a/utils/reflectionutils/reflected_type_utils.go +++ b/utils/reflectionutils/reflected_type_utils.go @@ -2,6 +2,7 @@ package reflectionutils import ( "fmt" + "github.com/crytic/medusa/logging" "reflect" ) @@ -54,7 +55,9 @@ func CopyReflectedType(reflectedValue reflect.Value) reflect.Value { } return newStruct } - panic("failed to copy reflected value, type not supported") + + logging.GlobalLogger.Panic("Failed to copy reflected value", fmt.Errorf("type not supported")) + return reflectedValue } // GetReflectedArrayValues obtains the values of each element of a reflected array or slice variable. @@ -71,7 +74,9 @@ func GetReflectedArrayValues(reflectedArray reflect.Value) []any { } return values } - panic("failed to get reflected array values, type not supported") + + logging.GlobalLogger.Panic("Failed to get reflected array values", fmt.Errorf("type not supported")) + return nil } // SetReflectedArrayValues takes an array or slice of the same length as the values provided, and sets each element @@ -93,5 +98,7 @@ func SetReflectedArrayValues(reflectedArray reflect.Value, values []any) error { } return nil } - panic("failed to set reflected array values, type not supported") + + logging.GlobalLogger.Panic("Failed to set reflected array values", fmt.Errorf("type not supported")) + return nil }