diff --git a/.cicd/build.sh b/.cicd/build.sh index 8c2aa4c6ed2..a8c645fd904 100755 --- a/.cicd/build.sh +++ b/.cicd/build.sh @@ -2,16 +2,11 @@ set -eo pipefail . ./.cicd/helpers/general.sh mkdir -p $BUILD_DIR -CMAKE_EXTRAS="-DCMAKE_BUILD_TYPE='Release' -DENABLE_MULTIVERSION_PROTOCOL_TEST=true" +CMAKE_EXTRAS="-DCMAKE_BUILD_TYPE='Release' -DENABLE_MULTIVERSION_PROTOCOL_TEST=true -DBUILD_MONGO_DB_PLUGIN=true" if [[ "$(uname)" == 'Darwin' ]]; then # You can't use chained commands in execute - if [[ "$TRAVIS" == 'true' ]]; then + if [[ "$GITHUB_ACTIONS" == 'true' ]]; then export PINNED=false - brew reinstall openssl@1.1 # Fixes issue where builds in Travis cannot find libcrypto. - ccache -s - CMAKE_EXTRAS="$CMAKE_EXTRAS -DCMAKE_CXX_COMPILER_LAUNCHER=ccache" - else - CMAKE_EXTRAS="$CMAKE_EXTRAS -DBUILD_MONGO_DB_PLUGIN=true" fi [[ ! "$PINNED" == 'false' ]] && CMAKE_EXTRAS="$CMAKE_EXTRAS -DCMAKE_TOOLCHAIN_FILE=$HELPERS_DIR/clang.make" cd $BUILD_DIR @@ -20,28 +15,17 @@ if [[ "$(uname)" == 'Darwin' ]]; then echo "make -j$JOBS" make -j$JOBS else # Linux - CMAKE_EXTRAS="$CMAKE_EXTRAS -DBUILD_MONGO_DB_PLUGIN=true" ARGS=${ARGS:-"--rm --init -v $(pwd):$MOUNTED_DIR"} PRE_COMMANDS="cd $MOUNTED_DIR/build" # PRE_COMMANDS: Executed pre-cmake # CMAKE_EXTRAS: Executed within and right before the cmake path (cmake CMAKE_EXTRAS ..) - [[ ! "$IMAGE_TAG" =~ 'unpinned' ]] && CMAKE_EXTRAS="$CMAKE_EXTRAS -DCMAKE_TOOLCHAIN_FILE=$MOUNTED_DIR/.cicd/helpers/clang.make -DCMAKE_CXX_COMPILER_LAUNCHER=ccache" - if [[ "$IMAGE_TAG" == 'amazon_linux-2-pinned' ]]; then - PRE_COMMANDS="$PRE_COMMANDS && export PATH=/usr/lib64/ccache:\\\$PATH" - elif [[ "$IMAGE_TAG" == 'centos-7.7-pinned' ]]; then - PRE_COMMANDS="$PRE_COMMANDS && export PATH=/usr/lib64/ccache:\\\$PATH" - elif [[ "$IMAGE_TAG" == 'ubuntu-16.04-pinned' ]]; then - PRE_COMMANDS="$PRE_COMMANDS && export PATH=/usr/lib/ccache:\\\$PATH" - elif [[ "$IMAGE_TAG" == 'ubuntu-18.04-pinned' ]]; then - PRE_COMMANDS="$PRE_COMMANDS && export PATH=/usr/lib/ccache:\\\$PATH" - elif [[ "$IMAGE_TAG" == 'amazon_linux-2-unpinned' ]]; then - PRE_COMMANDS="$PRE_COMMANDS && export PATH=/usr/lib64/ccache:\\\$PATH" + [[ ! "$IMAGE_TAG" =~ 'unpinned' ]] && CMAKE_EXTRAS="$CMAKE_EXTRAS -DCMAKE_TOOLCHAIN_FILE=$MOUNTED_DIR/.cicd/helpers/clang.make" + if [[ "$IMAGE_TAG" == 'amazon_linux-2-unpinned' ]]; then CMAKE_EXTRAS="$CMAKE_EXTRAS -DCMAKE_CXX_COMPILER='clang++' -DCMAKE_C_COMPILER='clang'" elif [[ "$IMAGE_TAG" == 'centos-7.7-unpinned' ]]; then - PRE_COMMANDS="$PRE_COMMANDS && source /opt/rh/devtoolset-8/enable && source /opt/rh/rh-python36/enable && export PATH=/usr/lib64/ccache:\\\$PATH" + PRE_COMMANDS="$PRE_COMMANDS && source /opt/rh/devtoolset-8/enable && source /opt/rh/rh-python36/enable" CMAKE_EXTRAS="$CMAKE_EXTRAS -DLLVM_DIR='/opt/rh/llvm-toolset-7.0/root/usr/lib64/cmake/llvm'" elif [[ "$IMAGE_TAG" == 'ubuntu-18.04-unpinned' ]]; then - PRE_COMMANDS="$PRE_COMMANDS && export PATH=/usr/lib/ccache:\\\$PATH" CMAKE_EXTRAS="$CMAKE_EXTRAS -DCMAKE_CXX_COMPILER='clang++-7' -DCMAKE_C_COMPILER='clang-7' -DLLVM_DIR='/usr/lib/llvm-7/lib/cmake/llvm'" fi BUILD_COMMANDS="cmake $CMAKE_EXTRAS .. 
&& make -j$JOBS" @@ -52,9 +36,9 @@ else # Linux [[ "$ENABLE_INSTALL" == 'true' ]] && COMMANDS="cp -r $MOUNTED_DIR /root/eosio && cd /root/eosio/build &&" COMMANDS="$COMMANDS $BUILD_COMMANDS" [[ "$ENABLE_INSTALL" == 'true' ]] && COMMANDS="$COMMANDS && make install" - elif [[ "$TRAVIS" == 'true' ]]; then - ARGS="$ARGS -v /usr/lib/ccache -v $HOME/.ccache:/opt/.ccache -e JOBS -e TRAVIS -e CCACHE_DIR=/opt/.ccache" - COMMANDS="ccache -s && $BUILD_COMMANDS" + elif [[ "$GITHUB_ACTIONS" == 'true' ]]; then + ARGS="$ARGS -e JOBS" + COMMANDS="$BUILD_COMMANDS" fi . $HELPERS_DIR/file-hash.sh $CICD_DIR/platforms/$PLATFORM_TYPE/$IMAGE_TAG.dockerfile COMMANDS="$PRE_COMMANDS && $COMMANDS" diff --git a/.cicd/generate-pipeline.sh b/.cicd/generate-pipeline.sh index 3072f0d4b34..a8c239684a9 100755 --- a/.cicd/generate-pipeline.sh +++ b/.cicd/generate-pipeline.sh @@ -484,6 +484,7 @@ if [[ "$BUILDKITE_PIPELINE_SLUG" == 'eosio' && -z "${SKIP_INSTALL}${SKIP_LINUX}$ - label: ":chains: Sync from Genesis Test" trigger: "eosio-sync-from-genesis" async: false + if: build.env("BUILDKITE_TAG") == null build: message: "Triggered by $BUILDKITE_PIPELINE_SLUG build $BUILDKITE_BUILD_NUMBER" commit: "${BUILDKITE_COMMIT}" @@ -503,6 +504,7 @@ if [[ "$BUILDKITE_PIPELINE_SLUG" == 'eosio' && -z "${SKIP_INSTALL}${SKIP_LINUX}$ - label: ":outbox_tray: Resume from State Test" trigger: "eosio-resume-from-state" async: false + if: build.env("BUILDKITE_TAG") == null build: message: "Triggered by $BUILDKITE_PIPELINE_SLUG build $BUILDKITE_BUILD_NUMBER" commit: "${BUILDKITE_COMMIT}" diff --git a/.cicd/platforms/pinned/amazon_linux-2-pinned.dockerfile b/.cicd/platforms/pinned/amazon_linux-2-pinned.dockerfile index 1344fd698bd..7611cdd62ac 100644 --- a/.cicd/platforms/pinned/amazon_linux-2-pinned.dockerfile +++ b/.cicd/platforms/pinned/amazon_linux-2-pinned.dockerfile @@ -78,6 +78,4 @@ RUN curl -L https://github.com/mongodb/mongo-cxx-driver/archive/r3.4.0.tar.gz -o cd / && \ rm -rf mongo-cxx-driver-r3.4.0.tar.gz /mongo-cxx-driver-r3.4.0 # add mongodb to path -ENV PATH=${PATH}:/mongodb-linux-x86_64-amazon-3.6.3/bin -# install ccache -RUN yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm && yum install -y ccache \ No newline at end of file +ENV PATH=${PATH}:/mongodb-linux-x86_64-amazon-3.6.3/bin \ No newline at end of file diff --git a/.cicd/platforms/pinned/centos-7.7-pinned.dockerfile b/.cicd/platforms/pinned/centos-7.7-pinned.dockerfile index 53f27c3be29..2d52a765584 100644 --- a/.cicd/platforms/pinned/centos-7.7-pinned.dockerfile +++ b/.cicd/platforms/pinned/centos-7.7-pinned.dockerfile @@ -87,9 +87,4 @@ RUN curl -L https://github.com/mongodb/mongo-cxx-driver/archive/r3.4.0.tar.gz -o cd / && \ rm -rf mongo-cxx-driver-r3.4.0.tar.gz /mongo-cxx-driver-r3.4.0 # add mongodb to path -ENV PATH=${PATH}:/mongodb-linux-x86_64-amazon-3.6.3/bin -# install ccache -RUN yum install -y ccache -# fix ccache for centos -RUN cd /usr/lib64/ccache && ln -s ../../bin/ccache c++ -ENV CCACHE_PATH="/opt/rh/devtoolset-8/root/usr/bin" \ No newline at end of file +ENV PATH=${PATH}:/mongodb-linux-x86_64-amazon-3.6.3/bin \ No newline at end of file diff --git a/.cicd/platforms/pinned/macos-10.14-pinned.sh b/.cicd/platforms/pinned/macos-10.14-pinned.sh index 99af86b61d0..40507b65ca9 100755 --- a/.cicd/platforms/pinned/macos-10.14-pinned.sh +++ b/.cicd/platforms/pinned/macos-10.14-pinned.sh @@ -2,7 +2,7 @@ set -eo pipefail VERSION=1 brew update -brew install git cmake python@2 python libtool libusb graphviz automake wget gmp llvm@7 
pkgconfig doxygen openssl@1.1 jq || : +brew install git cmake python libtool libusb graphviz automake wget gmp llvm@7 pkgconfig doxygen openssl@1.1 jq || : # install clang from source git clone --single-branch --branch release_80 https://git.llvm.org/git/llvm.git clang8 cd clang8 diff --git a/.cicd/platforms/pinned/ubuntu-16.04-pinned.dockerfile b/.cicd/platforms/pinned/ubuntu-16.04-pinned.dockerfile index a8e4743f009..9682578d761 100644 --- a/.cicd/platforms/pinned/ubuntu-16.04-pinned.dockerfile +++ b/.cicd/platforms/pinned/ubuntu-16.04-pinned.dockerfile @@ -79,13 +79,4 @@ RUN curl -L https://github.com/mongodb/mongo-cxx-driver/archive/r3.4.0.tar.gz -o cd / && \ rm -rf mongo-cxx-driver-r3.4.0.tar.gz /mongo-cxx-driver-r3.4.0 # add mongodb to path -ENV PATH=${PATH}:/mongodb-linux-x86_64-ubuntu1604-3.6.3/bin -# install ccache -RUN curl -LO https://github.com/ccache/ccache/releases/download/v3.4.1/ccache-3.4.1.tar.gz && \ - tar -xzf ccache-3.4.1.tar.gz && \ - cd ccache-3.4.1 && \ - ./configure && \ - make && \ - make install && \ - cd / && \ - rm -rf ccache-3.4.1.tar.gz /ccache-3.4.1 \ No newline at end of file +ENV PATH=${PATH}:/mongodb-linux-x86_64-ubuntu1604-3.6.3/bin \ No newline at end of file diff --git a/.cicd/platforms/pinned/ubuntu-18.04-pinned.dockerfile b/.cicd/platforms/pinned/ubuntu-18.04-pinned.dockerfile index 2712342e28d..9b166645ced 100644 --- a/.cicd/platforms/pinned/ubuntu-18.04-pinned.dockerfile +++ b/.cicd/platforms/pinned/ubuntu-18.04-pinned.dockerfile @@ -8,7 +8,7 @@ RUN apt-get update && \ autotools-dev libicu-dev python2.7 python2.7-dev python3 \ python3-dev python-configparser python-requests python-pip \ autoconf libtool g++ gcc curl zlib1g-dev sudo ruby libusb-1.0-0-dev \ - libcurl4-gnutls-dev pkg-config patch ccache vim-common jq + libcurl4-gnutls-dev pkg-config patch vim-common jq # build cmake. 
RUN curl -LO https://cmake.org/files/v3.13/cmake-3.13.2.tar.gz && \ tar -xzf cmake-3.13.2.tar.gz && \ diff --git a/.cicd/platforms/unpinned/amazon_linux-2-unpinned.dockerfile b/.cicd/platforms/unpinned/amazon_linux-2-unpinned.dockerfile index 82c4c5c3d02..00248b8ac44 100644 --- a/.cicd/platforms/unpinned/amazon_linux-2-unpinned.dockerfile +++ b/.cicd/platforms/unpinned/amazon_linux-2-unpinned.dockerfile @@ -51,6 +51,4 @@ RUN curl -L https://github.com/mongodb/mongo-cxx-driver/archive/r3.4.0.tar.gz -o cd / && \ rm -rf mongo-cxx-driver-r3.4.0.tar.gz /mongo-cxx-driver-r3.4.0 # add mongodb to path -ENV PATH=${PATH}:/mongodb-linux-x86_64-amazon-3.6.3/bin -# install ccache -RUN yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm && yum install -y ccache \ No newline at end of file +ENV PATH=${PATH}:/mongodb-linux-x86_64-amazon-3.6.3/bin \ No newline at end of file diff --git a/.cicd/platforms/unpinned/centos-7.7-unpinned.dockerfile b/.cicd/platforms/unpinned/centos-7.7-unpinned.dockerfile index dcef2d0eeee..d246dbd5063 100644 --- a/.cicd/platforms/unpinned/centos-7.7-unpinned.dockerfile +++ b/.cicd/platforms/unpinned/centos-7.7-unpinned.dockerfile @@ -62,9 +62,4 @@ RUN curl -L https://github.com/mongodb/mongo-cxx-driver/archive/r3.4.0.tar.gz -o cd / && \ rm -rf mongo-cxx-driver-r3.4.0.tar.gz /mongo-cxx-driver-r3.4.0 # add mongodb to path -ENV PATH=${PATH}:/mongodb-linux-x86_64-amazon-3.6.3/bin -# install ccache -RUN yum install -y ccache -# fix ccache for centos -RUN cd /usr/lib64/ccache && ln -s ../../bin/ccache c++ -ENV CCACHE_PATH="/opt/rh/devtoolset-8/root/usr/bin" \ No newline at end of file +ENV PATH=${PATH}:/mongodb-linux-x86_64-amazon-3.6.3/bin \ No newline at end of file diff --git a/.cicd/platforms/unpinned/macos-10.14-unpinned.sh b/.cicd/platforms/unpinned/macos-10.14-unpinned.sh index 069e8162e4a..a3cedb1ceb2 100755 --- a/.cicd/platforms/unpinned/macos-10.14-unpinned.sh +++ b/.cicd/platforms/unpinned/macos-10.14-unpinned.sh @@ -2,7 +2,7 @@ set -eo pipefail VERSION=1 brew update -brew install git cmake python@2 python libtool libusb graphviz automake wget gmp llvm@7 pkgconfig doxygen openssl@1.1 jq boost || : +brew install git cmake python libtool libusb graphviz automake wget gmp llvm@7 pkgconfig doxygen openssl@1.1 jq boost || : # install mongoDB cd ~ curl -OL https://fastdl.mongodb.org/osx/mongodb-osx-ssl-x86_64-3.6.3.tgz diff --git a/.cicd/platforms/unpinned/ubuntu-18.04-unpinned.dockerfile b/.cicd/platforms/unpinned/ubuntu-18.04-unpinned.dockerfile index 22e4cb0e0de..e50343cc06b 100644 --- a/.cicd/platforms/unpinned/ubuntu-18.04-unpinned.dockerfile +++ b/.cicd/platforms/unpinned/ubuntu-18.04-unpinned.dockerfile @@ -7,7 +7,7 @@ RUN apt-get update && \ bzip2 automake libbz2-dev libssl-dev doxygen graphviz libgmp3-dev \ autotools-dev libicu-dev python2.7 python2.7-dev python3 python3-dev \ autoconf libtool curl zlib1g-dev sudo ruby libusb-1.0-0-dev \ - libcurl4-gnutls-dev pkg-config patch llvm-7-dev clang-7 ccache vim-common jq + libcurl4-gnutls-dev pkg-config patch llvm-7-dev clang-7 vim-common jq # build cmake. 
RUN curl -LO https://cmake.org/files/v3.13/cmake-3.13.2.tar.gz && \ tar -xzf cmake-3.13.2.tar.gz && \ diff --git a/.cicd/submodule-regression-check.sh b/.cicd/submodule-regression-check.sh index 47b4bcacc4b..80999067204 100755 --- a/.cicd/submodule-regression-check.sh +++ b/.cicd/submodule-regression-check.sh @@ -2,16 +2,15 @@ set -eo pipefail declare -A PR_MAP declare -A BASE_MAP -# Support Travis and BK -if ${TRAVIS:-false}; then - [[ -z $TRAVIS_PULL_REQUEST_BRANCH ]] && echo "Unable to find TRAVIS_PULL_REQUEST_BRANCH ENV. Skipping submodule regression check." && exit 0 - BASE_BRANCH=$TRAVIS_BRANCH - CURRENT_BRANCH=$TRAVIS_PULL_REQUEST_BRANCH - [[ ! -z $TRAVIS_PULL_REQUEST_SLUG ]] && CURRENT_BRANCH=$TRAVIS_COMMIT # When we're not running from a PR, the slug is not set. When we are, we need to use the TRAVIS_COMMIT to be sure we're supporting the Forked PR's merge/code that's in the EOS repo. This is needed for the git log below. -else + +if [[ $BUILDKITE == true ]]; then [[ -z $BUILDKITE_PULL_REQUEST_BASE_BRANCH ]] && echo "Unable to find BUILDKITE_PULL_REQUEST_BASE_BRANCH ENV. Skipping submodule regression check." && exit 0 BASE_BRANCH=$BUILDKITE_PULL_REQUEST_BASE_BRANCH CURRENT_BRANCH=$BUILDKITE_BRANCH +else + [[ -z $GITHUB_BASE_REF ]] && echo "Cannot find \$GITHUB_BASE_REF, so we have nothing to compare submodules to. Skipping submodule regression check." && exit 0 + BASE_BRANCH=$GITHUB_BASE_REF + CURRENT_BRANCH=$GITHUB_SHA fi echo "getting submodule info for $CURRENT_BRANCH" @@ -27,17 +26,15 @@ while read -r a b; do done < <(git submodule --quiet foreach --recursive 'echo $path `git log -1 --format=%ct`') # We need to switch back to the PR ref/head so we can git log properly -if [[ $TRAVIS == true && ! -z $TRAVIS_PULL_REQUEST_SLUG ]]; then - echo "git fetch origin +refs/pull/$TRAVIS_PULL_REQUEST/merge:" - git fetch origin +refs/pull/$TRAVIS_PULL_REQUEST/merge: 1> /dev/null - echo "switching back to $TRAVIS_COMMIT" - echo 'git checkout -qf FETCH_HEAD' - git checkout -qf FETCH_HEAD 1> /dev/null -elif [[ $BUILDKITE == true ]]; then - echo "switching back to $CURRENT_BRANCH" - git checkout -f $CURRENT_BRANCH 1> /dev/null +if [[ $BUILDKITE != true ]]; then + echo "git fetch origin +$GITHUB_REF:" + git fetch origin +${GITHUB_REF}: 1> /dev/null fi +echo "switching back to $CURRENT_BRANCH..." +echo "git checkout -qf $CURRENT_BRANCH" +git checkout -qf $CURRENT_BRANCH 1> /dev/null + for k in "${!BASE_MAP[@]}"; do base_ts=${BASE_MAP[$k]} pr_ts=${PR_MAP[$k]} diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 00000000000..19fc006353c --- /dev/null +++ b/.github/workflows/main.yml @@ -0,0 +1,436 @@ +name: Pull Request +on: [pull_request] + +jobs: + start-job: + name: Start Job + runs-on: ubuntu-latest + steps: + - name: Start Job. + run: echo "PR created. Builds will be triggered here for forked PRs or Buildkite for internal PRs." 
+ + + submodule_regression_check: + if: github.event.pull_request.base.repo.id != github.event.pull_request.head.repo.id + name: Submodule Regression Check + runs-on: ubuntu-latest + needs: start-job + steps: + - name: Checkout + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Submodule Regression Check + run: ./.cicd/submodule-regression-check.sh + + + amazon_linux-2-build: + if: github.event.pull_request.base.repo.id != github.event.pull_request.head.repo.id + name: Amazon_Linux 2 | Build + runs-on: ubuntu-latest + needs: start-job + steps: + - name: Checkout + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Build + run: | + ./.cicd/build.sh + tar -pczf build.tar.gz build + env: + IMAGE_TAG: amazon_linux-2-pinned + PLATFORM_TYPE: pinned + - name: Upload Build Artifact + uses: actions/upload-artifact@v1 + with: + name: amazon_linux-2-build + path: build.tar.gz + amazon_linux-2-parallel-test: + name: Amazon_Linux 2 | Parallel Test + runs-on: ubuntu-latest + needs: amazon_linux-2-build + steps: + - name: Checkout + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Download Build Artifact + uses: actions/download-artifact@v1 + with: + name: amazon_linux-2-build + - name: Parallel Test + run: | + tar -xzf amazon_linux-2-build/build.tar.gz + ./.cicd/test.sh scripts/parallel-test.sh + env: + IMAGE_TAG: amazon_linux-2-pinned + PLATFORM_TYPE: pinned + amazon_linux-2-wasm-test: + name: Amazon_Linux 2 | WASM Spec Test + runs-on: ubuntu-latest + needs: amazon_linux-2-build + steps: + - name: Checkout + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Download Build Artifact + uses: actions/download-artifact@v1 + with: + name: amazon_linux-2-build + - name: WASM Spec Test + run: | + tar -xzf amazon_linux-2-build/build.tar.gz + ./.cicd/test.sh scripts/wasm-spec-test.sh + env: + IMAGE_TAG: amazon_linux-2-pinned + PLATFORM_TYPE: pinned + amazon_linux-2-serial-test: + name: Amazon_Linux 2 | Serial Test + runs-on: ubuntu-latest + needs: amazon_linux-2-build + steps: + - name: Checkout + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Download Build Artifact + uses: actions/download-artifact@v1 + with: + name: amazon_linux-2-build + - name: Serial Test + run: | + tar -xzf amazon_linux-2-build/build.tar.gz + ./.cicd/test.sh scripts/serial-test.sh + env: + IMAGE_TAG: amazon_linux-2-pinned + PLATFORM_TYPE: pinned + + + centos-77-build: + if: github.event.pull_request.base.repo.id != github.event.pull_request.head.repo.id + name: CentOS 7.7 | Build + runs-on: ubuntu-latest + needs: start-job + steps: + - name: Checkout + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Build + run: | + ./.cicd/build.sh + tar -pczf build.tar.gz build + env: + IMAGE_TAG: centos-7.7-pinned + PLATFORM_TYPE: pinned + - name: Upload Build Artifact + uses: actions/upload-artifact@v1 + with: + name: centos-77-build + path: build.tar.gz + centos-77-parallel-test: + name: CentOS 7.7 | Parallel Test + runs-on: ubuntu-latest + needs: centos-77-build + steps: + - name: Checkout + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Download Build Artifact + uses: actions/download-artifact@v1 + with: + name: centos-77-build + - name: Parallel Test + 
run: | + tar -xzf centos-77-build/build.tar.gz + ./.cicd/test.sh scripts/parallel-test.sh + env: + IMAGE_TAG: centos-7.7-pinned + PLATFORM_TYPE: pinned + centos-77-wasm-test: + name: CentOS 7.7 | WASM Spec Test + runs-on: ubuntu-latest + needs: centos-77-build + steps: + - name: Checkout + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Download Build Artifact + uses: actions/download-artifact@v1 + with: + name: centos-77-build + - name: WASM Spec Test + run: | + tar -xzf centos-77-build/build.tar.gz + ./.cicd/test.sh scripts/wasm-spec-test.sh + env: + IMAGE_TAG: centos-7.7-pinned + PLATFORM_TYPE: pinned + centos-77-serial-test: + name: CentOS 7.7 | Serial Test + runs-on: ubuntu-latest + needs: centos-77-build + steps: + - name: Checkout + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Download Build Artifact + uses: actions/download-artifact@v1 + with: + name: centos-77-build + - name: Serial Test + run: | + tar -xzf centos-77-build/build.tar.gz + ./.cicd/test.sh scripts/serial-test.sh + env: + IMAGE_TAG: centos-7.7-pinned + PLATFORM_TYPE: pinned + + + ubuntu-1604-build: + if: github.event.pull_request.base.repo.id != github.event.pull_request.head.repo.id + name: Ubuntu 16.04 | Build + runs-on: ubuntu-latest + needs: start-job + steps: + - name: Checkout + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Build + run: | + ./.cicd/build.sh + tar -pczf build.tar.gz build + env: + IMAGE_TAG: ubuntu-16.04-pinned + PLATFORM_TYPE: pinned + - name: Upload Build Artifact + uses: actions/upload-artifact@v1 + with: + name: ubuntu-1604-build + path: build.tar.gz + ubuntu-1604-parallel-test: + name: Ubuntu 16.04 | Parallel Test + runs-on: ubuntu-latest + needs: ubuntu-1604-build + steps: + - name: Checkout + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Download Build Artifact + uses: actions/download-artifact@v1 + with: + name: ubuntu-1604-build + - name: Parallel Test + run: | + tar -xzf ubuntu-1604-build/build.tar.gz + ./.cicd/test.sh scripts/parallel-test.sh + env: + IMAGE_TAG: ubuntu-16.04-pinned + PLATFORM_TYPE: pinned + ubuntu-1604-wasm-test: + name: Ubuntu 16.04 | WASM Spec Test + runs-on: ubuntu-latest + needs: ubuntu-1604-build + steps: + - name: Checkout + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Download Build Artifact + uses: actions/download-artifact@v1 + with: + name: ubuntu-1604-build + - name: WASM Spec Test + run: | + tar -xzf ubuntu-1604-build/build.tar.gz + ./.cicd/test.sh scripts/wasm-spec-test.sh + env: + IMAGE_TAG: ubuntu-16.04-pinned + PLATFORM_TYPE: pinned + ubuntu-1604-serial-test: + name: Ubuntu 16.04 | Serial Test + runs-on: ubuntu-latest + needs: ubuntu-1604-build + steps: + - name: Checkout + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Download Build Artifact + uses: actions/download-artifact@v1 + with: + name: ubuntu-1604-build + - name: Serial Test + run: | + tar -xzf ubuntu-1604-build/build.tar.gz + ./.cicd/test.sh scripts/serial-test.sh + env: + IMAGE_TAG: ubuntu-16.04-pinned + PLATFORM_TYPE: pinned + + + ubuntu-1804-build: + if: github.event.pull_request.base.repo.id != github.event.pull_request.head.repo.id + name: Ubuntu 18.04 | Build + runs-on: ubuntu-latest + needs: start-job + steps: + - name: Checkout + 
uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Build + run: | + ./.cicd/build.sh + tar -pczf build.tar.gz build + env: + IMAGE_TAG: ubuntu-18.04-pinned + PLATFORM_TYPE: pinned + - name: Upload Build Artifact + uses: actions/upload-artifact@v1 + with: + name: ubuntu-1804-build + path: build.tar.gz + ubuntu-1804-parallel-test: + name: Ubuntu 18.04 | Parallel Test + runs-on: ubuntu-latest + needs: ubuntu-1804-build + steps: + - name: Checkout + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Download Build Artifact + uses: actions/download-artifact@v1 + with: + name: ubuntu-1804-build + - name: Parallel Test + run: | + tar -xzf ubuntu-1804-build/build.tar.gz + ./.cicd/test.sh scripts/parallel-test.sh + env: + IMAGE_TAG: ubuntu-18.04-pinned + PLATFORM_TYPE: pinned + ubuntu-1804-wasm-test: + name: Ubuntu 18.04 | WASM Spec Test + runs-on: ubuntu-latest + needs: ubuntu-1804-build + steps: + - name: Checkout + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Download Build Artifact + uses: actions/download-artifact@v1 + with: + name: ubuntu-1804-build + - name: WASM Spec Test + run: | + tar -xzf ubuntu-1804-build/build.tar.gz + ./.cicd/test.sh scripts/wasm-spec-test.sh + env: + IMAGE_TAG: ubuntu-18.04-pinned + PLATFORM_TYPE: pinned + ubuntu-1804-serial-test: + name: Ubuntu 18.04 | Serial Test + runs-on: ubuntu-latest + needs: ubuntu-1804-build + steps: + - name: Checkout + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Download Build Artifact + uses: actions/download-artifact@v1 + with: + name: ubuntu-1804-build + - name: Serial Test + run: | + tar -xzf ubuntu-1804-build/build.tar.gz + ./.cicd/test.sh scripts/serial-test.sh + env: + IMAGE_TAG: ubuntu-18.04-pinned + PLATFORM_TYPE: pinned + + + macos-1015-build: + if: github.event.pull_request.base.repo.id != github.event.pull_request.head.repo.id + name: MacOS 10.15 | Build + runs-on: macos-latest + needs: start-job + steps: + - name: Checkout + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Build + run: | + ./.cicd/platforms/unpinned/macos-10.14-unpinned.sh + ./.cicd/build.sh + tar -pczf build.tar.gz build + - name: Upload Build Artifact + uses: actions/upload-artifact@v1 + with: + name: macos-1015-build + path: build.tar.gz + macos-1015-parallel-test: + name: MacOS 10.15 | Parallel Test + runs-on: macos-latest + needs: macos-1015-build + steps: + - name: Checkout + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Download Build Artifact + uses: actions/download-artifact@v1 + with: + name: macos-1015-build + - name: Parallel Test + run: | + ./.cicd/platforms/unpinned/macos-10.14-unpinned.sh + tar -xzf macos-1015-build/build.tar.gz + ./.cicd/test.sh scripts/parallel-test.sh + macos-1015-wasm-test: + name: MacOS 10.15 | WASM Spec Test + runs-on: macos-latest + needs: macos-1015-build + steps: + - name: Checkout + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Download Build Artifact + uses: actions/download-artifact@v1 + with: + name: macos-1015-build + - name: WASM Spec Test + run: | + ./.cicd/platforms/unpinned/macos-10.14-unpinned.sh + tar -xzf macos-1015-build/build.tar.gz + ./.cicd/test.sh scripts/wasm-spec-test.sh + macos-1015-serial-test: + 
name: MacOS 10.15 | Serial Test + runs-on: macos-latest + needs: macos-1015-build + steps: + - name: Checkout + uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e + with: + submodules: recursive + - name: Download Build Artifact + uses: actions/download-artifact@v1 + with: + name: macos-1015-build + - name: Serial Test + run: | + ./.cicd/platforms/unpinned/macos-10.14-unpinned.sh + tar -xzf macos-1015-build/build.tar.gz + ./.cicd/test.sh scripts/serial-test.sh \ No newline at end of file diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 83ab0ce1b7a..00000000000 --- a/.travis.yml +++ /dev/null @@ -1,61 +0,0 @@ -language: cpp -git: - depth: false # prevent git describe failure when executing the appbase version.cmake.in -cache: - ccache: true - directories: - - $HOME/Library/Caches/Homebrew -if: fork = true OR type = api OR type = cron -matrix: - include: - - os: linux - dist: xenial - services: docker - env: - - PLATFORM_TYPE='pinned' - - IMAGE_TAG='ubuntu-18.04-pinned' - - os: linux - dist: xenial - services: docker - env: - - PLATFORM_TYPE='pinned' - - IMAGE_TAG='ubuntu-16.04-pinned' - - os: linux - dist: xenial - services: docker - env: - - PLATFORM_TYPE='pinned' - - IMAGE_TAG='amazon_linux-2-pinned' - - os: linux - dist: xenial - services: docker - env: - - PLATFORM_TYPE='pinned' - - IMAGE_TAG='centos-7.7-pinned' - - os: osx - osx_image: xcode10.2 - addons: - homebrew: - update: true - packages: - - ccache - - jq - - boost - - python@2 - - python - - libtool - - libusb - - graphviz - - automake - - wget - - gmp - - llvm@7 - - pkgconfig - - doxygen - - openssl@1.1 - env: - - PATH="/usr/local/opt/ccache/libexec:$PATH" -script: "ccache --max-size=1G && ./.cicd/build.sh && ./.cicd/test.sh scripts/parallel-test.sh && ./.cicd/test.sh scripts/serial-test.sh && if [[ $(uname) != 'Darwin' ]]; then ./.cicd/submodule-regression-check.sh; fi" -notifications: - webhooks: - secure: gmqODqoFAil2cR7v++ibqRNECBOSD/VJX+2qPa7XptkVWmVMzbII5CNgBQAscjFsp9arHPMXCCzkBi847PCSiHdsnYFQ4T273FLRWr3cDbLjfmR+BJ7dGKvQnlpSi2Ze2TtAPJyRl+iv+cxDj7cWE5zw2c4xbgh1a/cNO+/ayUfFkyMEIfVWRsHkdkra4gOLywou0XRLHr4CX1V60uU7uuqATnIMMi7gQYwiKKtZqjkbf8wcBvZirDhjQ6lDPN5tnZo6L4QHmqjtzNJg/UrD4h+zES53dLVI4uxlXRAwwpw+mJOFA3QE/3FT+bMQjLCffUz4gZaWcdgebPYzrwSWUbJoFdWAOwcTqivQY0FIQzcz/r6uGWcwWTavzkPEbg68BVM2BZId/0110J6feeTkpJ3MPV+UsIoGTvbg50vi/I06icftuZ/cLqDj3+Emifm7Jlr1sRTSdqtYAJj/2ImUfsb46cwgjAVhFOTvc+KuPgJQgvOXV7bZkxEr5qDWo8Al2sV8BWb83j1rMlZ4LfERokImDVqxu2kkcunchzvhtYFTesSpmwegVpwceCtOtO0rEUgATnfTEHzk2rm8nuz4UtidsQnluUKqmKD0QCqHXFfn+3ZRJsDqr+iCYdxv1BAeAVc9q1L7bgrKDMGiJgkxuhZ2v3J2SflWLvjZjFDduuc= diff --git a/CMakeLists.txt b/CMakeLists.txt index 0edcb4ca7fd..a3c0709b636 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -25,7 +25,7 @@ set( CXX_STANDARD_REQUIRED ON) set(VERSION_MAJOR 2) set(VERSION_MINOR 0) -set(VERSION_PATCH 2) +set(VERSION_PATCH 3) #set(VERSION_SUFFIX rc3) if(VERSION_SUFFIX) diff --git a/README.md b/README.md index 346e0fdf695..a71d2b2b160 100644 --- a/README.md +++ b/README.md @@ -74,13 +74,13 @@ $ brew remove eosio #### Ubuntu 18.04 Package Install ```sh -$ wget https://github.com/eosio/eos/releases/download/v2.0.2/eosio_2.0.2-1-ubuntu-18.04_amd64.deb -$ sudo apt install ./eosio_2.0.2-1-ubuntu-18.04_amd64.deb +$ wget https://github.com/eosio/eos/releases/download/v2.0.3/eosio_2.0.3-1-ubuntu-18.04_amd64.deb +$ sudo apt install ./eosio_2.0.3-1-ubuntu-18.04_amd64.deb ``` #### Ubuntu 16.04 Package Install ```sh -$ wget 
https://github.com/eosio/eos/releases/download/v2.0.2/eosio_2.0.2-1-ubuntu-16.04_amd64.deb -$ sudo apt install ./eosio_2.0.2-1-ubuntu-16.04_amd64.deb +$ wget https://github.com/eosio/eos/releases/download/v2.0.3/eosio_2.0.3-1-ubuntu-16.04_amd64.deb +$ sudo apt install ./eosio_2.0.3-1-ubuntu-16.04_amd64.deb ``` #### Ubuntu Package Uninstall ```sh @@ -91,8 +91,8 @@ $ sudo apt remove eosio #### RPM Package Install ```sh -$ wget https://github.com/eosio/eos/releases/download/v2.0.2/eosio-2.0.2-1.el7.x86_64.rpm -$ sudo yum install ./eosio-2.0.2-1.el7.x86_64.rpm +$ wget https://github.com/eosio/eos/releases/download/v2.0.3/eosio-2.0.3-1.el7.x86_64.rpm +$ sudo yum install ./eosio-2.0.3-1.el7.x86_64.rpm ``` #### RPM Package Uninstall ```sh diff --git a/docs/00_install/00_install-prebuilt-binaries.md b/docs/00_install/00_install-prebuilt-binaries.md index 5c67de3d7db..ae121a22fb5 100644 --- a/docs/00_install/00_install-prebuilt-binaries.md +++ b/docs/00_install/00_install-prebuilt-binaries.md @@ -25,13 +25,13 @@ brew remove eosio #### Ubuntu 18.04 Package Install ```sh -wget https://github.com/eosio/eos/releases/download/v2.0.2/eosio_2.0.2-1-ubuntu-18.04_amd64.deb -sudo apt install ./eosio_2.0.2-1-ubuntu-18.04_amd64.deb +wget https://github.com/eosio/eos/releases/download/v2.0.3/eosio_2.0.3-1-ubuntu-18.04_amd64.deb +sudo apt install ./eosio_2.0.3-1-ubuntu-18.04_amd64.deb ``` #### Ubuntu 16.04 Package Install ```sh -wget https://github.com/eosio/eos/releases/download/v2.0.2/eosio_2.0.2-1-ubuntu-16.04_amd64.deb -sudo apt install ./eosio_2.0.2-1-ubuntu-16.04_amd64.deb +wget https://github.com/eosio/eos/releases/download/v2.0.3/eosio_2.0.3-1-ubuntu-16.04_amd64.deb +sudo apt install ./eosio_2.0.3-1-ubuntu-16.04_amd64.deb ``` #### Ubuntu Package Uninstall ```sh @@ -42,8 +42,8 @@ sudo apt remove eosio #### RPM Package Install ```sh -wget https://github.com/eosio/eos/releases/download/v2.0.2/eosio-2.0.2-1.el7.x86_64.rpm -sudo yum install ./eosio-2.0.2-1.el7.x86_64.rpm +wget https://github.com/eosio/eos/releases/download/v2.0.3/eosio-2.0.3-1.el7.x86_64.rpm +sudo yum install ./eosio-2.0.3-1.el7.x86_64.rpm ``` #### RPM Package Uninstall ```sh diff --git a/docs/00_install/01_build-from-source/02_manual-build/03_platforms/macos-10.14-pinned.md b/docs/00_install/01_build-from-source/02_manual-build/03_platforms/macos-10.14-pinned.md index edd27859c0b..826e59fb016 100644 --- a/docs/00_install/01_build-from-source/02_manual-build/03_platforms/macos-10.14-pinned.md +++ b/docs/00_install/01_build-from-source/02_manual-build/03_platforms/macos-10.14-pinned.md @@ -37,7 +37,7 @@ cd $EOSIO_LOCATION && git submodule update --init --recursive These commands install the EOSIO software dependencies. Make sure to [Download the EOSIO Repository](#download-eosio-repository) first and set the EOSIO directories. 
```sh # install dependencies -brew install cmake python@2 python libtool libusb graphviz automake wget gmp pkgconfig doxygen openssl@1.1 jq || : +brew install cmake python libtool libusb graphviz automake wget gmp pkgconfig doxygen openssl@1.1 jq || : # Boost Fix: eosio/install/bin/../include/c++/v1/stdlib.h:94:15: fatal error: 'stdlib.h' file not found SDKROOT="$(xcrun --sdk macosx --show-sdk-path)" # build clang diff --git a/docs/00_install/01_build-from-source/02_manual-build/03_platforms/macos-10.14-unpinned.md b/docs/00_install/01_build-from-source/02_manual-build/03_platforms/macos-10.14-unpinned.md index e3596fb21e4..826b3a1bfa2 100644 --- a/docs/00_install/01_build-from-source/02_manual-build/03_platforms/macos-10.14-unpinned.md +++ b/docs/00_install/01_build-from-source/02_manual-build/03_platforms/macos-10.14-unpinned.md @@ -37,7 +37,7 @@ cd $EOSIO_LOCATION && git submodule update --init --recursive These commands install the EOSIO software dependencies. Make sure to [Download the EOSIO Repository](#download-eosio-repository) first and set the EOSIO directories. ```sh # install dependencies -brew install cmake python@2 python libtool libusb graphviz automake wget gmp pkgconfig doxygen openssl@1.1 jq boost || : +brew install cmake python libtool libusb graphviz automake wget gmp pkgconfig doxygen openssl@1.1 jq boost || : export PATH=$EOSIO_INSTALL_LOCATION/bin:$PATH # install mongodb mkdir -p $EOSIO_INSTALL_LOCATION/bin diff --git a/libraries/chain/abi_serializer.cpp b/libraries/chain/abi_serializer.cpp index d5e100428c0..7b3f5a469e8 100644 --- a/libraries/chain/abi_serializer.cpp +++ b/libraries/chain/abi_serializer.cpp @@ -25,24 +25,51 @@ namespace eosio { namespace chain { return fc::variant(temp); } + template + inline fc::variant variant_from_stream(fc::datastream& stream, const fc::time_point& deadline) { + T temp; + fc::raw::unpack( stream, temp ); + FC_CHECK_DEADLINE(deadline); + return fc::variant( temp, deadline ); + } + + template + auto pack_function() { + return []( const fc::variant& var, fc::datastream& ds, bool is_array, bool is_optional, const fc::time_point& deadline ){ + if( is_array ) + fc::raw::pack( ds, var.as>() ); + else if ( is_optional ) + fc::raw::pack( ds, var.as>() ); + else + fc::raw::pack( ds, var.as()); + }; + } + template auto pack_unpack() { return std::make_pair( - []( fc::datastream& stream, bool is_array, bool is_optional) -> fc::variant { + []( fc::datastream& stream, bool is_array, bool is_optional, const fc::time_point& deadline) -> fc::variant { if( is_array ) return variant_from_stream>(stream); else if ( is_optional ) return variant_from_stream>(stream); return variant_from_stream(stream); }, - []( const fc::variant& var, fc::datastream& ds, bool is_array, bool is_optional ){ + pack_function() + ); + } + + template + auto pack_unpack_deadline() { + return std::make_pair( + []( fc::datastream& stream, bool is_array, bool is_optional, const fc::time_point& deadline) -> fc::variant { if( is_array ) - fc::raw::pack( ds, var.as>() ); + return variant_from_stream>(stream); else if ( is_optional ) - fc::raw::pack( ds, var.as>() ); - else - fc::raw::pack( ds, var.as()); - } + return variant_from_stream>(stream); + return variant_from_stream(stream, deadline); + }, + pack_function() ); } @@ -90,8 +117,8 @@ namespace eosio { namespace chain { built_in_types.emplace("checksum256", pack_unpack()); built_in_types.emplace("checksum512", pack_unpack()); - built_in_types.emplace("public_key", pack_unpack()); - built_in_types.emplace("signature", 
pack_unpack()); + built_in_types.emplace("public_key", pack_unpack_deadline()); + built_in_types.emplace("signature", pack_unpack_deadline()); built_in_types.emplace("symbol", pack_unpack()); built_in_types.emplace("symbol_code", pack_unpack()); @@ -318,7 +345,7 @@ namespace eosio { namespace chain { auto btype = built_in_types.find(ftype ); if( btype != built_in_types.end() ) { try { - return btype->second.first(stream, is_array(rtype), is_optional(rtype)); + return btype->second.first(stream, is_array(rtype), is_optional(rtype), ctx.get_deadline()); } EOS_RETHROW_EXCEPTIONS( unpack_exception, "Unable to unpack ${class} type '${type}' while processing '${p}'", ("class", is_array(rtype) ? "array of built-in" : is_optional(rtype) ? "optional of built-in" : "built-in") ("type", impl::limit_size(ftype))("p", ctx.get_path_string()) ) @@ -403,7 +430,7 @@ namespace eosio { namespace chain { auto btype = built_in_types.find(fundamental_type(rtype)); if( btype != built_in_types.end() ) { - btype->second.second(var, ds, is_array(rtype), is_optional(rtype)); + btype->second.second(var, ds, is_array(rtype), is_optional(rtype), ctx.get_deadline()); } else if ( is_array(rtype) ) { ctx.hint_array_type_if_in_array(); vector vars = var.get_array(); diff --git a/libraries/chain/include/eosio/chain/abi_serializer.hpp b/libraries/chain/include/eosio/chain/abi_serializer.hpp index c8c2efc1c8d..16978a50119 100644 --- a/libraries/chain/include/eosio/chain/abi_serializer.hpp +++ b/libraries/chain/include/eosio/chain/abi_serializer.hpp @@ -81,8 +81,8 @@ struct abi_serializer { return false; } - typedef std::function&, bool, bool)> unpack_function; - typedef std::function&, bool, bool)> pack_function; + typedef std::function&, bool, bool, const fc::time_point&)> unpack_function; + typedef std::function&, bool, bool, const fc::time_point&)> pack_function; void add_specialized_unpack_pack( const string& name, std::pair unpack_pack ); @@ -139,6 +139,7 @@ namespace impl { {} void check_deadline()const; + const fc::time_point& get_deadline()const { return deadline; } fc::scoped_exit> enter_scope(); diff --git a/libraries/chain/include/eosio/chain/authority.hpp b/libraries/chain/include/eosio/chain/authority.hpp index 492fd9b404f..b635f3942a4 100644 --- a/libraries/chain/include/eosio/chain/authority.hpp +++ b/libraries/chain/include/eosio/chain/authority.hpp @@ -29,8 +29,8 @@ struct shared_public_key { return std::move(public_key_storage); } - operator string() const { - return (string)this->operator public_key_type(); + std::string to_string() const { + return this->operator public_key_type().to_string(); } shared_public_key_data pubkey; diff --git a/libraries/chain/include/eosio/chain/config.hpp b/libraries/chain/include/eosio/chain/config.hpp index 55fe238ccf6..6f27b174b8e 100644 --- a/libraries/chain/include/eosio/chain/config.hpp +++ b/libraries/chain/include/eosio/chain/config.hpp @@ -97,7 +97,7 @@ const static uint32_t setcode_ram_bytes_multiplier = 10; ///< multip const static uint32_t hashing_checktime_block_size = 10*1024; /// call checktime from hashing intrinsic once per this number of bytes const static eosio::chain::wasm_interface::vm_type default_wasm_runtime = eosio::chain::wasm_interface::vm_type::wabt; -const static uint32_t default_abi_serializer_max_time_ms = 15*1000; ///< default deadline for abi serialization methods +const static uint32_t default_abi_serializer_max_time_us = 15*1000; ///< default deadline for abi serialization methods /** * The number of sequential blocks produced by a single 
producer diff --git a/libraries/fc b/libraries/fc index 33c5680258f..5c3740a5efe 160000 --- a/libraries/fc +++ b/libraries/fc @@ -1 +1 @@ -Subproject commit 33c5680258f3268f723d97c0156ce85290b2d569 +Subproject commit 5c3740a5efec1e1d592e0f8ce092872df46c92d2 diff --git a/plugins/chain_plugin/chain_plugin.cpp b/plugins/chain_plugin/chain_plugin.cpp index 468c1290e8b..4400c1d69d1 100644 --- a/plugins/chain_plugin/chain_plugin.cpp +++ b/plugins/chain_plugin/chain_plugin.cpp @@ -148,7 +148,7 @@ class chain_plugin_impl { fc::optional genesis; //txn_msg_rate_limits rate_limits; fc::optional wasm_runtime; - fc::microseconds abi_serializer_max_time_ms; + fc::microseconds abi_serializer_max_time_us; fc::optional snapshot_path; @@ -207,7 +207,7 @@ void chain_plugin::set_program_options(options_description& cli, options_descrip } #endif }), "Override default WASM runtime") - ("abi-serializer-max-time-ms", bpo::value()->default_value(config::default_abi_serializer_max_time_ms), + ("abi-serializer-max-time-ms", bpo::value()->default_value(config::default_abi_serializer_max_time_us / 1000), "Override default maximum ABI serialization time allowed in ms") ("chain-state-db-size-mb", bpo::value()->default_value(config::default_state_size / (1024 * 1024)), "Maximum size (in MiB) of the chain state database") ("chain-state-db-guard-size-mb", bpo::value()->default_value(config::default_state_guard_size / (1024 * 1024)), "Safely shut down node when free space remaining in the chain state database drops below this size (in MiB).") @@ -692,7 +692,7 @@ void chain_plugin::plugin_initialize(const variables_map& options) { my->wasm_runtime = options.at( "wasm-runtime" ).as(); if(options.count("abi-serializer-max-time-ms")) - my->abi_serializer_max_time_ms = fc::microseconds(options.at("abi-serializer-max-time-ms").as() * 1000); + my->abi_serializer_max_time_us = fc::microseconds(options.at("abi-serializer-max-time-ms").as() * 1000); my->chain_config->blocks_dir = my->blocks_dir; my->chain_config->state_dir = app().data_dir() / config::default_state_dir_name; @@ -1363,7 +1363,7 @@ chain::chain_id_type chain_plugin::get_chain_id()const { } fc::microseconds chain_plugin::get_abi_serializer_max_time() const { - return my->abi_serializer_max_time_ms; + return my->abi_serializer_max_time_us; } void chain_plugin::log_guard_exception(const chain::guard_exception&e ) { @@ -1811,7 +1811,7 @@ fc::variant read_only::get_currency_stats( const read_only::get_currency_stats_p return results; } -fc::variant get_global_row( const database& db, const abi_def& abi, const abi_serializer& abis, const fc::microseconds& abi_serializer_max_time_ms, bool shorten_abi_errors ) { +fc::variant get_global_row( const database& db, const abi_def& abi, const abi_serializer& abis, const fc::microseconds& abi_serializer_max_time_us, bool shorten_abi_errors ) { const auto table_type = get_table_type(abi, N(global)); EOS_ASSERT(table_type == read_only::KEYi64, chain::contract_table_query_exception, "Invalid table type ${type} for table global", ("type",table_type)); @@ -1824,7 +1824,7 @@ fc::variant get_global_row( const database& db, const abi_def& abi, const abi_se vector data; read_only::copy_inline_row(*it, data); - return abis.binary_to_variant(abis.get_table_type(N(global)), data, abi_serializer_max_time_ms, shorten_abi_errors ); + return abis.binary_to_variant(abis.get_table_type(N(global)), data, abi_serializer_max_time_us, shorten_abi_errors ); } read_only::get_producers_result read_only::get_producers( const read_only::get_producers_params& p 
) const try { diff --git a/plugins/mongo_db_plugin/mongo_db_plugin.cpp b/plugins/mongo_db_plugin/mongo_db_plugin.cpp index 60cc44d47e3..83b398c2ecb 100644 --- a/plugins/mongo_db_plugin/mongo_db_plugin.cpp +++ b/plugins/mongo_db_plugin/mongo_db_plugin.cpp @@ -634,13 +634,13 @@ optional mongo_db_plugin_impl::get_abi_serializer( account_name // unpack setabi.abi as abi_def instead of as bytes abis.add_specialized_unpack_pack( "abi_def", std::make_pair( - []( fc::datastream& stream, bool is_array, bool is_optional ) -> fc::variant { + []( fc::datastream& stream, bool is_array, bool is_optional, const fc::time_point& deadline ) -> fc::variant { EOS_ASSERT( !is_array && !is_optional, chain::mongo_db_exception, "unexpected abi_def"); chain::bytes temp; fc::raw::unpack( stream, temp ); return fc::variant( fc::raw::unpack( temp ) ); }, - []( const fc::variant& var, fc::datastream& ds, bool is_array, bool is_optional ) { + []( const fc::variant& var, fc::datastream& ds, bool is_array, bool is_optional, const fc::time_point& deadline ) { EOS_ASSERT( false, chain::mongo_db_exception, "never called" ); } ) ); @@ -1110,7 +1110,7 @@ void mongo_db_plugin_impl::add_pub_keys( const vector& keys, auto find_doc = bsoncxx::builder::basic::document(); find_doc.append( kvp( "account", name.to_string()), - kvp( "public_key", pub_key_weight.key.operator string()), + kvp( "public_key", pub_key_weight.key.to_string()), kvp( "permission", permission.to_string()) ); auto update_doc = make_document( kvp( "$set", make_document( bsoncxx::builder::concatenate_doc{find_doc.view()}, diff --git a/plugins/net_plugin/include/eosio/net_plugin/protocol.hpp b/plugins/net_plugin/include/eosio/net_plugin/protocol.hpp index 8ce781cefd5..56c33909851 100644 --- a/plugins/net_plugin/include/eosio/net_plugin/protocol.hpp +++ b/plugins/net_plugin/include/eosio/net_plugin/protocol.hpp @@ -77,16 +77,16 @@ namespace eosio { } struct go_away_message { - go_away_message (go_away_reason r = no_reason) : reason(r), node_id() {} - go_away_reason reason; + go_away_message(go_away_reason r = no_reason) : reason(r), node_id() {} + go_away_reason reason{no_reason}; fc::sha256 node_id; ///< for duplicate notification }; struct time_message { - tstamp org; //!< origin timestamp - tstamp rec; //!< receive timestamp - tstamp xmt; //!< transmit timestamp - mutable tstamp dst; //!< destination timestamp + tstamp org{0}; //!< origin timestamp + tstamp rec{0}; //!< receive timestamp + tstamp xmt{0}; //!< transmit timestamp + mutable tstamp dst{0}; //!< destination timestamp }; enum id_list_modes { @@ -108,9 +108,9 @@ namespace eosio { template struct select_ids { - select_ids () : mode(none),pending(0),ids() {} - id_list_modes mode; - uint32_t pending; + select_ids() : mode(none),pending(0),ids() {} + id_list_modes mode{none}; + uint32_t pending{0}; vector ids; bool empty () const { return (mode == none || ids.empty()); } }; @@ -119,20 +119,20 @@ namespace eosio { using ordered_blk_ids = select_ids; struct notice_message { - notice_message () : known_trx(), known_blocks() {} + notice_message() : known_trx(), known_blocks() {} ordered_txn_ids known_trx; ordered_blk_ids known_blocks; }; struct request_message { - request_message () : req_trx(), req_blocks() {} + request_message() : req_trx(), req_blocks() {} ordered_txn_ids req_trx; ordered_blk_ids req_blocks; }; struct sync_request_message { - uint32_t start_block; - uint32_t end_block; + uint32_t start_block{0}; + uint32_t end_block{0}; }; using net_message = static_variant sync_state; + std::atomic 
sync_state{in_sync}; private: constexpr static auto stage_str( stages s ); @@ -229,9 +229,9 @@ namespace eosio { }; possible_connections allowed_connections{None}; - boost::asio::steady_timer::duration connector_period; - boost::asio::steady_timer::duration txn_exp_period; - boost::asio::steady_timer::duration resp_expected_period; + boost::asio::steady_timer::duration connector_period{0}; + boost::asio::steady_timer::duration txn_exp_period{0}; + boost::asio::steady_timer::duration resp_expected_period{0}; boost::asio::steady_timer::duration keepalive_interval{std::chrono::seconds{32}}; int max_cleanup_time_ms = 0; diff --git a/plugins/producer_plugin/producer_plugin.cpp b/plugins/producer_plugin/producer_plugin.cpp index dcc9c5a99b4..c67f53a1e11 100644 --- a/plugins/producer_plugin/producer_plugin.cpp +++ b/plugins/producer_plugin/producer_plugin.cpp @@ -648,7 +648,9 @@ void producer_plugin::set_program_options( "ID of producer controlled by this node (e.g. inita; may specify multiple times)") ("private-key", boost::program_options::value>()->composing()->multitoken(), "(DEPRECATED - Use signature-provider instead) Tuple of [public key, WIF private key] (may specify multiple times)") - ("signature-provider", boost::program_options::value>()->composing()->multitoken()->default_value({std::string(default_priv_key.get_public_key()) + "=KEY:" + std::string(default_priv_key)}, std::string(default_priv_key.get_public_key()) + "=KEY:" + std::string(default_priv_key)), + ("signature-provider", boost::program_options::value>()->composing()->multitoken()->default_value( + {default_priv_key.get_public_key().to_string() + "=KEY:" + default_priv_key.to_string()}, + default_priv_key.get_public_key().to_string() + "=KEY:" + default_priv_key.to_string()), "Key=Value pairs in the form =\n" "Where:\n" " \tis a string form of a vaild EOSIO public key\n\n" @@ -773,7 +775,7 @@ void producer_plugin::plugin_initialize(const boost::program_options::variables_ try { auto key_id_to_wif_pair = dejsonify>(key_id_to_wif_pair_string); my->_signature_providers[key_id_to_wif_pair.first] = make_key_signature_provider(key_id_to_wif_pair.second); - auto blanked_privkey = std::string(std::string(key_id_to_wif_pair.second).size(), '*' ); + auto blanked_privkey = std::string(key_id_to_wif_pair.second.to_string().size(), '*' ); wlog("\"private-key\" is DEPRECATED, use \"signature-provider=${pub}=KEY:${priv}\"", ("pub",key_id_to_wif_pair.first)("priv", blanked_privkey)); } catch ( fc::exception& e ) { elog("Malformed private key pair"); diff --git a/plugins/wallet_plugin/se_wallet.cpp b/plugins/wallet_plugin/se_wallet.cpp index 84cda429e88..dc919554e0b 100644 --- a/plugins/wallet_plugin/se_wallet.cpp +++ b/plugins/wallet_plugin/se_wallet.cpp @@ -359,7 +359,7 @@ bool se_wallet::import_key(string wif_key) { string se_wallet::create_key(string key_type) { EOS_ASSERT(key_type.empty() || key_type == "R1", chain::unsupported_key_type_exception, "Secure Enclave wallet only supports R1 keys"); - return (string)my->create(); + return my->create().to_string(); } bool se_wallet::remove_key(string key) { diff --git a/plugins/wallet_plugin/wallet.cpp b/plugins/wallet_plugin/wallet.cpp index 6c64de75491..c71b52993e5 100644 --- a/plugins/wallet_plugin/wallet.cpp +++ b/plugins/wallet_plugin/wallet.cpp @@ -183,8 +183,8 @@ class soft_wallet_impl else EOS_THROW(chain::unsupported_key_type_exception, "Key type \"${kt}\" not supported by software wallet", ("kt", key_type)); - import_key((string)priv_key); - return 
(string)priv_key.get_public_key(); + import_key(priv_key.to_string()); + return priv_key.get_public_key().to_string(); } bool load_wallet_file(string wallet_filename = "") diff --git a/plugins/wallet_plugin/wallet_manager.cpp b/plugins/wallet_plugin/wallet_manager.cpp index 304fdccceb8..0b3bbc6c696 100644 --- a/plugins/wallet_plugin/wallet_manager.cpp +++ b/plugins/wallet_plugin/wallet_manager.cpp @@ -12,7 +12,7 @@ constexpr auto password_prefix = "PW"; std::string gen_password() { auto key = private_key_type::generate(); - return password_prefix + string(key); + return password_prefix + key.to_string(); } diff --git a/plugins/wallet_plugin/yubihsm_wallet.cpp b/plugins/wallet_plugin/yubihsm_wallet.cpp index cce98fc69e5..0ff2d83116f 100644 --- a/plugins/wallet_plugin/yubihsm_wallet.cpp +++ b/plugins/wallet_plugin/yubihsm_wallet.cpp @@ -252,7 +252,7 @@ bool yubihsm_wallet::import_key(string wif_key) { string yubihsm_wallet::create_key(string key_type) { EOS_ASSERT(key_type.empty() || key_type == "R1", chain::unsupported_key_type_exception, "YubiHSM wallet only supports R1 keys"); - return (string)my->create(); + return my->create().to_string(); } bool yubihsm_wallet::remove_key(string key) { diff --git a/programs/cleos/main.cpp b/programs/cleos/main.cpp index 0c827426af1..876119bd1d9 100644 --- a/programs/cleos/main.cpp +++ b/programs/cleos/main.cpp @@ -2149,7 +2149,7 @@ void get_account( const string& accountName, const string& coresym, bool json_fo std::cout << indent << std::string(depth*3, ' ') << name << ' ' << std::setw(5) << p.required_auth.threshold << ": "; const char *sep = ""; for ( auto it = p.required_auth.keys.begin(); it != p.required_auth.keys.end(); ++it ) { - std::cout << sep << it->weight << ' ' << string(it->key); + std::cout << sep << it->weight << ' ' << it->key.to_string(); sep = ", "; } for ( auto& acc : p.required_auth.accounts ) { @@ -2444,8 +2444,8 @@ int main( int argc, char** argv ) { } auto pk = r1 ? 
private_key_type::generate_r1() : private_key_type::generate(); - auto privs = string(pk); - auto pubs = string(pk.get_public_key()); + auto privs = pk.to_string(); + auto pubs = pk.get_public_key().to_string(); if (print_console) { std::cout << localized("Private key: ${key}", ("key", privs) ) << std::endl; std::cout << localized("Public key: ${key}", ("key", pubs ) ) << std::endl; @@ -3290,7 +3290,7 @@ int main( int argc, char** argv ) { fc::variants vs = {fc::variant(wallet_name), fc::variant(wallet_key)}; call(wallet_url, wallet_import_key, vs); - std::cout << localized("imported private key for: ${pubkey}", ("pubkey", std::string(pubkey))) << std::endl; + std::cout << localized("imported private key for: ${pubkey}", ("pubkey", pubkey.to_string())) << std::endl; }); // remove keys from wallet diff --git a/programs/eosio-launcher/main.cpp b/programs/eosio-launcher/main.cpp index 74e3ac3340d..8d1a4247b1b 100644 --- a/programs/eosio-launcher/main.cpp +++ b/programs/eosio-launcher/main.cpp @@ -1115,9 +1115,9 @@ launcher_def::write_config_file (tn_node_def &node) { } if (allowed_connections & PC_SPECIFIED) { cfg << "allowed-connection = specified\n"; - cfg << "peer-key = \"" << string(node.keys.begin()->get_public_key()) << "\"\n"; - cfg << "peer-private-key = [\"" << string(node.keys.begin()->get_public_key()) - << "\",\"" << string(*node.keys.begin()) << "\"]\n"; + cfg << "peer-key = \"" << node.keys.begin()->get_public_key().to_string() << "\"\n"; + cfg << "peer-private-key = [\"" << node.keys.begin()->get_public_key().to_string() + << "\",\"" << node.keys.begin()->to_string() << "\"]\n"; } } @@ -1130,8 +1130,8 @@ launcher_def::write_config_file (tn_node_def &node) { } if (instance.has_db || node.producers.size()) { for (const auto &kp : node.keys ) { - cfg << "private-key = [\"" << string(kp.get_public_key()) - << "\",\"" << string(kp) << "\"]\n"; + cfg << "private-key = [\"" << kp.get_public_key().to_string() + << "\",\"" << kp.to_string() << "\"]\n"; } for (auto &p : node.producers) { cfg << "producer-name = " << p << "\n"; @@ -1207,7 +1207,7 @@ launcher_def::init_genesis () { eosio::chain::genesis_state default_genesis; fc::json::save_to_file( default_genesis, genesis_path, true ); } - string bioskey = string(network.nodes["bios"].keys[0].get_public_key()); + string bioskey = network.nodes["bios"].keys[0].get_public_key().to_string(); fc::json::from_file(genesis_path).as(genesis_from_file); genesis_from_file.initial_key = public_key_type(bioskey); @@ -1277,7 +1277,7 @@ launcher_def::write_bios_boot () { } else if (key == "prodkeys" ) { for (auto &node : network.nodes) { - brb << "wcmd import -n ignition --private-key " << string(node.second.keys[0]) << "\n"; + brb << "wcmd import -n ignition --private-key " << node.second.keys[0].to_string() << "\n"; } } else if (key == "cacmd") { @@ -1286,7 +1286,7 @@ launcher_def::write_bios_boot () { continue; } brb << "cacmd " << p.producer_name - << " " << string(p.block_signing_key) << " " << string(p.block_signing_key) << "\n"; + << " " << p.block_signing_key.to_string() << " " << p.block_signing_key.to_string() << "\n"; } } } diff --git a/scripts/eosio_build_darwin.sh b/scripts/eosio_build_darwin.sh index d6c8d11389e..9f008a1c68b 100755 --- a/scripts/eosio_build_darwin.sh +++ b/scripts/eosio_build_darwin.sh @@ -6,7 +6,7 @@ echo "Disk install: ${DISK_INSTALL}" echo "Disk space total: ${DISK_TOTAL}G" echo "Disk space available: ${DISK_AVAIL}G" -[[ "${OS_MIN}" -lt 12 ]] && echo "You must be running Mac OS 10.12.x or higher to install EOSIO." 
&& exit 1 +[[ "${OS_MIN}" -lt 14 ]] && echo "You must be running Mac OS 10.14.x or higher to install EOSIO." && exit 1 [[ $MEM_GIG -lt 7 ]] && echo "Your system must have 7 or more Gigabytes of physical memory installed." && exit 1 [[ "${DISK_AVAIL}" -lt "${DISK_MIN}" ]] && echo " - You must have at least ${DISK_MIN}GB of available storage to install EOSIO." && exit 1 diff --git a/scripts/eosio_build_darwin_deps b/scripts/eosio_build_darwin_deps index d9a3bdb9e26..e8e5713464a 100755 --- a/scripts/eosio_build_darwin_deps +++ b/scripts/eosio_build_darwin_deps @@ -6,7 +6,6 @@ wget,/usr/local/bin/wget gmp,/usr/local/opt/gmp/include/gmpxx.h pkgconfig,/usr/local/bin/pkg-config python,/usr/local/opt/python3 -python@2,/usr/local/opt/python2 doxygen,/usr/local/bin/doxygen libusb,/usr/local/lib/libusb-1.0.0.dylib openssl@1.1,/usr/local/opt/openssl@1.1/lib/libssl.a \ No newline at end of file diff --git a/tests/wallet_tests.cpp b/tests/wallet_tests.cpp index 64a9fe0db4a..5787b5e67a7 100644 --- a/tests/wallet_tests.cpp +++ b/tests/wallet_tests.cpp @@ -32,12 +32,12 @@ BOOST_AUTO_TEST_CASE(wallet_test) auto priv = fc::crypto::private_key::generate(); auto pub = priv.get_public_key(); - auto wif = (std::string)priv; + auto wif = priv.to_string(); wallet.import_key(wif); BOOST_CHECK_EQUAL(1u, wallet.list_keys().size()); auto privCopy = wallet.get_private_key(pub); - BOOST_CHECK_EQUAL(wif, (std::string)privCopy); + BOOST_CHECK_EQUAL(wif, privCopy.to_string()); wallet.lock(); BOOST_CHECK(wallet.is_locked()); @@ -57,7 +57,7 @@ BOOST_AUTO_TEST_CASE(wallet_test) BOOST_CHECK_EQUAL(1u, wallet2.list_keys().size()); auto privCopy2 = wallet2.get_private_key(pub); - BOOST_CHECK_EQUAL(wif, (std::string)privCopy2); + BOOST_CHECK_EQUAL(wif, privCopy2.to_string()); fc::remove("wallet_test.json"); } FC_LOG_AND_RETHROW() } @@ -115,7 +115,7 @@ BOOST_AUTO_TEST_CASE(wallet_manager_test) // key3 was not automatically imported BOOST_CHECK(std::find(keys.cbegin(), keys.cend(), pub_pri_pair(key3)) == keys.cend()); - wm.remove_key("test", pw, string(pub_pri_pair(key2).first)); + wm.remove_key("test", pw, pub_pri_pair(key2).first.to_string()); BOOST_CHECK_EQUAL(1u, wm.get_public_keys().size()); keys = wm.list_keys("test", pw); BOOST_CHECK(std::find(keys.cbegin(), keys.cend(), pub_pri_pair(key2)) == keys.cend()); @@ -123,9 +123,9 @@ BOOST_AUTO_TEST_CASE(wallet_manager_test) BOOST_CHECK_EQUAL(2u, wm.get_public_keys().size()); keys = wm.list_keys("test", pw); BOOST_CHECK(std::find(keys.cbegin(), keys.cend(), pub_pri_pair(key2)) != keys.cend()); - BOOST_CHECK_THROW(wm.remove_key("test", pw, string(pub_pri_pair(key3).first)), fc::exception); + BOOST_CHECK_THROW(wm.remove_key("test", pw, pub_pri_pair(key3).first.to_string()), fc::exception); BOOST_CHECK_EQUAL(2u, wm.get_public_keys().size()); - BOOST_CHECK_THROW(wm.remove_key("test", "PWnogood", string(pub_pri_pair(key2).first)), wallet_invalid_password_exception); + BOOST_CHECK_THROW(wm.remove_key("test", "PWnogood", pub_pri_pair(key2).first.to_string()), wallet_invalid_password_exception); BOOST_CHECK_EQUAL(2u, wm.get_public_keys().size()); wm.lock("test"); @@ -198,7 +198,7 @@ BOOST_AUTO_TEST_CASE(wallet_manager_test) //now pluck out the private key from the wallet and see if the public key of said // private key matches what was returned earlier from the create_key() call private_key_type create_key_priv(wm.list_keys("testgen", pw).cbegin()->second); - BOOST_CHECK_EQUAL((string)create_key_pub, (string)create_key_priv.get_public_key()); + BOOST_CHECK_EQUAL(create_key_pub.to_string(), 
create_key_priv.get_public_key().to_string()); wm.lock("testgen"); BOOST_CHECK(fc::exists("testgen.wallet")); diff --git a/unittests/abi_tests.cpp b/unittests/abi_tests.cpp index 9c7a2b2d6ed..b363d4d2e19 100644 --- a/unittests/abi_tests.cpp +++ b/unittests/abi_tests.cpp @@ -892,9 +892,9 @@ BOOST_AUTO_TEST_CASE(updateauth_test) BOOST_TEST(2147483145u == updauth.auth.threshold); BOOST_TEST_REQUIRE(2u == updauth.auth.keys.size()); - BOOST_TEST("EOS65rXebLhtk2aTTzP4e9x1AQZs7c5NNXJp89W8R3HyaA6Zyd4im" == (std::string)updauth.auth.keys[0].key); + BOOST_TEST("EOS65rXebLhtk2aTTzP4e9x1AQZs7c5NNXJp89W8R3HyaA6Zyd4im" == updauth.auth.keys[0].key.to_string()); BOOST_TEST(57005u == updauth.auth.keys[0].weight); - BOOST_TEST("EOS5eVr9TVnqwnUBNwf9kwMTbrHvX5aPyyEG97dz2b2TNeqWRzbJf" == (std::string)updauth.auth.keys[1].key); + BOOST_TEST("EOS5eVr9TVnqwnUBNwf9kwMTbrHvX5aPyyEG97dz2b2TNeqWRzbJf" == updauth.auth.keys[1].key.to_string()); BOOST_TEST(57605u == updauth.auth.keys[1].weight); BOOST_TEST_REQUIRE(2u == updauth.auth.accounts.size()); @@ -996,9 +996,9 @@ BOOST_AUTO_TEST_CASE(newaccount_test) BOOST_TEST(2147483145u == newacct.owner.threshold); BOOST_TEST_REQUIRE(2u == newacct.owner.keys.size()); - BOOST_TEST("EOS65rXebLhtk2aTTzP4e9x1AQZs7c5NNXJp89W8R3HyaA6Zyd4im" == (std::string)newacct.owner.keys[0].key); + BOOST_TEST("EOS65rXebLhtk2aTTzP4e9x1AQZs7c5NNXJp89W8R3HyaA6Zyd4im" == newacct.owner.keys[0].key.to_string()); BOOST_TEST(57005u == newacct.owner.keys[0].weight); - BOOST_TEST("EOS5eVr9TVnqwnUBNwf9kwMTbrHvX5aPyyEG97dz2b2TNeqWRzbJf" == (std::string)newacct.owner.keys[1].key); + BOOST_TEST("EOS5eVr9TVnqwnUBNwf9kwMTbrHvX5aPyyEG97dz2b2TNeqWRzbJf" == newacct.owner.keys[1].key.to_string()); BOOST_TEST(57605u == newacct.owner.keys[1].weight); BOOST_TEST_REQUIRE(2u == newacct.owner.accounts.size()); @@ -1012,9 +1012,9 @@ BOOST_AUTO_TEST_CASE(newaccount_test) BOOST_TEST(2146483145u == newacct.active.threshold); BOOST_TEST_REQUIRE(2u == newacct.active.keys.size()); - BOOST_TEST("EOS65rXebLhtk2aTTzP4e9x1AQZs7c5NNXJp89W8R3HyaA6Zyd4im" == (std::string)newacct.active.keys[0].key); + BOOST_TEST("EOS65rXebLhtk2aTTzP4e9x1AQZs7c5NNXJp89W8R3HyaA6Zyd4im" == newacct.active.keys[0].key.to_string()); BOOST_TEST(57005u == newacct.active.keys[0].weight); - BOOST_TEST("EOS5eVr9TVnqwnUBNwf9kwMTbrHvX5aPyyEG97dz2b2TNeqWRzbJf" == (std::string)newacct.active.keys[1].key); + BOOST_TEST("EOS5eVr9TVnqwnUBNwf9kwMTbrHvX5aPyyEG97dz2b2TNeqWRzbJf" == newacct.active.keys[1].key.to_string()); BOOST_TEST(57605u == newacct.active.keys[1].weight); BOOST_TEST_REQUIRE(2u == newacct.active.accounts.size()); diff --git a/unittests/api_tests.cpp b/unittests/api_tests.cpp index 7f40f291ef2..a5e745436bd 100644 --- a/unittests/api_tests.cpp +++ b/unittests/api_tests.cpp @@ -739,7 +739,7 @@ BOOST_FIXTURE_TEST_CASE(cfa_tx_signature, TESTER) try { set_transaction_headers(tx2); const private_key_type& priv_key = get_private_key(name("dummy"), "active"); - BOOST_TEST((std::string)tx1.sign(priv_key, control->get_chain_id()) != (std::string)tx2.sign(priv_key, control->get_chain_id())); + BOOST_TEST(tx1.sign(priv_key, control->get_chain_id()).to_string() != tx2.sign(priv_key, control->get_chain_id()).to_string()); BOOST_REQUIRE_EQUAL( validate(), true ); } FC_LOG_AND_RETHROW() diff --git a/unittests/auth_tests.cpp b/unittests/auth_tests.cpp index 3bc01bc18bc..7c440ca3ed1 100644 --- a/unittests/auth_tests.cpp +++ b/unittests/auth_tests.cpp @@ -306,14 +306,14 @@ try { BOOST_TEST(joe_owner_authority.auth.threshold == 1u); 
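The wallet and unit-test hunks above, like the cleos, launcher, and plugin hunks earlier in the diff, replace `(string)` / `std::string(...)` casts on fc key and signature types with explicit `to_string()` calls (see the new `shared_public_key::to_string()` in authority.hpp). A minimal, self-contained sketch of the same refactor, using a hypothetical `demo_key` struct rather than the real `fc::crypto` types:

```cpp
#include <iostream>
#include <string>

// Hypothetical stand-in for a key type such as fc::crypto::public_key.
struct demo_key {
    std::string encoded;

    // Old pattern: conversion operator, invoked via (string)key casts.
    explicit operator std::string() const { return encoded; }

    // New pattern: an explicit, greppable conversion method.
    std::string to_string() const { return encoded; }
};

int main() {
    demo_key pub{"EOS65rXebLhtk2aTTzP4e9x1AQZs7c5NNXJp89W8R3HyaA6Zyd4im"};
    std::cout << static_cast<std::string>(pub) << '\n'; // before: (string)pub
    std::cout << pub.to_string() << '\n';               // after:  pub.to_string()
    return 0;
}
```

An explicit method keeps the conversion visible and searchable at every call site and avoids accidental implicit conversions, which is the usual motivation for this style of change.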
    BOOST_TEST(joe_owner_authority.auth.accounts.size() == 1u);
    BOOST_TEST(joe_owner_authority.auth.keys.size() == 1u);
-   BOOST_TEST(string(joe_owner_authority.auth.keys[0].key) == string(chain.get_public_key(name("joe"), "owner")));
+   BOOST_TEST(joe_owner_authority.auth.keys[0].key.to_string() == chain.get_public_key(name("joe"), "owner").to_string());
    BOOST_TEST(joe_owner_authority.auth.keys[0].weight == 1u);

    const auto& joe_active_authority = chain.get(boost::make_tuple(name("joe"), name("active")));
    BOOST_TEST(joe_active_authority.auth.threshold == 1u);
    BOOST_TEST(joe_active_authority.auth.accounts.size() == 1u);
    BOOST_TEST(joe_active_authority.auth.keys.size() == 1u);
-   BOOST_TEST(string(joe_active_authority.auth.keys[0].key) == string(chain.get_public_key(name("joe"), "active")));
+   BOOST_TEST(joe_active_authority.auth.keys[0].key.to_string() == chain.get_public_key(name("joe"), "active").to_string());
    BOOST_TEST(joe_active_authority.auth.keys[0].weight == 1u);

    // Create duplicate name
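For context on the abi_serializer hunks near the top of this diff: the built-in pack/unpack functions now take a `const fc::time_point&` deadline, and `public_key`/`signature` are registered through the new `pack_unpack_deadline()` helper so their deserialization can be cut off by `FC_CHECK_DEADLINE`. Below is a simplified sketch of that pattern with standard-library stand-ins for the fc types; names such as `check_deadline` and `unpack_fn` are illustrative, not the real API.

```cpp
#include <chrono>
#include <functional>
#include <map>
#include <stdexcept>
#include <string>

using time_point = std::chrono::steady_clock::time_point;

// Stand-in for FC_CHECK_DEADLINE: throw once the serialization budget is spent.
static void check_deadline(const time_point& deadline) {
    if (std::chrono::steady_clock::now() > deadline)
        throw std::runtime_error("abi serialization deadline exceeded");
}

// The unpack functions now receive the deadline alongside the array/optional
// flags, mirroring the widened unpack_function signature in abi_serializer.hpp.
using unpack_fn = std::function<std::string(const std::string& raw,
                                            bool is_array, bool is_optional,
                                            const time_point& deadline)>;

int main() {
    std::map<std::string, unpack_fn> built_in_types;

    // Deadline-aware registration, analogous to pack_unpack_deadline<public_key>().
    built_in_types["public_key"] =
        [](const std::string& raw, bool /*is_array*/, bool /*is_optional*/,
           const time_point& deadline) {
            check_deadline(deadline);   // enforced before the potentially slow decode
            return "decoded:" + raw;    // placeholder for the real key deserialization
        };

    const auto deadline = std::chrono::steady_clock::now() + std::chrono::milliseconds(15);
    built_in_types.at("public_key")("raw-bytes", false, false, deadline);
    return 0;
}
```

Related to the same deadline plumbing, chain_plugin's internal field is renamed from `abi_serializer_max_time_ms` to `abi_serializer_max_time_us`: the `abi-serializer-max-time-ms` command-line option keeps its millisecond units and is converted with `* 1000` when stored as `fc::microseconds`.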