From 1352289d31b419d55553a810fb4f543cc064bc5c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=B6rn=20Franke?=
Date: Wed, 17 Mar 2021 22:20:23 +0100
Subject: [PATCH] bump up versions

---
 build.sbt                                     |  8 +-
 ...oinBlockDSSparkMasterIntegrationSpec.scala | 88 +++++++++----------
 ...nsactionDSSparkMasterIntegrationSpec.scala |  4 +-
 .../bitcoin/block/BitcoinBlockRelation.scala  |  2 +-
 .../spark/bitcoin/model/BitcoinBlock.scala    | 38 ++++----
 .../BitcoinTransactionRelation.scala          |  6 +-
 .../block/EthereumBlockRelation.scala         |  6 +-
 7 files changed, 76 insertions(+), 76 deletions(-)

diff --git a/build.sbt b/build.sbt
index 867bbca..819a077 100644
--- a/build.sbt
+++ b/build.sbt
@@ -6,20 +6,20 @@ lazy val root = (project in file("."))
     name := "spark-hadoopcryptoledger-ds",
 
-    version := "1.2.1",
+    version := "1.3.0",
 
     scalaVersion := "2.11.12",
 
     crossScalaVersions := Seq("2.11.12","2.12.10"),
 
     libraryDependencies ++= Seq(
 
-      "com.github.zuinnote" % "hadoopcryptoledger-fileformat" % "1.2.1" % "compile",
+      "com.github.zuinnote" % "hadoopcryptoledger-fileformat" % "1.3.0" % "compile",
 
-      "org.bouncycastle" % "bcprov-ext-jdk15on" % "1.64" % "compile",
+      "org.bouncycastle" % "bcprov-ext-jdk15on" % "1.68" % "compile",
 
       "org.apache.spark" %% "spark-core" % "2.4.4" % "provided",
       "org.apache.spark" %% "spark-sql" % "2.4.4" % "provided",
       "org.apache.hadoop" % "hadoop-client" % "2.7.0" % "provided",
-      "org.apache.logging.log4j" % "log4j-api" % "2.4.1" % "provided",
+      "org.apache.logging.log4j" % "log4j-api" % "2.14.0" % "provided",
 
       "org.scalatest" %% "scalatest" % "3.1.0" % "test,it",
diff --git a/src/it/scala/org/zuinnote/spark/bitcoin/block/SparkBitcoinBlockDSSparkMasterIntegrationSpec.scala b/src/it/scala/org/zuinnote/spark/bitcoin/block/SparkBitcoinBlockDSSparkMasterIntegrationSpec.scala
index fdd3323..f0fbe91 100644
--- a/src/it/scala/org/zuinnote/spark/bitcoin/block/SparkBitcoinBlockDSSparkMasterIntegrationSpec.scala
+++ b/src/it/scala/org/zuinnote/spark/bitcoin/block/SparkBitcoinBlockDSSparkMasterIntegrationSpec.scala
@@ -114,19 +114,19 @@ class SparkBitcoinBlockDSSparkMasterIntegrationSpec extends AnyFlatSpec with Bef
     assert("transactions" == df.columns(9))
     // validate block data
     val blockSize = df.select("blockSize").collect
-    assert(285 == blockSize(0).getInt(0))
+    assert(285 == blockSize(0).getLong(0))
     val magicNo = df.select("magicNo").collect
     val magicNoExpected: Array[Byte] = Array(0xF9.toByte, 0xBE.toByte, 0xB4.toByte, 0xD9.toByte)
     assert(magicNoExpected.deep == magicNo(0).get(0).asInstanceOf[Array[Byte]].deep)
     val version = df.select("version").collect
-    assert(1 == version(0).getInt(0))
+    assert(1 == version(0).getLong(0))
     val time = df.select("time").collect
-    assert(1231006505 == time(0).getInt(0))
+    assert(1231006505 == time(0).getLong(0))
     val bits = df.select("bits").collect
     val bitsExpected: Array[Byte] = Array(0xFF.toByte, 0xFF.toByte, 0x00.toByte, 0x1D.toByte)
     assert(bitsExpected.deep == bits(0).get(0).asInstanceOf[Array[Byte]].deep)
     val nonce = df.select("nonce").collect
-    assert(2083236893 == nonce(0).getInt(0))
+    assert(2083236893 == nonce(0).getLong(0))
     val transactionCounter = df.select("transactionCounter").collect
     assert(1 == transactionCounter(0).getLong(0))
     val hashPrevBlock = df.select("hashPrevBlock").collect
@@ -142,7 +142,7 @@ class SparkBitcoinBlockDSSparkMasterIntegrationSpec extends AnyFlatSpec with Bef
     val transactionsDFCount = transactionsDF.count
     assert(1 == transactionsDFCount)
     val transactionsVersion = transactionsDF.select("transactions.version").collect
-    assert(1 == transactionsVersion(0).getInt(0))
+    assert(1 == transactionsVersion(0).getLong(0))
     val inCounter = transactionsDF.select("transactions.inCounter").collect
     val inCounterExpected: Array[Byte] = Array(0x01.toByte)
     assert(inCounterExpected.deep == inCounter(0).get(0).asInstanceOf[Array[Byte]].deep)
@@ -150,7 +150,7 @@ class SparkBitcoinBlockDSSparkMasterIntegrationSpec extends AnyFlatSpec with Bef
     val outCounterExpected: Array[Byte] = Array(0x01.toByte)
     assert(outCounterExpected.deep == outCounter(0).get(0).asInstanceOf[Array[Byte]].deep)
     val transactionsLockTime = transactionsDF.select("transactions.lockTime").collect
-    assert(0 == transactionsLockTime(0).getInt(0))
+    assert(0 == transactionsLockTime(0).getLong(0))
     val transactionsLOIDF = transactionsDF.select(explode(transactionsDF("transactions.listOfInputs")).alias("listOfInputs"))
     val prevTransactionHash = transactionsLOIDF.select("listOfInputs.prevTransactionHash").collect
     val prevTransactionHashExpected: Array[Byte] = Array(0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte)
@@ -212,19 +212,19 @@ class SparkBitcoinBlockDSSparkMasterIntegrationSpec extends AnyFlatSpec with Bef
     assert("transactions" == df.columns(9))
     // validate block data
     val blockSize = df.select("blockSize").collect
-    assert(285 == blockSize(0).getInt(0))
+    assert(285 == blockSize(0).getLong(0))
     val magicNo = df.select("magicNo").collect
     val magicNoExpected: Array[Byte] = Array(0xF9.toByte, 0xBE.toByte, 0xB4.toByte, 0xD9.toByte)
     assert(magicNoExpected.deep == magicNo(0).get(0).asInstanceOf[Array[Byte]].deep)
     val version = df.select("version").collect
-    assert(1 == version(0).getInt(0))
+    assert(1 == version(0).getLong(0))
     val time = df.select("time").collect
-    assert(1231006505 == time(0).getInt(0))
+    assert(1231006505 == time(0).getLong(0))
     val bits = df.select("bits").collect
     val bitsExpected: Array[Byte] = Array(0xFF.toByte, 0xFF.toByte, 0x00.toByte, 0x1D.toByte)
     assert(bitsExpected.deep == bits(0).get(0).asInstanceOf[Array[Byte]].deep)
     val nonce = df.select("nonce").collect
-    assert(2083236893 == nonce(0).getInt(0))
+    assert(2083236893 == nonce(0).getLong(0))
     val transactionCounter = df.select("transactionCounter").collect
     assert(1 == transactionCounter(0).getLong(0))
     val hashPrevBlock = df.select("hashPrevBlock").collect
@@ -240,7 +240,7 @@ class SparkBitcoinBlockDSSparkMasterIntegrationSpec extends AnyFlatSpec with Bef
     val transactionsDFCount = transactionsDF.count
     assert(1 == transactionsDFCount)
     val transactionsVersion = transactionsDF.select("transactions.version").collect
-    assert(1 == transactionsVersion(0).getInt(0))
+    assert(1 == transactionsVersion(0).getLong(0))
     val inCounter = transactionsDF.select("transactions.inCounter").collect
     val inCounterExpected: Array[Byte] = Array(0x01.toByte)
     assert(inCounterExpected.deep == inCounter(0).get(0).asInstanceOf[Array[Byte]].deep)
@@ -248,7 +248,7 @@ class SparkBitcoinBlockDSSparkMasterIntegrationSpec extends AnyFlatSpec with Bef
     val outCounterExpected: Array[Byte] = Array(0x01.toByte)
     assert(outCounterExpected.deep == outCounter(0).get(0).asInstanceOf[Array[Byte]].deep)
     val transactionsLockTime = transactionsDF.select("transactions.lockTime").collect
-    assert(0 == transactionsLockTime(0).getInt(0))
+    assert(0 == transactionsLockTime(0).getLong(0))
     val transactionsLOIDF = transactionsDF.select(explode(transactionsDF("transactions.listOfInputs")).alias("listOfInputs"))
     val prevTransactionHash = transactionsLOIDF.select("listOfInputs.prevTransactionHash").collect
     val prevTransactionHashExpected: Array[Byte] = Array(0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte)
@@ -314,19 +314,19 @@ class SparkBitcoinBlockDSSparkMasterIntegrationSpec extends AnyFlatSpec with Bef
     assert("transactions" == df.columns(9))
     // validate block data
     val blockSize = df.select("blockSize").collect
-    assert(285 == blockSize(0).getInt(0))
+    assert(285 == blockSize(0).getLong(0))
     val magicNo = df.select("magicNo").collect
     val magicNoExpected: Array[Byte] = Array(0xF9.toByte, 0xBE.toByte, 0xB4.toByte, 0xD9.toByte)
     assert(magicNoExpected.deep == magicNo(0).get(0).asInstanceOf[Array[Byte]].deep)
     val version = df.select("version").collect
-    assert(1 == version(0).getInt(0))
+    assert(1 == version(0).getLong(0))
     val time = df.select("time").collect
-    assert(1231006505 == time(0).getInt(0))
+    assert(1231006505 == time(0).getLong(0))
     val bits = df.select("bits").collect
     val bitsExpected: Array[Byte] = Array(0xFF.toByte, 0xFF.toByte, 0x00.toByte, 0x1D.toByte)
     assert(bitsExpected.deep == bits(0).get(0).asInstanceOf[Array[Byte]].deep)
     val nonce = df.select("nonce").collect
-    assert(2083236893 == nonce(0).getInt(0))
+    assert(2083236893 == nonce(0).getLong(0))
     val transactionCounter = df.select("transactionCounter").collect
     assert(1 == transactionCounter(0).getLong(0))
     val hashPrevBlock = df.select("hashPrevBlock").collect
@@ -347,7 +347,7 @@ class SparkBitcoinBlockDSSparkMasterIntegrationSpec extends AnyFlatSpec with Bef
     assert(currentTransactionHashExpected.deep == currentTransactionHash(0).get(0).asInstanceOf[Array[Byte]].deep)
 
     val transactionsVersion = transactionsDF.select("transactions.version").collect
-    assert(1 == transactionsVersion(0).getInt(0))
+    assert(1 == transactionsVersion(0).getLong(0))
     val inCounter = transactionsDF.select("transactions.inCounter").collect
     val inCounterExpected: Array[Byte] = Array(0x01.toByte)
     assert(inCounterExpected.deep == inCounter(0).get(0).asInstanceOf[Array[Byte]].deep)
@@ -355,7 +355,7 @@ class SparkBitcoinBlockDSSparkMasterIntegrationSpec extends AnyFlatSpec with Bef
     val outCounterExpected: Array[Byte] = Array(0x01.toByte)
     assert(outCounterExpected.deep == outCounter(0).get(0).asInstanceOf[Array[Byte]].deep)
     val transactionsLockTime = transactionsDF.select("transactions.lockTime").collect
-    assert(0 == transactionsLockTime(0).getInt(0))
+    assert(0 == transactionsLockTime(0).getLong(0))
     val transactionsLOIDF = transactionsDF.select(explode(transactionsDF("transactions.listOfInputs")).alias("listOfInputs"))
     val prevTransactionHash = transactionsLOIDF.select("listOfInputs.prevTransactionHash").collect
     val prevTransactionHashExpected: Array[Byte] = Array(0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte)
@@ -417,19 +417,19 @@ class SparkBitcoinBlockDSSparkMasterIntegrationSpec extends AnyFlatSpec with Bef
     assert("transactions" == df.columns(9))
     // validate block data
     val blockSize = df.select("blockSize").collect
-    assert(285 == blockSize(0).getInt(0))
+    assert(285 == blockSize(0).getLong(0))
     val magicNo = df.select("magicNo").collect
     val magicNoExpected: Array[Byte] = Array(0xF9.toByte, 0xBE.toByte, 0xB4.toByte, 0xD9.toByte)
     assert(magicNoExpected.deep == magicNo(0).get(0).asInstanceOf[Array[Byte]].deep)
     val version = df.select("version").collect
-    assert(1 == version(0).getInt(0))
+    assert(1 == version(0).getLong(0))
     val time = df.select("time").collect
-    assert(1231006505 == time(0).getInt(0))
+    assert(1231006505 == time(0).getLong(0))
     val bits = df.select("bits").collect
     val bitsExpected: Array[Byte] = Array(0xFF.toByte, 0xFF.toByte, 0x00.toByte, 0x1D.toByte)
     assert(bitsExpected.deep == bits(0).get(0).asInstanceOf[Array[Byte]].deep)
     val nonce = df.select("nonce").collect
-    assert(2083236893 == nonce(0).getInt(0))
+    assert(2083236893 == nonce(0).getLong(0))
     val transactionCounter = df.select("transactionCounter").collect
     assert(1 == transactionCounter(0).getLong(0))
     val hashPrevBlock = df.select("hashPrevBlock").collect
@@ -450,7 +450,7 @@ class SparkBitcoinBlockDSSparkMasterIntegrationSpec extends AnyFlatSpec with Bef
     assert(currentTransactionHashExpected.deep == currentTransactionHash(0).get(0).asInstanceOf[Array[Byte]].deep)
 
     val transactionsVersion = transactionsDF.select("transactions.version").collect
-    assert(1 == transactionsVersion(0).getInt(0))
+    assert(1 == transactionsVersion(0).getLong(0))
     val inCounter = transactionsDF.select("transactions.inCounter").collect
     val inCounterExpected: Array[Byte] = Array(0x01.toByte)
     assert(inCounterExpected.deep == inCounter(0).get(0).asInstanceOf[Array[Byte]].deep)
@@ -458,7 +458,7 @@ class SparkBitcoinBlockDSSparkMasterIntegrationSpec extends AnyFlatSpec with Bef
     val outCounterExpected: Array[Byte] = Array(0x01.toByte)
     assert(outCounterExpected.deep == outCounter(0).get(0).asInstanceOf[Array[Byte]].deep)
     val transactionsLockTime = transactionsDF.select("transactions.lockTime").collect
-    assert(0 == transactionsLockTime(0).getInt(0))
+    assert(0 == transactionsLockTime(0).getLong(0))
     val transactionsLOIDF = transactionsDF.select(explode(transactionsDF("transactions.listOfInputs")).alias("listOfInputs"))
     val prevTransactionHash = transactionsLOIDF.select("listOfInputs.prevTransactionHash").collect
     val prevTransactionHashExpected: Array[Byte] = Array(0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte)
@@ -524,19 +524,19 @@ class SparkBitcoinBlockDSSparkMasterIntegrationSpec extends AnyFlatSpec with Bef
     assert("transactions" == df.columns(9))
     // validate block data
     val blockSize = df.select("blockSize").collect
-    assert(999275 == blockSize(0).getInt(0))
+    assert(999275 == blockSize(0).getLong(0))
     val magicNo = df.select("magicNo").collect
     val magicNoExpected: Array[Byte] = Array(0xF9.toByte, 0xBE.toByte, 0xB4.toByte, 0xD9.toByte)
     assert(magicNoExpected.deep == magicNo(0).get(0).asInstanceOf[Array[Byte]].deep)
     val version = df.select("version").collect
-    assert(536870914 == version(0).getInt(0))
+    assert(536870914 == version(0).getLong(0))
     val time = df.select("time").collect
-    assert(1503889880 == time(0).getInt(0))
+    assert(1503889880 == time(0).getLong(0))
     val bits = df.select("bits").collect
     val bitsExpected: Array[Byte] = Array(0xE9.toByte, 0x3C.toByte, 0x01.toByte, 0x18.toByte)
     assert(bitsExpected.deep == bits(0).get(0).asInstanceOf[Array[Byte]].deep)
     val nonce = df.select("nonce").collect
-    assert(184429655 == nonce(0).getInt(0))
+    assert(184429655 == nonce(0).getLong(0))
     val transactionCounter = df.select("transactionCounter").collect
     assert(470 == transactionCounter(0).getLong(0))
     // validate transactions
@@ -584,22 +584,22 @@ class SparkBitcoinBlockDSSparkMasterIntegrationSpec extends AnyFlatSpec with Bef
     val nonce = df.select("nonce").collect
     // first block
     // validate block data
-    assert(1000031 == blockSize(0).getInt(0))
+    assert(1000031 == blockSize(0).getLong(0))
     assert(magicNoExpected.deep == magicNo(0).get(0).asInstanceOf[Array[Byte]].deep)
-    assert(536870912 == version(0).getInt(0))
-    assert(1503863706 == time(0).getInt(0))
+    assert(536870912 == version(0).getLong(0))
+    assert(1503863706 == time(0).getLong(0))
     assert(bitsExpected.deep == bits(0).get(0).asInstanceOf[Array[Byte]].deep)
-    assert(-706531299 == nonce(0).getInt(0))
+    assert(-706531299 == nonce(0).getLong(0))
     assert(2191 == transactionCounter(0).getLong(0))
 
     // second block
     // validate block data
-    assert(999304 == blockSize(1).getInt(0))
+    assert(999304 == blockSize(1).getLong(0))
     assert(magicNoExpected.deep == magicNo(1).get(0).asInstanceOf[Array[Byte]].deep)
-    assert(536870912 == version(1).getInt(0))
-    assert(1503836377 == time(1).getInt(0))
+    assert(536870912 == version(1).getLong(0))
+    assert(1503836377 == time(1).getLong(0))
     assert(bitsExpected.deep == bits(1).get(0).asInstanceOf[Array[Byte]].deep)
-    assert(-566627396 == nonce(1).getInt(0))
+    assert(-566627396 == nonce(1).getLong(0))
     assert(2508 == transactionCounter(1).getLong(0))
     // check transactions
     val transactionsDF = df.select(explode(df("transactions")).alias("transactions"))
@@ -638,19 +638,19 @@ class SparkBitcoinBlockDSSparkMasterIntegrationSpec extends AnyFlatSpec with Bef
     assert("auxPOW" == df.columns(10))
     // validate block data
     val blockSize = df.select("blockSize").collect
-    assert(3125 == blockSize(0).getInt(0))
+    assert(3125 == blockSize(0).getLong(0))
     val magicNo = df.select("magicNo").collect
     val magicNoExpected: Array[Byte] = Array(0xF9.toByte, 0xBE.toByte, 0xB4.toByte, 0xFE.toByte)
     assert(magicNoExpected.deep == magicNo(0).get(0).asInstanceOf[Array[Byte]].deep)
     val version = df.select("version").collect
-    assert(65796 == version(0).getInt(0))
+    assert(65796 == version(0).getLong(0))
     val time = df.select("time").collect
-    assert(1506767051 == time(0).getInt(0))
+    assert(1506767051 == time(0).getLong(0))
     val bits = df.select("bits").collect
     val bitsExpected: Array[Byte] = Array(0x71.toByte, 0x63.toByte, 0x01.toByte, 0x18.toByte)
     assert(bitsExpected.deep == bits(0).get(0).asInstanceOf[Array[Byte]].deep)
     val nonce = df.select("nonce").collect
-    assert(0 == nonce(0).getInt(0))
+    assert(0 == nonce(0).getLong(0))
     val transactionCounter = df.select("transactionCounter").collect
     assert(7 == transactionCounter(0).getLong(0))
     // validate transactions
@@ -690,19 +690,19 @@ class SparkBitcoinBlockDSSparkMasterIntegrationSpec extends AnyFlatSpec with Bef
     assert("auxPOW" == df.columns(10))
     // validate block data
     val blockSize = df.select("blockSize").collect
-    assert(3125 == blockSize(0).getInt(0))
+    assert(3125 == blockSize(0).getLong(0))
     val magicNo = df.select("magicNo").collect
     val magicNoExpected: Array[Byte] = Array(0xF9.toByte, 0xBE.toByte, 0xB4.toByte, 0xFE.toByte)
     assert(magicNoExpected.deep == magicNo(0).get(0).asInstanceOf[Array[Byte]].deep)
     val version = df.select("version").collect
-    assert(65796 == version(0).getInt(0))
+    assert(65796 == version(0).getLong(0))
     val time = df.select("time").collect
-    assert(1506767051 == time(0).getInt(0))
+    assert(1506767051 == time(0).getLong(0))
     val bits = df.select("bits").collect
     val bitsExpected: Array[Byte] = Array(0x71.toByte, 0x63.toByte, 0x01.toByte, 0x18.toByte)
     assert(bitsExpected.deep == bits(0).get(0).asInstanceOf[Array[Byte]].deep)
     val nonce = df.select("nonce").collect
-    assert(0 == nonce(0).getInt(0))
+    assert(0 == nonce(0).getLong(0))
     val transactionCounter = df.select("transactionCounter").collect
     assert(7 == transactionCounter(0).getLong(0))
     // validate transactions
diff --git a/src/it/scala/org/zuinnote/spark/bitcoin/transaction/SparkBitcoinTransactionDSSparkMasterIntegrationSpec.scala b/src/it/scala/org/zuinnote/spark/bitcoin/transaction/SparkBitcoinTransactionDSSparkMasterIntegrationSpec.scala
index 4c8d146..1b82e11 100644
--- a/src/it/scala/org/zuinnote/spark/bitcoin/transaction/SparkBitcoinTransactionDSSparkMasterIntegrationSpec.scala
+++ b/src/it/scala/org/zuinnote/spark/bitcoin/transaction/SparkBitcoinTransactionDSSparkMasterIntegrationSpec.scala
@@ -118,7 +118,7 @@ class SparkBitcoinTransactionDSSparkMasterIntegrationSpec extends AnyFlatSpec wi
       0x7F.toByte, 0xC8.toByte, 0x1B.toByte, 0xC3.toByte, 0x88.toByte, 0x8A.toByte, 0x51.toByte, 0x32.toByte, 0x3A.toByte, 0x9F.toByte, 0xB8.toByte, 0xAA.toByte, 0x4B.toByte, 0x1E.toByte, 0x5E.toByte, 0x4A.toByte)
     assert(currentTransactionHashExpected.deep == currentTransactionHash(0).get(0).asInstanceOf[Array[Byte]].deep)
     val version = df.select("version").collect
-    assert(1 == version(0).getInt(0))
+    assert(1 == version(0).getLong(0))
     val inCounter = df.select("inCounter").collect
     val inCounterExpected: Array[Byte] = Array(0x01.toByte)
     assert(inCounterExpected.deep == inCounter(0).get(0).asInstanceOf[Array[Byte]].deep)
@@ -126,7 +126,7 @@ class SparkBitcoinTransactionDSSparkMasterIntegrationSpec extends AnyFlatSpec wi
     val outCounterExpected: Array[Byte] = Array(0x01.toByte)
     assert(outCounterExpected.deep == outCounter(0).get(0).asInstanceOf[Array[Byte]].deep)
     val transactionsLockTime = df.select("lockTime").collect
-    assert(0 == transactionsLockTime(0).getInt(0))
+    assert(0 == transactionsLockTime(0).getLong(0))
     val transactionsLOIDF = df.select(explode(df("listOfInputs")).alias("listOfInputs"))
     val prevTransactionHash = transactionsLOIDF.select("listOfInputs.prevTransactionHash").collect
     val prevTransactionHashExpected: Array[Byte] = Array(0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte)
diff --git a/src/main/scala/org/zuinnote/spark/bitcoin/block/BitcoinBlockRelation.scala b/src/main/scala/org/zuinnote/spark/bitcoin/block/BitcoinBlockRelation.scala
index 3b1cdb7..228560e 100644
--- a/src/main/scala/org/zuinnote/spark/bitcoin/block/BitcoinBlockRelation.scala
+++ b/src/main/scala/org/zuinnote/spark/bitcoin/block/BitcoinBlockRelation.scala
@@ -21,7 +21,7 @@ import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.sources.{BaseRelation, TableScan}
 import org.apache.spark.sql.types.StructType
 import org.apache.spark.sql.{Encoders, Row, SQLContext}
-import org.zuinnote.hadoop.bitcoin.format.common.{BitcoinBlock => RawBitcoinBlock}
+import org.zuinnote.hadoop.bitcoin.format.common.{BitcoinBlockWritable => RawBitcoinBlock}
 import org.zuinnote.hadoop.bitcoin.format.mapreduce._
 import org.zuinnote.spark.bitcoin.model._
 
diff --git a/src/main/scala/org/zuinnote/spark/bitcoin/model/BitcoinBlock.scala b/src/main/scala/org/zuinnote/spark/bitcoin/model/BitcoinBlock.scala
index c0fbb02..3526614 100644
--- a/src/main/scala/org/zuinnote/spark/bitcoin/model/BitcoinBlock.scala
+++ b/src/main/scala/org/zuinnote/spark/bitcoin/model/BitcoinBlock.scala
@@ -32,9 +32,9 @@ final case class ScriptWitness(witnessScriptLength: Array[Byte], witnessScript:
 
 final case class ScriptWitnessItem(stackItemCounter: Array[Byte], scriptWitnessList: Seq[ScriptWitness])
 
-final case class Transaction(version: Int, marker: Byte, flag: Byte, inCounter: Array[Byte], outCounter: Array[Byte],
+final case class Transaction(version: Long, marker: Byte, flag: Byte, inCounter: Array[Byte], outCounter: Array[Byte],
                              listOfInputs: Seq[Input], listOfOutputs: Seq[Output],
-                             listOfScriptWitnessItem: Seq[ScriptWitnessItem], lockTime: Int) {
+                             listOfScriptWitnessItem: Seq[ScriptWitnessItem], lockTime: Long) {
 
   private[bitcoin] def enriched(currentTransactionHash: Array[Byte]): EnrichedTransaction = {
     EnrichedTransaction(
@@ -44,13 +44,13 @@ final case class Transaction(version: Int, marker: Byte, flag: Byte, inCounter:
   }
 }
 
-final case class SingleTransaction(currentTransactionHash: Array[Byte], version: Int, marker: Byte, flag: Byte,
+final case class SingleTransaction(currentTransactionHash: Array[Byte], version: Long, marker: Byte, flag: Byte,
                                    inCounter: Array[Byte], outCounter: Array[Byte], listOfInputs: Seq[Input],
                                    listOfOutputs: Seq[Output], listOfScriptWitnessItem: Seq[ScriptWitnessItem],
-                                   lockTime: Int)
+                                   lockTime: Long)
 
-final case class BitcoinBlock(blockSize: Int, magicNo: Array[Byte], version: Int, time: Int, bits: Array[Byte],
-                              nonce: Int, transactionCounter: Long, hashPrevBlock: Array[Byte],
+final case class BitcoinBlock(blockSize: Long, magicNo: Array[Byte], version: Long, time: Long, bits: Array[Byte],
+                              nonce: Long, transactionCounter: Long, hashPrevBlock: Array[Byte],
                               hashMerkleRoot: Array[Byte], transactions: Seq[Transaction])
   extends CanAddAuxPOW {
 
@@ -68,18 +68,18 @@ final case class BitcoinBlock(blockSize: Int, magicNo: Array[Byte], version: Int
   }
 }
 
-final case class BitcoinBlockWithAuxPOW(blockSize: Int, magicNo: Array[Byte], version: Int, time: Int,
-                                        bits: Array[Byte], nonce: Int, transactionCounter: Long,
+final case class BitcoinBlockWithAuxPOW(blockSize: Long, magicNo: Array[Byte], version: Long, time: Long,
+                                        bits: Array[Byte], nonce: Long, transactionCounter: Long,
                                         hashPrevBlock: Array[Byte], hashMerkleRoot: Array[Byte],
                                         transactions: Seq[Transaction], auxPOW: AuxPOW)
 
-final case class EnrichedTransaction(version: Int, marker: Byte, flag: Byte, inCounter: Array[Byte],
+final case class EnrichedTransaction(version: Long, marker: Byte, flag: Byte, inCounter: Array[Byte],
                                      outCounter: Array[Byte], listOfInputs: Seq[Input], listOfOutputs: Seq[Output],
-                                     listOfScriptWitnessItem: Seq[ScriptWitnessItem], lockTime: Int,
+                                     listOfScriptWitnessItem: Seq[ScriptWitnessItem], lockTime: Long,
                                      currentTransactionHash: Array[Byte])
 
-final case class EnrichedBitcoinBlock(blockSize: Int, magicNo: Array[Byte], version: Int, time: Int, bits: Array[Byte],
-                                      nonce: Int, transactionCounter: Long, hashPrevBlock: Array[Byte],
+final case class EnrichedBitcoinBlock(blockSize: Long, magicNo: Array[Byte], version: Long, time: Long, bits: Array[Byte],
+                                      nonce: Long, transactionCounter: Long, hashPrevBlock: Array[Byte],
                                       hashMerkleRoot: Array[Byte], transactions: Seq[EnrichedTransaction])
   extends CanAddAuxPOW {
 
@@ -91,23 +91,23 @@ final case class EnrichedBitcoinBlock(blockSize: Int, magicNo: Array[Byte], vers
   }
 }
 
-final case class EnrichedBitcoinBlockWithAuxPOW(blockSize: Int, magicNo: Array[Byte], version: Int, time: Int,
-                                                bits: Array[Byte], nonce: Int, transactionCounter: Long,
+final case class EnrichedBitcoinBlockWithAuxPOW(blockSize: Long, magicNo: Array[Byte], version: Long, time: Long,
+                                                bits: Array[Byte], nonce: Long, transactionCounter: Long,
                                                 hashPrevBlock: Array[Byte], hashMerkleRoot: Array[Byte],
                                                 transactions: Seq[EnrichedTransaction], auxPOW: AuxPOW)
 
-final case class ParentBlockHeader(version: Int, previousBlockHash: Array[Byte], merkleRoot: Array[Byte], time: Int,
-                                   bits: Array[Byte], nonce: Int)
+final case class ParentBlockHeader(version: Long, previousBlockHash: Array[Byte], merkleRoot: Array[Byte], time: Long,
+                                   bits: Array[Byte], nonce: Long)
 
-final case class CoinbaseTransaction(version: Int, inCounter: Array[Byte], outCounter: Array[Byte],
-                                     listOfInputs: Seq[Input], listOfOutputs: Seq[Output], lockTime: Int)
+final case class CoinbaseTransaction(version: Long, inCounter: Array[Byte], outCounter: Array[Byte],
+                                     listOfInputs: Seq[Input], listOfOutputs: Seq[Output], lockTime: Long)
 
 final case class CoinbaseBranch(numberOfLinks: Array[Byte], links: Seq[Array[Byte]], branchSideBitmask: Array[Byte])
 
 final case class AuxBlockChainBranch(numberOfLinks: Array[Byte], links: Seq[Array[Byte]],
                                      branchSideBitmask: Array[Byte])
 
-final case class AuxPOW(version: Int, coinbaseTransaction: CoinbaseTransaction, parentBlockHeaderHash: Array[Byte],
+final case class AuxPOW(version: Long, coinbaseTransaction: CoinbaseTransaction, parentBlockHeaderHash: Array[Byte],
                         coinbaseBranch: CoinbaseBranch, auxBlockChainBranch: AuxBlockChainBranch,
                         parentBlockHeader: ParentBlockHeader)
diff --git a/src/main/scala/org/zuinnote/spark/bitcoin/transaction/BitcoinTransactionRelation.scala b/src/main/scala/org/zuinnote/spark/bitcoin/transaction/BitcoinTransactionRelation.scala
index c4f2133..1f61e30 100644
--- a/src/main/scala/org/zuinnote/spark/bitcoin/transaction/BitcoinTransactionRelation.scala
+++ b/src/main/scala/org/zuinnote/spark/bitcoin/transaction/BitcoinTransactionRelation.scala
@@ -21,7 +21,7 @@ import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.sources.{BaseRelation, TableScan}
 import org.apache.spark.sql.types.StructType
 import org.apache.spark.sql.{Encoders, Row, SQLContext}
-import org.zuinnote.hadoop.bitcoin.format.common.BitcoinTransaction
+import org.zuinnote.hadoop.bitcoin.format.common.BitcoinTransactionWritable
 import org.zuinnote.hadoop.bitcoin.format.mapreduce._
 import org.zuinnote.spark.bitcoin.model._
 
@@ -62,7 +62,7 @@ final case class BitcoinTransactionRelation(location: String,
       .map(Row.fromTuple)
   }
 
-  private def readRawTransactionRDD(): RDD[(BytesWritable, BitcoinTransaction)] = {
+  private def readRawTransactionRDD(): RDD[(BytesWritable, BitcoinTransactionWritable)] = {
     // create hadoopConf
     val hadoopConf = new Configuration()
     hadoopConf.set(AbstractBitcoinRecordReader.CONF_MAXBLOCKSIZE, String.valueOf(maxBlockSize))
@@ -74,7 +74,7 @@ final case class BitcoinTransactionRelation(location: String,
       location,
       classOf[BitcoinTransactionFileInputFormat],
       classOf[BytesWritable],
-      classOf[BitcoinTransaction],
+      classOf[BitcoinTransactionWritable],
       hadoopConf
     )
   }
diff --git a/src/main/scala/org/zuinnote/spark/ethereum/block/EthereumBlockRelation.scala b/src/main/scala/org/zuinnote/spark/ethereum/block/EthereumBlockRelation.scala
index e5abf20..46b633a 100644
--- a/src/main/scala/org/zuinnote/spark/ethereum/block/EthereumBlockRelation.scala
+++ b/src/main/scala/org/zuinnote/spark/ethereum/block/EthereumBlockRelation.scala
@@ -54,7 +54,7 @@ final case class EthereumBlockRelation(location: String,
    * returns EthereumBlocks as rows
    **/
   override def buildScan: RDD[Row] = {
-    val ethereumBlockRDD: RDD[(BytesWritable, common.EthereumBlock)] = readRawBlockRDD()
+    val ethereumBlockRDD: RDD[(BytesWritable, common.EthereumBlockWritable)] = readRawBlockRDD()
 
     if (enrich) {
       ethereumBlockRDD
@@ -67,7 +67,7 @@ final case class EthereumBlockRelation(location: String,
     }
   }
 
-  private def readRawBlockRDD(): RDD[(BytesWritable, common.EthereumBlock)] = {
+  private def readRawBlockRDD(): RDD[(BytesWritable, common.EthereumBlockWritable)] = {
     // create hadoopConf
     val hadoopConf = new Configuration()
     hadoopConf.set(AbstractEthereumRecordReader.CONF_MAXBLOCKSIZE, String.valueOf(maxBlockSize))
@@ -77,7 +77,7 @@ final case class EthereumBlockRelation(location: String,
       location,
       classOf[EthereumBlockFileInputFormat],
      classOf[BytesWritable],
-      classOf[common.EthereumBlock],
+      classOf[common.EthereumBlockWritable],
       hadoopConf
     )
  }
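
Note on the dependency side: downstream projects pick up this release purely through coordinates. A minimal consumer build.sbt line, assuming the artifact keeps the com.github.zuinnote organization used by the hadoopcryptoledger-fileformat dependency above (the coordinates are otherwise illustrative):

// hypothetical consumer project: pin the datasource to the version bumped in this patch
libraryDependencies += "com.github.zuinnote" %% "spark-hadoopcryptoledger-ds" % "1.3.0"

The %% selects the Scala 2.11 or 2.12 artifact, matching the crossScalaVersions declared in this build.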
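Note for users on the schema change: the test edits above all follow one pattern. Fields that were Int in the Row schema (blockSize, version, time, nonce, lockTime) are widened to Long, so every getInt on them becomes getLong. A minimal sketch of what this means for application code, assuming the datasource name and the "magic" option as documented for this datasource; the input path is a placeholder:

import org.apache.spark.sql.SparkSession

object GenesisBlockCheck {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("genesis-block-check").getOrCreate()
    // Load a Bitcoin blockchain file through the datasource touched by this patch.
    val df = spark.read
      .format("org.zuinnote.spark.bitcoin.block")
      .option("magic", "F9BEB4D9")
      .load("/path/to/genesis.blk") // placeholder path
    // The unsigned 32-bit header fields are now LongType in the schema,
    // so rows must be read with getLong; getInt would throw a ClassCastException.
    val row = df.select("blockSize", "version", "time", "nonce").head()
    assert(row.getLong(0) == 285L) // genesis block size, as asserted in the integration specs
    spark.stop()
  }
}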
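Note on the *Writable renames: they only affect code that reads the Hadoop input formats directly instead of going through the Spark SQL relations. A sketch mirroring readRawBlockRDD above for the Bitcoin block format, under the same assumptions (default configuration, placeholder path):

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.BytesWritable
import org.apache.spark.sql.SparkSession
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinBlockWritable
import org.zuinnote.hadoop.bitcoin.format.mapreduce.BitcoinBlockFileInputFormat

object RawBlockCount {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("raw-block-count").getOrCreate()
    val hadoopConf = new Configuration() // defaults; set CONF_MAXBLOCKSIZE etc. as needed
    // With hadoopcryptoledger-fileformat 1.3.0 the value class is BitcoinBlockWritable,
    // not BitcoinBlock, matching the renames applied in this patch.
    val blocks = spark.sparkContext.newAPIHadoopFile(
      "/path/to/blockchain",                // placeholder input directory
      classOf[BitcoinBlockFileInputFormat],
      classOf[BytesWritable],
      classOf[BitcoinBlockWritable],
      hadoopConf
    )
    println(s"blocks read: ${blocks.count()}")
    spark.stop()
  }
}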