
Commit

bump up versions
jornfranke committed Mar 17, 2021
1 parent d2d7a7b commit 1352289
Showing 7 changed files with 76 additions and 76 deletions.
8 changes: 4 additions & 4 deletions build.sbt
@@ -6,20 +6,20 @@ lazy val root = (project in file("."))

name := "spark-hadoopcryptoledger-ds",

version := "1.2.1",
version := "1.3.0",

scalaVersion := "2.11.12",

crossScalaVersions := Seq("2.11.12","2.12.10"),

libraryDependencies ++= Seq(
"com.github.zuinnote" % "hadoopcryptoledger-fileformat" % "1.2.1" % "compile",
"com.github.zuinnote" % "hadoopcryptoledger-fileformat" % "1.3.0" % "compile",

"org.bouncycastle" % "bcprov-ext-jdk15on" % "1.64" % "compile",
"org.bouncycastle" % "bcprov-ext-jdk15on" % "1.68" % "compile",
"org.apache.spark" %% "spark-core" % "2.4.4" % "provided",
"org.apache.spark" %% "spark-sql" % "2.4.4" % "provided",
"org.apache.hadoop" % "hadoop-client" % "2.7.0" % "provided",
"org.apache.logging.log4j" % "log4j-api" % "2.4.1" % "provided",
"org.apache.logging.log4j" % "log4j-api" % "2.14.0" % "provided",

"org.scalatest" %% "scalatest" % "3.1.0" % "test,it",

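For downstream builds, the matching upgrade is a one-line version change. A minimal sketch of a consumer's build.sbt, assuming the data source is published under the same "com.github.zuinnote" group id with a Scala cross-version suffix; these coordinates are assumed, not taken from this commit.

// Hypothetical consumer build.sbt -- coordinates assumed, not part of this commit.
libraryDependencies ++= Seq(
  "com.github.zuinnote" %% "spark-hadoopcryptoledger-ds" % "1.3.0",
  // transitive via the data source, pinned here only to make the bump explicit
  "com.github.zuinnote" % "hadoopcryptoledger-fileformat" % "1.3.0"
)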

Large diffs are not rendered by default.

@@ -118,15 +118,15 @@ class SparkBitcoinTransactionDSSparkMasterIntegrationSpec extends AnyFlatSpec wi
0x7F.toByte, 0xC8.toByte, 0x1B.toByte, 0xC3.toByte, 0x88.toByte, 0x8A.toByte, 0x51.toByte, 0x32.toByte, 0x3A.toByte, 0x9F.toByte, 0xB8.toByte, 0xAA.toByte, 0x4B.toByte, 0x1E.toByte, 0x5E.toByte, 0x4A.toByte)
assert(currentTransactionHashExpected.deep == currentTransactionHash(0).get(0).asInstanceOf[Array[Byte]].deep)
val version = df.select("version").collect
assert(1 == version(0).getInt(0))
assert(1 == version(0).getLong(0))
val inCounter = df.select("inCounter").collect
val inCounterExpected: Array[Byte] = Array(0x01.toByte)
assert(inCounterExpected.deep == inCounter(0).get(0).asInstanceOf[Array[Byte]].deep)
val outCounter = df.select("outCounter").collect
val outCounterExpected: Array[Byte] = Array(0x01.toByte)
assert(outCounterExpected.deep == outCounter(0).get(0).asInstanceOf[Array[Byte]].deep)
val transactionsLockTime = df.select("lockTime").collect
assert(0 == transactionsLockTime(0).getInt(0))
assert(0 == transactionsLockTime(0).getLong(0))
val transactionsLOIDF = df.select(explode(df("listOfInputs")).alias("listOfInputs"))
val prevTransactionHash = transactionsLOIDF.select("listOfInputs.prevTransactionHash").collect
val prevTransactionHashExpected: Array[Byte] = Array(0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte)
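The getInt-to-getLong switches above are the caller-visible side of this commit: version and lockTime (like the other widened block fields) are now exposed as LongType columns. A minimal caller-side sketch, assuming a DataFrame df loaded through the Bitcoin transaction data source as in the test:

// Sketch only: df is assumed to be loaded through the Bitcoin transaction data source.
val row = df.select("version", "lockTime").head()
val version: Long = row.getLong(0)   // row.getInt(0) would now throw a ClassCastException
val lockTime: Long = row.getLong(1)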
@@ -21,7 +21,7 @@ import org.apache.spark.rdd.RDD
import org.apache.spark.sql.sources.{BaseRelation, TableScan}
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.{Encoders, Row, SQLContext}
import org.zuinnote.hadoop.bitcoin.format.common.{BitcoinBlock => RawBitcoinBlock}
import org.zuinnote.hadoop.bitcoin.format.common.{BitcoinBlockWritable => RawBitcoinBlock}
import org.zuinnote.hadoop.bitcoin.format.mapreduce._
import org.zuinnote.spark.bitcoin.model._

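Because the renamed class is imported under the old alias, the body of the relation stays untouched; only the import line changes. A sketch of the pattern (the method name below is assumed by analogy with the transaction relation later in this commit):

// The rename stays confined to the import; internal code keeps using the RawBitcoinBlock alias.
import org.zuinnote.hadoop.bitcoin.format.common.{BitcoinBlockWritable => RawBitcoinBlock}

private def readRawBlockRDD(): RDD[(BytesWritable, RawBitcoinBlock)] = {
  // unchanged apart from the type behind the alias
  ???
}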
38 changes: 19 additions & 19 deletions src/main/scala/org/zuinnote/spark/bitcoin/model/BitcoinBlock.scala
@@ -32,9 +32,9 @@ final case class ScriptWitness(witnessScriptLength: Array[Byte], witnessScript:

final case class ScriptWitnessItem(stackItemCounter: Array[Byte], scriptWitnessList: Seq[ScriptWitness])

final case class Transaction(version: Int, marker: Byte, flag: Byte, inCounter: Array[Byte], outCounter: Array[Byte],
final case class Transaction(version: Long, marker: Byte, flag: Byte, inCounter: Array[Byte], outCounter: Array[Byte],
listOfInputs: Seq[Input], listOfOutputs: Seq[Output],
listOfScriptWitnessItem: Seq[ScriptWitnessItem], lockTime: Int) {
listOfScriptWitnessItem: Seq[ScriptWitnessItem], lockTime: Long) {

private[bitcoin] def enriched(currentTransactionHash: Array[Byte]): EnrichedTransaction = {
EnrichedTransaction(
@@ -44,13 +44,13 @@ final case class Transaction(version: Int, marker: Byte, flag: Byte, inCounter:
}
}

final case class SingleTransaction(currentTransactionHash: Array[Byte], version: Int, marker: Byte, flag: Byte,
final case class SingleTransaction(currentTransactionHash: Array[Byte], version: Long, marker: Byte, flag: Byte,
inCounter: Array[Byte], outCounter: Array[Byte], listOfInputs: Seq[Input],
listOfOutputs: Seq[Output], listOfScriptWitnessItem: Seq[ScriptWitnessItem],
lockTime: Int)
lockTime: Long)

final case class BitcoinBlock(blockSize: Int, magicNo: Array[Byte], version: Int, time: Int, bits: Array[Byte],
nonce: Int, transactionCounter: Long, hashPrevBlock: Array[Byte],
final case class BitcoinBlock(blockSize: Long, magicNo: Array[Byte], version: Long, time: Long, bits: Array[Byte],
nonce: Long, transactionCounter: Long, hashPrevBlock: Array[Byte],
hashMerkleRoot: Array[Byte], transactions: Seq[Transaction])
extends CanAddAuxPOW {

@@ -68,18 +68,18 @@ final case class BitcoinBlock(blockSize: Int, magicNo: Array[Byte], version: Int
}
}

final case class BitcoinBlockWithAuxPOW(blockSize: Int, magicNo: Array[Byte], version: Int, time: Int,
bits: Array[Byte], nonce: Int, transactionCounter: Long,
final case class BitcoinBlockWithAuxPOW(blockSize: Long, magicNo: Array[Byte], version: Long, time: Long,
bits: Array[Byte], nonce: Long, transactionCounter: Long,
hashPrevBlock: Array[Byte], hashMerkleRoot: Array[Byte],
transactions: Seq[Transaction], auxPOW: AuxPOW)

final case class EnrichedTransaction(version: Int, marker: Byte, flag: Byte, inCounter: Array[Byte],
final case class EnrichedTransaction(version: Long, marker: Byte, flag: Byte, inCounter: Array[Byte],
outCounter: Array[Byte], listOfInputs: Seq[Input], listOfOutputs: Seq[Output],
listOfScriptWitnessItem: Seq[ScriptWitnessItem], lockTime: Int,
listOfScriptWitnessItem: Seq[ScriptWitnessItem], lockTime: Long,
currentTransactionHash: Array[Byte])

final case class EnrichedBitcoinBlock(blockSize: Int, magicNo: Array[Byte], version: Int, time: Int, bits: Array[Byte],
nonce: Int, transactionCounter: Long, hashPrevBlock: Array[Byte],
final case class EnrichedBitcoinBlock(blockSize: Long, magicNo: Array[Byte], version: Long, time: Long, bits: Array[Byte],
nonce: Long, transactionCounter: Long, hashPrevBlock: Array[Byte],
hashMerkleRoot: Array[Byte], transactions: Seq[EnrichedTransaction])
extends CanAddAuxPOW {

@@ -91,23 +91,23 @@ final case class EnrichedBitcoinBlock(blockSize: Int, magicNo: Array[Byte], vers
}
}

final case class EnrichedBitcoinBlockWithAuxPOW(blockSize: Int, magicNo: Array[Byte], version: Int, time: Int,
bits: Array[Byte], nonce: Int, transactionCounter: Long,
final case class EnrichedBitcoinBlockWithAuxPOW(blockSize: Long, magicNo: Array[Byte], version: Long, time: Long,
bits: Array[Byte], nonce: Long, transactionCounter: Long,
hashPrevBlock: Array[Byte], hashMerkleRoot: Array[Byte],
transactions: Seq[EnrichedTransaction], auxPOW: AuxPOW)

final case class ParentBlockHeader(version: Int, previousBlockHash: Array[Byte], merkleRoot: Array[Byte], time: Int,
bits: Array[Byte], nonce: Int)
final case class ParentBlockHeader(version: Long, previousBlockHash: Array[Byte], merkleRoot: Array[Byte], time: Long,
bits: Array[Byte], nonce: Long)

final case class CoinbaseTransaction(version: Int, inCounter: Array[Byte], outCounter: Array[Byte],
listOfInputs: Seq[Input], listOfOutputs: Seq[Output], lockTime: Int)
final case class CoinbaseTransaction(version: Long, inCounter: Array[Byte], outCounter: Array[Byte],
listOfInputs: Seq[Input], listOfOutputs: Seq[Output], lockTime: Long)

final case class CoinbaseBranch(numberOfLinks: Array[Byte], links: Seq[Array[Byte]], branchSideBitmask: Array[Byte])

final case class AuxBlockChainBranch(numberOfLinks: Array[Byte], links: Seq[Array[Byte]],
branchSideBitmask: Array[Byte])

final case class AuxPOW(version: Int, coinbaseTransaction: CoinbaseTransaction, parentBlockHeaderHash: Array[Byte],
final case class AuxPOW(version: Long, coinbaseTransaction: CoinbaseTransaction, parentBlockHeaderHash: Array[Byte],
coinbaseBranch: CoinbaseBranch, auxBlockChainBranch: AuxBlockChainBranch,
parentBlockHeader: ParentBlockHeader)

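The practical effect of the Int-to-Long widening shows up in the Spark schema derived from these case classes. A small sketch that prints the encoder-derived schema for SingleTransaction; no SparkSession is needed for this:

// Inspect the schema Spark derives from the widened case class.
import org.apache.spark.sql.Encoders
import org.zuinnote.spark.bitcoin.model.SingleTransaction

val schema = Encoders.product[SingleTransaction].schema
schema.printTreeString()
// version and lockTime are now reported as long instead of integer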
@@ -21,7 +21,7 @@ import org.apache.spark.rdd.RDD
import org.apache.spark.sql.sources.{BaseRelation, TableScan}
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.{Encoders, Row, SQLContext}
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinTransaction
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinTransactionWritable
import org.zuinnote.hadoop.bitcoin.format.mapreduce._
import org.zuinnote.spark.bitcoin.model._

@@ -62,7 +62,7 @@ final case class BitcoinTransactionRelation(location: String,
.map(Row.fromTuple)
}

private def readRawTransactionRDD(): RDD[(BytesWritable, BitcoinTransaction)] = {
private def readRawTransactionRDD(): RDD[(BytesWritable, BitcoinTransactionWritable)] = {
// create hadoopConf
val hadoopConf = new Configuration()
hadoopConf.set(AbstractBitcoinRecordReader.CONF_MAXBLOCKSIZE, String.valueOf(maxBlockSize))
@@ -74,7 +74,7 @@ final case class BitcoinTransactionRelation(location: String,
location,
classOf[BitcoinTransactionFileInputFormat],
classOf[BytesWritable],
classOf[BitcoinTransaction],
classOf[BitcoinTransactionWritable],
hadoopConf
)
}
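Outside the DataFrame API, the renamed Writable can be consumed directly with the same newAPIHadoopFile call the relation uses. A minimal sketch, assuming an existing SparkSession named spark and a directory of raw blk*.dat files:

// Raw RDD read with the renamed Writable class, mirroring readRawTransactionRDD above.
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.BytesWritable
import org.zuinnote.hadoop.bitcoin.format.common.BitcoinTransactionWritable
import org.zuinnote.hadoop.bitcoin.format.mapreduce.BitcoinTransactionFileInputFormat

val hadoopConf = new Configuration()
val transactions = spark.sparkContext.newAPIHadoopFile(
  "/path/to/blocks",                        // directory containing raw blk*.dat files
  classOf[BitcoinTransactionFileInputFormat],
  classOf[BytesWritable],
  classOf[BitcoinTransactionWritable],
  hadoopConf
)
println(s"transactions read: ${transactions.count()}")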
@@ -54,7 +54,7 @@ final case class EthereumBlockRelation(location: String,
* returns EthereumBlocks as rows
**/
override def buildScan: RDD[Row] = {
val ethereumBlockRDD: RDD[(BytesWritable, common.EthereumBlock)] = readRawBlockRDD()
val ethereumBlockRDD: RDD[(BytesWritable, common.EthereumBlockWritable)] = readRawBlockRDD()

if (enrich) {
ethereumBlockRDD
@@ -67,7 +67,7 @@ final case class EthereumBlockRelation(location: String,
}
}

private def readRawBlockRDD(): RDD[(BytesWritable, common.EthereumBlock)] = {
private def readRawBlockRDD(): RDD[(BytesWritable, common.EthereumBlockWritable)] = {
// create hadoopConf
val hadoopConf = new Configuration()
hadoopConf.set(AbstractEthereumRecordReader.CONF_MAXBLOCKSIZE, String.valueOf(maxBlockSize))
@@ -77,7 +77,7 @@ final case class EthereumBlockRelation(location: String,
location,
classOf[EthereumBlockFileInputFormat],
classOf[BytesWritable],
classOf[common.EthereumBlock],
classOf[common.EthereumBlockWritable],
hadoopConf
)
}
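For end users, the Writable rename stays internal to the relation; reading Ethereum blocks still goes through the DataFrame data source. A usage sketch, assuming an existing SparkSession named spark; the format string and the enrich option name are assumed here, not confirmed by this commit:

// DataFrame-level read; the schema comes from EthereumBlockRelation above.
val ethereumDf = spark.read
  .format("org.zuinnote.spark.ethereum.block")
  .option("enrich", "false")
  .load("/path/to/ethereum/blocks")
ethereumDf.printSchema()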

0 comments on commit 1352289
