From 8241424df88bf368e69dd9c845c7d14c0fb3d581 Mon Sep 17 00:00:00 2001 From: Tony Sloane Date: Thu, 15 May 2014 12:47:33 +1000 Subject: [PATCH 1/2] Actually move to Kiama 1.6.0-SNAPSHOT --- project/dependencies.scala | 2 +- src/test/scala/com/nicta/scoobi/guide/Deployment.scala | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/project/dependencies.scala b/project/dependencies.scala index edba2a3f9..cae24928c 100644 --- a/project/dependencies.scala +++ b/project/dependencies.scala @@ -31,7 +31,7 @@ object dependencies { "org.apache.avro" % "avro" % "1.7.4", "com.thoughtworks.xstream" % "xstream" % "1.4.4" intransitive(), "javassist" % "javassist" % "3.12.1.GA", - "com.googlecode.kiama" %% "kiama" % "1.5.2", + "com.googlecode.kiama" %% "kiama" % "1.6.0-SNAPSHOT", "com.chuusai" % "shapeless_2.10.3" % "2.0.0-M1", "org.apache.commons" % "commons-math" % "2.2" % "test", "org.apache.commons" % "commons-compress" % "1.0" % "test") diff --git a/src/test/scala/com/nicta/scoobi/guide/Deployment.scala b/src/test/scala/com/nicta/scoobi/guide/Deployment.scala index 0c7c42bba..9851c02ed 100644 --- a/src/test/scala/com/nicta/scoobi/guide/Deployment.scala +++ b/src/test/scala/com/nicta/scoobi/guide/Deployment.scala @@ -78,7 +78,7 @@ However, we do need some of Scoobi's dependencies -- so we have to add them in m "org.apache.avro" % "avro" % "1.7.4", // Note: you only need this if you use it "com.thoughtworks.xstream" % "xstream" % "1.4.4" intransitive(), "javassist" % "javassist" % "3.12.1.GA", -"com.googlecode.kiama" %% "kiama" % "1.5.2", +"com.googlecode.kiama" %% "kiama" % "1.6.0-SNAPSHOT", "com.chuusai" % "shapeless_2.10.2" % "2.0.0-M1", ``` @@ -123,7 +123,7 @@ libraryDependencies ++= Seq( "org.scalaz" %% "scalaz-core" % "7.0.2", "com.thoughtworks.xstream" % "xstream" % "1.4.4" intransitive(), "javassist" % "javassist" % "3.12.1.GA", - "com.googlecode.kiama" %% "kiama" % "1.5.2", + "com.googlecode.kiama" %% "kiama" % "1.6.0-SNAPSHOT", "com.chuusai" % "shapeless_2.10.2" % "2.0.0-M1", ) @@ -144,7 +144,7 @@ libraryDependencies ++= Seq( "org.scala-lang" % "scala-compiler" % "2.10.3", "com.thoughtworks.xstream" % "xstream" % "1.4.4" intransitive(), "javassist" % "javassist" % "3.12.1.GA", - "com.googlecode.kiama" %% "kiama" % "1.5.2", + "com.googlecode.kiama" %% "kiama" % "1.6.0-SNAPSHOT", "com.chuusai" % "shapeless_2.10.2" % "2.0.0-M1", ) @@ -157,7 +157,7 @@ libraryDependencies ++= Seq( "org.scala-lang" % "scala-compiler" % "2.10.3", "com.thoughtworks.xstream" % "xstream" % "1.4.4" intransitive(), "javassist" % "javassist" % "3.12.1.GA", - "com.googlecode.kiama" %% "kiama" % "1.5.2", + "com.googlecode.kiama" %% "kiama" % "1.6.0-SNAPSHOT", "com.chuusai" % "shapeless_2.10.2" % "2.0.0-M1" ) From 8349096fe55debe66b9922ae41a30c66256c2328 Mon Sep 17 00:00:00 2001 From: Tony Sloane Date: Thu, 15 May 2014 13:12:43 +1000 Subject: [PATCH 2/2] Tighten types of rewrite rule applications, other simplifications --- .../com/nicta/scoobi/impl/plan/comp/Optimiser.scala | 10 +++++----- .../com/nicta/scoobi/impl/plan/comp/CompNodeData.scala | 5 ++--- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/src/main/scala/com/nicta/scoobi/impl/plan/comp/Optimiser.scala b/src/main/scala/com/nicta/scoobi/impl/plan/comp/Optimiser.scala index 2257f25a7..ab1d78a7f 100644 --- a/src/main/scala/com/nicta/scoobi/impl/plan/comp/Optimiser.scala +++ b/src/main/scala/com/nicta/scoobi/impl/plan/comp/Optimiser.scala @@ -62,7 +62,7 @@ trait Optimiser extends CompNodes with MemoRewriter { */ def parDoFuse = 
traverseSomebu(parDoFuseRule) - def parDoFuseRule = rule[Any] { + def parDoFuseRule = rule[ParallelDo] { case p2 @ ParallelDo((p1: ParallelDo) +: rest,_,_,_,_,_,_) if rest.isEmpty && uses(p1).filterNot(_ == p2).isEmpty && @@ -101,8 +101,8 @@ trait Optimiser extends CompNodes with MemoRewriter { /** * add a map to output values to non-filled sink nodes if there are some */ - def addParallelDoForNonFilledSinks = oncebu(rule[Any] { - case p: ProcessNode if p.sinks.exists(!hasBeenFilled) && p.sinks.exists(hasBeenFilled) => + def addParallelDoForNonFilledSinks = oncebu(rule[ProcessNode] { + case p if p.sinks.exists(!hasBeenFilled) && p.sinks.exists(hasBeenFilled) => logger.debug("add a parallelDo node to output non-filled sinks of "+p) ParallelDo.create(p)(p.wf).copy(nodeSinks = p.sinks.filterNot(hasBeenFilled)) }) @@ -124,7 +124,7 @@ trait Optimiser extends CompNodes with MemoRewriter { } /** duplicate the whole graph by copying all nodes */ - lazy val duplicate = (node: CompNode) => rewrite(everywhere(rule[Any] { + lazy val duplicate = (node: CompNode) => rewrite(everywhere(rule[CompNode] { case n: Op => n.copy() case n: Materialise => n.copy() case n: GroupByKey => n.copy() @@ -163,7 +163,7 @@ trait Optimiser extends CompNodes with MemoRewriter { } } - val truncateRule = rule[Any] { case n: Any => + val truncateRule = rule[Any] { case n => if (condition(n)) truncateNode(n) else n } diff --git a/src/test/scala/com/nicta/scoobi/impl/plan/comp/CompNodeData.scala b/src/test/scala/com/nicta/scoobi/impl/plan/comp/CompNodeData.scala index da4a275c7..ffa8e2820 100644 --- a/src/test/scala/com/nicta/scoobi/impl/plan/comp/CompNodeData.scala +++ b/src/test/scala/com/nicta/scoobi/impl/plan/comp/CompNodeData.scala @@ -101,9 +101,8 @@ object CompNodeData { outer => * a different order */ def normalise(result: Any) = rewrite { - everywherebu(rule[Any] { - case iterable: Iterable[_] => Vector(iterable.iterator.toSeq.sortBy(_.toString):_*) - case other => other + everywherebu(rule[Iterable[_]] { + case iterable => Vector(iterable.iterator.toSeq.sortBy(_.toString):_*) }) }(result).toString
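
A note on the first patch: 1.6.0-SNAPSHOT is not a released version, so sbt will not resolve it from Maven Central alone. A minimal sketch of the extra build setting that is typically needed, assuming the Kiama snapshot is published to the Sonatype OSS snapshots repository (not something this patch states):

```scala
// build.sbt (sketch): extra resolver assumed to be needed for the SNAPSHOT dependency.
// Assumption: Kiama snapshots are published to the Sonatype OSS snapshots repository.
resolvers += Resolver.sonatypeRepo("snapshots")

libraryDependencies += "com.googlecode.kiama" %% "kiama" % "1.6.0-SNAPSHOT"
```

Once 1.6.0 is released, the resolver can be dropped again and the version pinned to the final release.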
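The second patch leans on Kiama's typed rules: a rule[T] is only applied when the subject term is an instance of T and simply fails otherwise, so traversals such as everywherebu and oncebu skip non-matching nodes. That is what allows the catch-all "case other => other" branch in normalise to be dropped and the rule types to be narrowed to ParallelDo, ProcessNode, CompNode and Iterable[_]. Below is a minimal, self-contained sketch of that behaviour on a hypothetical toy AST (not Scoobi's CompNode hierarchy), assuming Kiama 1.6.x semantics in which a type mismatch makes the strategy fail rather than raise an error:

```scala
// Sketch only: a toy AST standing in for Scoobi's CompNode hierarchy.
// Assumes Kiama 1.6.x on the classpath ("com.googlecode.kiama" %% "kiama"),
// where a typed rule fails (and is skipped by the surrounding traversal)
// when the subject term is not an instance of the rule's type parameter.
import org.kiama.rewriting.Rewriter._

object TypedRuleSketch extends App {
  sealed trait Node
  case class Leaf(value: Int)         extends Node
  case class Branch(l: Node, r: Node) extends Node

  // Typed rule: it only ever sees Leaf terms, so no catch-all case is needed
  val incLeaves = rule[Leaf] { case Leaf(v) => Leaf(v + 1) }

  // everywherebu tries the rule bottom-up at every node; on Branch (and Int)
  // terms the rule fails and the node is left untouched
  val tree = Branch(Leaf(1), Branch(Leaf(2), Leaf(3)))
  println(rewrite(everywherebu(incLeaves))(tree))
  // expected: Branch(Leaf(2),Branch(Leaf(3),Leaf(4)))
}
```

The same reasoning applies to the untouched truncateRule, which still matches every node and therefore keeps its rule[Any] type.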