Revert "Merge pull request #337 from NICTA/topic/scalaz-7.1-upgrade"
This reverts commit 3f5391a, reversing
changes made to 1a004fe.

Conflicts:
	project/dependencies.scala
charleso committed Sep 4, 2014
1 parent e956c7e commit 1077768
Showing 11 changed files with 37 additions and 38 deletions.
project/dependencies.scala (2 changes: 1 addition & 1 deletion)
@@ -47,7 +47,7 @@ object dependencies {
else if (version.contains("cdh5")) Seq("com.nicta" %% "scoobi-compatibility-cdh5" % "1.0.2")
else Seq("com.nicta" %% "scoobi-compatibility-hadoop2" % "1.0.2")

-def scalaz(scalazVersion: String = "7.1.0") = Seq(
+def scalaz(scalazVersion: String = "7.0.6") = Seq(
"org.scalaz" %% "scalaz-core" % scalazVersion,
"org.scalaz" %% "scalaz-iteratee" % scalazVersion,
"org.scalaz" %% "scalaz-concurrent" % scalazVersion,
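The only change here is the default version in the scalaz dependency helper, so any call site that does not pass an explicit version follows the downgrade to 7.0.6 automatically. A minimal sketch of how an sbt build might consume the helper; the libraryDependencies wiring below is an assumption, not part of this diff:

// Sketch only: assumed sbt wiring around the dependencies.scalaz helper shown above.
import sbt._
import Keys._

object build {
  lazy val scalazSettings = Seq(
    libraryDependencies ++= dependencies.scalaz()         // picks up the 7.0.6 default
    // dependencies.scalaz("7.0.7") would still pin another version explicitly
  )
}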
src/main/scala/com/nicta/scoobi/application/HadoopLogFactory.scala (22 changes: 11 additions & 11 deletions)
@@ -95,15 +95,15 @@ object HadoopLogFactory {
def setLogFactory(name: String = classOf[HadoopLogFactory].getName,
quiet: Boolean = false,
showTimes: Boolean = false,
-level: Level = INFO,
+level: String = INFO,
categories: String = ".*") {
// release any previously set LogFactory for this class loader
LogFactory.release(Thread.currentThread.getContextClassLoader)
setLogFactoryName(name)
setAttributes(quiet, showTimes, level, categories)
}

-def setAttributes(quiet: Boolean, showTimes: Boolean, level: Level, categories: String) {
+def setAttributes(quiet: Boolean, showTimes: Boolean, level: String, categories: String) {
setQuiet(quiet)
setShowTimes(showTimes)
setLogLevel(level)
@@ -120,7 +120,7 @@ object HadoopLogFactory {
def setShowTimes(showTimes: Boolean = false) {
LogFactory.getFactory.setAttribute(SHOW_TIMES, showTimes)
}
-def setLogLevel(level: Level = INFO) {
+def setLogLevel(level: String = INFO) {
LogFactory.getFactory.setAttribute(LOG_LEVEL, level)
}
def setLogCategories(categories: String = ".*") {
@@ -136,16 +136,16 @@ object HadoopLogFactory {
FATAL -> SimpleLog.LOG_LEVEL_FATAL,
OFF -> SimpleLog.LOG_LEVEL_OFF)

-lazy val allLevels = levelsMappings.keys.map(_.level).toSet
+lazy val allLevels = levelsMappings.keys.map(_.toString).toSet


-lazy val ALL : Level = Level("ALL" )
-lazy val TRACE: Level = Level("TRACE")
-lazy val INFO : Level = Level("INFO" )
-lazy val WARN : Level = Level("WARN" )
-lazy val ERROR: Level = Level("ERROR")
-lazy val FATAL: Level = Level("FATAL")
-lazy val OFF : Level = Level("OFF" )
+lazy val ALL : Level = level("ALL" )
+lazy val TRACE: Level = level("TRACE")
+lazy val INFO : Level = level("INFO" )
+lazy val WARN : Level = level("WARN" )
+lazy val ERROR: Level = level("ERROR")
+lazy val FATAL: Level = level("FATAL")
+lazy val OFF : Level = level("OFF" )

}

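With the tagged type restored (see Levels.scala below), a log level is once again just a String, so it can be looked up against the commons-logging SimpleLog constants without any unwrapping. A simplified, self-contained sketch of that mapping; the names levels and simpleLogLevel are mine, not the file's:

// Simplified sketch (not the file's exact code): levels as plain strings mapped to SimpleLog constants.
import org.apache.commons.logging.impl.SimpleLog

object LogLevelSketch {
  val levels: Map[String, Int] = Map(
    "TRACE" -> SimpleLog.LOG_LEVEL_TRACE,
    "INFO"  -> SimpleLog.LOG_LEVEL_INFO,
    "WARN"  -> SimpleLog.LOG_LEVEL_WARN,
    "ERROR" -> SimpleLog.LOG_LEVEL_ERROR,
    "FATAL" -> SimpleLog.LOG_LEVEL_FATAL,
    "OFF"   -> SimpleLog.LOG_LEVEL_OFF)

  // A tagged Level needs no unwrapping here: under scalaz 7.0.x it is a subtype of String.
  def simpleLogLevel(level: String): Int =
    levels.getOrElse(level.toUpperCase, SimpleLog.LOG_LEVEL_INFO)
}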
src/main/scala/com/nicta/scoobi/application/Levels.scala (7 changes: 6 additions & 1 deletion)
@@ -16,7 +16,12 @@
package com.nicta.scoobi
package application

+import scalaz.@@

object Levels {

-case class Level(level: String) extends AnyVal
+trait AsLevel
+type Level = String @@ AsLevel
+def level(l: String): Level = l.asInstanceOf[Level]

}
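This is the heart of the revert: Level goes back to being a scalaz 7.0-style tagged String instead of a value class. Under scalaz 7.0.x a value of type String @@ AsLevel is a subtype of String, so it can be used anywhere a plain string is expected; scalaz 7.1 made tags opaque (they require explicit unwrapping), which is presumably why the upgrade had introduced the case class in the first place. A minimal sketch of the restored encoding, assuming scalaz 7.0.x on the classpath:

// Minimal sketch of the restored 7.0-style tagged type (assumes scalaz 7.0.x).
import scalaz.@@

object LevelsSketch {
  trait AsLevel
  type Level = String @@ AsLevel
  def level(l: String): Level = l.asInstanceOf[Level]

  val warn: Level = level("WARN")
  // No Tag.unwrap needed: under scalaz 7.0.x a tagged String is still a String.
  val asPlainString: String = warn
}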
src/main/scala/com/nicta/scoobi/application/ScoobiApp.scala (5 changes: 2 additions & 3 deletions)
@@ -16,7 +16,6 @@
package com.nicta.scoobi
package application

-import Levels._
import impl.io.FileSystems
import impl.reflect.{ClasspathDiagnostics, Classes}
import org.apache.commons.logging.LogFactory
@@ -91,8 +90,8 @@ trait ScoobiApp extends ScoobiCommandLineArgs with ScoobiAppConfiguration with H
// so that we know if configuration files must be read or not
set(arguments)
HadoopLogFactory.setLogFactory(classOf[HadoopLogFactory].getName, quiet, showTimes, level, categories)
configuration.set("mapred.map.child.log.level", level.level)
configuration.set("mapred.reduce.child.log.level", level.level)
configuration.set("mapred.map.child.log.level", level)
configuration.set("mapred.reduce.child.log.level", level)

logger.debug("parsing the hadoop arguments "+ arguments.mkString(", "))
configuration.withHadoopArgs(arguments) { remainingArgs =>
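Because level is again a tagged String, it can be handed straight to Hadoop's Configuration.set, and both the .level accessor and the import Levels._ become unnecessary. A small sketch of the call, assuming the standard Hadoop Configuration API:

// Sketch only: level may be a tagged String (Level); no accessor or unwrapping is required.
import org.apache.hadoop.conf.Configuration

object LogLevelConfigSketch {
  def applyChildLogLevel(configuration: Configuration, level: String): Unit = {
    configuration.set("mapred.map.child.log.level", level)
    configuration.set("mapred.reduce.child.log.level", level)
  }
}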
@@ -91,7 +91,7 @@ trait ScoobiUserArgs extends ScoobiArgs {

private[scoobi]
def extractLevel(args: Seq[String]): Level =
-args.filter(a => allLevels contains a.toUpperCase).map(l => Level(l.toUpperCase)).headOption.getOrElse(INFO)
+args.filter(a => allLevels contains a.toUpperCase).map(l => l.toUpperCase.asInstanceOf[Level]).headOption.getOrElse(INFO)

/**
* extract the categories as a regular expression from the scoobi arguments, once all the other argument names have been
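With no Level constructor to call, extractLevel builds the tagged value with a cast and falls back to INFO when no recognised level appears among the arguments. A self-contained behavioural sketch; the names mirror the diff but are simplified:

// Behavioural sketch of extractLevel after the revert (assumes scalaz 7.0.x; simplified names).
import scalaz.@@

object ExtractLevelSketch {
  trait AsLevel
  type Level = String @@ AsLevel
  val allLevels = Set("ALL", "TRACE", "INFO", "WARN", "ERROR", "FATAL", "OFF")
  val INFO: Level = "INFO".asInstanceOf[Level]

  def extractLevel(args: Seq[String]): Level =
    args.filter(a => allLevels contains a.toUpperCase)
        .map(l => l.toUpperCase.asInstanceOf[Level])
        .headOption.getOrElse(INFO)

  // extractLevel(Seq("run", "warn")) == "WARN"; extractLevel(Seq("run")) == "INFO"
}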
src/main/scala/com/nicta/scoobi/core/DList.scala (2 changes: 1 addition & 1 deletion)
@@ -298,7 +298,7 @@ trait DList[A] extends DataSinks with Persistent[Seq[A]] {
// Oh, no. We have a key collision, so let's try randomize as much as we can
else if (util.Random.nextBoolean()) scalaz.Ordering.LT
else scalaz.Ordering.GT
-override def groupCompare(a: Int, b: Int) = scalaz.Order.fromScalaOrdering[Int].order(a,b)
+override def groupCompare(a: Int, b: Int) = implicitly[scalaz.Order[Int]].order(a,b)
}
groupWith(_ => util.Random.nextInt())(sgp).mapFlatten(_._2)
}
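Both versions of groupCompare compute the same thing; the revert simply resolves the Order[Int] instance implicitly instead of wrapping scala.math.Ordering. A small sketch, assuming scalaz 7.0.x and its standard instances:

// Sketch only: resolving the scalaz Order[Int] instance implicitly (assumes scalaz 7.0.x).
import scalaz.{Order, Ordering}
import scalaz.std.anyVal._   // provides the implicit Order[Int]

object OrderSketch {
  val cmp: Ordering = implicitly[Order[Int]].order(1, 2)   // Ordering.LT
  // The removed line built an equivalent instance by wrapping scala.math.Ordering instead.
}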
src/main/scala/com/nicta/scoobi/core/Reduction.scala (27 changes: 13 additions & 14 deletions)
@@ -18,7 +18,6 @@ package core

import scalaz.{BijectionT, Semigroup, Kleisli, Cokleisli, Endo, Store, Order, Equal, Digit, Writer, Compose,
NonEmptyList, EphemeralStream, Validation, \/-, -\/, \/, State, Ordering, Failure, Success, Apply}
-import scalaz.Kleisli.kleisli
import scalaz.syntax.equal._
import BijectionT._
import scala.reflect.ClassTag
@@ -262,40 +261,40 @@ trait Reduction[A] {
/**
* Takes a reduction to a reduction on a unary function in an environment (Q).
*/
-def pointwiseK[Q[_], B](implicit A: Apply[Q]): Reduction[Kleisli[Q, B, A]] =
-Reduction((g, h) => kleisli(
+def pointwiseK[Q[+_], B](implicit A: Apply[Q]): Reduction[Kleisli[Q, B, A]] =
+Reduction((g, h) => Kleisli(
b => A.apply2(g(b), h(b))(reduce(_, _))
))

/**
* Takes a reduction to a reduction on a binary function with an environment (Q) in return position.
*/
-def pointwise2K[Q[_], B, C](implicit A: Apply[Q]): Reduction[Kleisli[Q, (B, C), A]] =
-Reduction((g, h) => kleisli {
+def pointwise2K[Q[+_], B, C](implicit A: Apply[Q]): Reduction[Kleisli[Q, (B, C), A]] =
+Reduction((g, h) => Kleisli {
case (b, c) => A.apply2(g(b, c), h(b, c))(reduce(_, _))
})

/**
* Takes a reduction to a reduction on a ternary function with an environment (Q) in return position.
*/
-def pointwise3K[Q[_], B, C, D](implicit A: Apply[Q]): Reduction[Kleisli[Q, (B, C, D), A]] =
-Reduction((g, h) => kleisli {
+def pointwise3K[Q[+_], B, C, D](implicit A: Apply[Q]): Reduction[Kleisli[Q, (B, C, D), A]] =
+Reduction((g, h) => Kleisli {
case (b, c, d) => A.apply2(g(b, c, d), h(b, c, d))(reduce(_, _))
})

/**
* Takes a reduction to a reduction on an arity-4 function with an environment (Q) in return position.
*/
-def pointwise4K[Q[_], B, C, D, E](implicit A: Apply[Q]): Reduction[Kleisli[Q, (B, C, D, E), A]] =
-Reduction((g, h) => kleisli {
+def pointwise4K[Q[+_], B, C, D, E](implicit A: Apply[Q]): Reduction[Kleisli[Q, (B, C, D, E), A]] =
+Reduction((g, h) => Kleisli {
case (b, c, d, e) => A.apply2(g(b, c, d, e), h(b, c, d, e))(reduce(_, _))
})

/**
* Takes a reduction to a reduction on an arity-5 function with an environment (Q) in return position.
*/
-def pointwise5K[Q[_], B, C, D, E, F](implicit A: Apply[Q]): Reduction[Kleisli[Q, (B, C, D, E, F), A]] =
-Reduction((g, h) => kleisli {
+def pointwise5K[Q[+_], B, C, D, E, F](implicit A: Apply[Q]): Reduction[Kleisli[Q, (B, C, D, E, F), A]] =
+Reduction((g, h) => Kleisli {
case (b, c, d, e, f) => A.apply2(g(b, c, d, e, f), h(b, c, d, e, f))(reduce(_, _))
})

@@ -365,21 +364,21 @@ trait Reduction[A] {
/**
* Lifts this reduction to a reduction with an environment.
*/
-def lift[F[_]](implicit A: Apply[F]): Reduction[F[A]] =
+def lift[F[+_]](implicit A: Apply[F]): Reduction[F[A]] =
Reduction((a1, a2) =>
A.apply2(a1, a2)(reduce(_, _)))

/**
* Takes a reduction to a reduction on state.
*/
def state[S]: Reduction[State[S, A]] =
-lift[({type lam[a]=State[S, a]})#lam]
+lift[({type lam[+a]=State[S, a]})#lam]

/**
* Takes a reduction to a reduction on writer.
*/
def writer[W: Semigroup]: Reduction[Writer[W, A]] =
-lift[({type lam[a]=Writer[W, a]})#lam]
+lift[({type lam[+a]=Writer[W, a]})#lam]

/**
* Maps a pair of functions on a reduction to produce a reduction.
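Two things change throughout this file for scalaz 7.0.6: the type constructors handed to Kleisli and Apply carry the covariance annotation (Q[+_], F[+_], lam[+a]) that the 7.0.x signatures expect, and Kleisli values are built with the Kleisli(...) constructor rather than the kleisli helper. A sketch of what the pointwiseK family computes, specialised to Option and with the Reduction wrapper left out (assumes scalaz 7.0.x):

// Sketch only: pointwise combination of two Kleisli arrows through Apply[Option].
import scalaz.{Apply, Kleisli}
import scalaz.std.option._   // Apply[Option]

object PointwiseSketch {
  def reduce(x: Int, y: Int): Int = x + y   // stand-in for a Reduction[Int]

  val g = Kleisli((b: Int) => Option(b * 2))
  val h = Kleisli((b: Int) => Option(b + 1))

  // Combine the two arrows pointwise, as pointwiseK does:
  val combined: Kleisli[Option, Int, Int] =
    Kleisli(b => Apply[Option].apply2(g(b), h(b))(reduce))

  // combined(3) == Some(10), i.e. reduce(6, 4)
}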
src/main/scala/com/nicta/scoobi/lib/Relational.scala (2 changes: 1 addition & 1 deletion)
@@ -288,7 +288,7 @@ object Relational {

implicit val grouping = new Grouping[(K, Int)] {
def groupCompare(a: (K, Int), b: (K, Int)) =
-implicitly[Grouping[K]].groupCompare(a._1, b._1) |+| scalaz.Order.fromScalaOrdering[Int].order(a._2, b._2)
+implicitly[Grouping[K]].groupCompare(a._1, b._1) |+| a._2 ?|? b._2
}

Relational.join(
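The restored groupCompare composes the two comparisons lexicographically with scalaz: ?|? compares the second components through their Order instance, and |+| uses the Ordering semigroup, which keeps the left result unless it is EQ. A small sketch of that composition, assuming scalaz 7.0.x:

// Sketch only: lexicographic composition of two comparisons.
import scalaz.Ordering
import scalaz.std.anyVal._        // Order[Int]
import scalaz.syntax.order._      // ?|?
import scalaz.syntax.semigroup._  // |+|

object GroupCompareSketch {
  val decidedByFirst:  Ordering = (1 ?|? 2) |+| (9 ?|? 0)   // LT: the first comparison already decides
  val decidedBySecond: Ordering = (1 ?|? 1) |+| (9 ?|? 0)   // GT: the first is EQ, so the second decides
}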
src/test/scala/com/nicta/scoobi/acceptance/ScaldingSpec.scala (2 changes: 0 additions & 2 deletions)
@@ -26,8 +26,6 @@ import Movies._
class ScaldingSpec extends NictaSimpleJobs {
"Translation of http://blog.echen.me/2012/02/09/movie-recommendations-and-more-via-mapreduce-and-scalding to Scoobi" >> { implicit sc: SC =>

-implicit val IntOrdering = scalaz.Order.fromScalaOrdering[Int]

// very small list of ratings
val ratings = DList(
Rating(1, 1, 1),
@@ -55,8 +55,6 @@ object SecondarySort {
import syntax.semigroup._
import syntax.order._

-implicit val StringOrdering = scalaz.Order.fromScalaOrdering[String]

val secondary: Grouping[(FirstName, LastName)] = new Grouping[(FirstName, LastName)] {
override def partition(key: (FirstName, LastName), howManyReducers: Int): Int =
implicitly[Grouping[FirstName]].partition(key._1, howManyReducers)
@@ -41,7 +41,7 @@ class ScoobiAppSpec extends UnitSpecification with Tables {
}
"By defaults logs must be displayed, at the INFO level" >> {
app.quiet aka "quiet" must beFalse
-app.level.level must_== "INFO"
+app.level must_== "INFO"
}
"If 'quiet' is passed on the command line then there must be no logs" >> {
val app = new ScoobiApp { def run {} }
