Skip to content

Commit

Permalink
Modified project structure to work with buildr
Browse files · Browse the repository at this point in the history
  • Loading branch information
mateiz committed Nov 14, 2010
1 parent f8966ff commit b84769a
Show file tree
Hide file tree
Showing 52 changed files with 47 additions and 7 deletions.
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -8,3 +8,5 @@ third_party/libmesos.dylib
conf/java-opts
conf/spark-env.sh
conf/log4j.properties
target
reports
2 changes: 1 addition & 1 deletion alltests
Original file line number Diff line number Diff line change
Expand Up @@ -8,4 +8,4 @@ if [ -d $RESULTS_DIR ]; then
rm -r $RESULTS_DIR
fi
mkdir -p $RESULTS_DIR
$FWDIR/run org.scalatest.tools.Runner -p $FWDIR/build/classes -u $RESULTS_DIR -o $@
$FWDIR/run org.scalatest.tools.Runner -p $FWDIR/target/test/classes -u $RESULTS_DIR -o $@
22 changes: 22 additions & 0 deletions buildfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
# Buildr build definition for Spark (replaces the old ad-hoc build/ layout).
# Requires the buildr gem with Scala support; run with `buildr package` / `buildr test`.
require 'buildr/scala'

# Version number for this release
VERSION_NUMBER = "0.0.0"
# Group identifier for your projects
GROUP = "spark"
# Vendor string written into the jar manifest (empty for now).
COPYRIGHT = ""

# Specify Maven 2.0 remote repositories here, like this:
repositories.remote << "http://www.ibiblio.org/maven2/"

# Every jar checked in under third_party/ goes on the compile classpath.
THIRD_PARTY_JARS = Dir["third_party/**/*.jar"]

desc "The Spark project"
define "spark" do
  project.version = VERSION_NUMBER
  project.group = GROUP
  manifest["Implementation-Vendor"] = COPYRIGHT
  # Compile against the bundled third-party jars rather than resolved artifacts.
  compile.with THIRD_PARTY_JARS
  # Produce a plain jar artifact for the project.
  package(:jar)
  # Run tests with ScalaTest; :fork => true spawns a separate JVM per test run
  # (presumably to isolate interpreter/classloader state — confirm against CI).
  test.using :scalatest, :fork => true
end
4 changes: 2 additions & 2 deletions run
Original file line number Diff line number Diff line change
Expand Up @@ -33,8 +33,8 @@ if [ -e $FWDIR/conf/java-opts ] ; then
fi
export JAVA_OPTS

# Build up classpath
CLASSPATH="$SPARK_CLASSPATH:$FWDIR/build/classes:$MESOS_CLASSPATH"
# build up classpath
CLASSPATH="$SPARK_CLASSPATH:$FWDIR/target/classes:$FWDIR/target/test/classes:$MESOS_CLASSPATH"
CLASSPATH+=:$FWDIR/conf
CLASSPATH+=:$FWDIR/third_party/mesos.jar
CLASSPATH+=:$FWDIR/third_party/asm-3.2/lib/all/asm-all-3.2.jar
Expand Down
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -310,7 +310,7 @@ class SparkCompletion(val repl: SparkInterpreter) extends SparkCompletionOutput
else xs.reduceLeft(_ zip _ takeWhile (x => x._1 == x._2) map (_._1) mkString)

// This is jline's entry point for completion.
override def complete(_buf: String, cursor: Int, candidates: JList[String]): Int = {
override def complete(_buf: String, cursor: Int, candidates: JList[_]): Int = {
val buf = onull(_buf)
verbosity = if (isConsecutiveTabs(buf, cursor)) verbosity + 1 else 0
DBG("complete(%s, %d) last = (%s, %d), verbosity: %s".format(buf, cursor, lastBuf, lastCursor, verbosity))
Expand All @@ -321,7 +321,7 @@ class SparkCompletion(val repl: SparkInterpreter) extends SparkCompletionOutput
case Nil => None
case xs =>
// modify in place and return the position
xs foreach (candidates add _)
xs.foreach(x => candidates.asInstanceOf[JList[AnyRef]].add(x))

// update the last buffer unless this is an alternatives list
if (xs contains "") Some(p.cursor)
Expand Down
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,8 @@ extends InterpreterControl {
settings.classpath append addedClasspath

interpreter = new SparkInterpreter(settings, out) {
override protected def parentClassLoader = classOf[SparkInterpreterLoop].getClassLoader
override protected def parentClassLoader =
classOf[SparkInterpreterLoop].getClassLoader
}
interpreter.setContextClassLoader()
// interpreter.quietBind("settings", "spark.repl.SparkInterpreterSettings", interpreter.isettings)
Expand Down
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
@@ -1,16 +1,31 @@
package spark.repl

import java.io._
import java.net.URLClassLoader

import scala.collection.mutable.ArrayBuffer
import scala.collection.JavaConversions._

import org.scalatest.FunSuite

class ReplSuite extends FunSuite {
/** Feeds `input` to a fresh Spark interpreter loop and captures its output.
 *
 *  The interpreter is started with an explicit -classpath built from this
 *  test's own classloader URLs so the spawned interpreter can compile
 *  against the same classes.
 *
 *  NOTE(review): the captured diff contained both the old no-arg
 *  `interp.main(new Array[String](0))` call and the new -classpath call;
 *  invoking main twice would run the interpreter loop twice. Only the
 *  -classpath invocation is kept here.
 *
 *  @param master the Spark master URL passed to the interpreter loop
 *  @param input  interpreter commands, fed as the interpreter's stdin
 *  @return everything the interpreter wrote to its output writer
 */
def runInterpreter(master: String, input: String): String = {
  val in = new BufferedReader(new StringReader(input + "\n"))
  val out = new StringWriter()
  // Collect file: URLs from our own classloader to rebuild a -classpath
  // argument. `paths` is only mutated, never reassigned, so it is a val.
  val paths = new ArrayBuffer[String]
  getClass.getClassLoader match {
    case urlLoader: URLClassLoader =>
      for (url <- urlLoader.getURLs if url.getProtocol == "file")
        paths += url.getFile
    case _ =>
      // Non-URL classloader: fall back to an empty classpath.
  }
  val interp = new SparkInterpreterLoop(in, new PrintWriter(out), master)
  spark.repl.Main.interp = interp
  val separator = System.getProperty("path.separator")
  interp.main(Array("-classpath", paths.mkString(separator)))
  spark.repl.Main.interp = null
  out.toString
}
Expand Down

0 comments on commit b84769a

Please sign in to comment.