Commit 46eecd1

Initial work to rename package to org.apache.spark

mateiz committed Sep 1, 2013
1 parent a30fac1 commit 46eecd1

Showing 521 changed files with 1,788 additions and 1,777 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -27,7 +27,7 @@ Or, for the Python API, the Python shell (`./pyspark`).
Spark also comes with several sample programs in the `examples` directory.
To run one of them, use `./run-example <class> <params>`. For example:

- ./run-example spark.examples.SparkLR local[2]
+ ./run-example org.apache.spark.examples.SparkLR local[2]

will run the Logistic Regression example locally on 2 CPUs.
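
Any saved commands or scripts that reference the old `spark.examples.*` class names need the same prefix update; for instance, to run the Pi estimator (assuming the SparkPi example class present in this tree):

    ./run-example org.apache.spark.examples.SparkPi local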

16 changes: 8 additions & 8 deletions assembly/pom.xml
@@ -19,13 +19,13 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-parent</artifactId>
<version>0.8.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-assembly</artifactId>
<name>Spark Project Assembly</name>
<url>http://spark-project.org/</url>
@@ -40,27 +40,27 @@

<dependencies>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-bagel</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-mllib</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-repl</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-streaming</artifactId>
<version>${project.version}</version>
</dependency>
@@ -121,7 +121,7 @@
<id>hadoop2-yarn</id>
<dependencies>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-yarn</artifactId>
<version>${project.version}</version>
</dependency>
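For downstream builds, the practical consequence of the groupId change above is that dependency coordinates move from `org.spark-project` to `org.apache.spark`. A minimal sbt sketch (hypothetical downstream project; artifact name and version taken from this pom, with no Scala cross-version suffix assumed):

    // build.sbt — depend on the renamed Spark core artifact
    libraryDependencies += "org.apache.spark" % "spark-core" % "0.8.0-SNAPSHOT"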
10 changes: 5 additions & 5 deletions assembly/src/main/assembly/assembly.xml
@@ -30,9 +30,9 @@
</fileSet>
<fileSet>
<directory>
- ${project.parent.basedir}/core/src/main/resources/spark/ui/static/
+ ${project.parent.basedir}/core/src/main/resources/org/apache/spark/ui/static/
</directory>
- <outputDirectory>/ui-resources/spark/ui/static</outputDirectory>
+ <outputDirectory>/ui-resources/org/apache/spark/ui/static</outputDirectory>
<includes>
<include>**/*</include>
</includes>
@@ -63,10 +63,10 @@
<dependencySets>
<dependencySet>
<includes>
- <include>org.spark-project:*:jar</include>
+ <include>org.apache.spark:*:jar</include>
</includes>
<excludes>
- <exclude>org.spark-project:spark-assembly:jar</exclude>
+ <exclude>org.apache.spark:spark-assembly:jar</exclude>
</excludes>
</dependencySet>
<dependencySet>
@@ -77,7 +77,7 @@
<useProjectArtifact>false</useProjectArtifact>
<excludes>
<exclude>org.apache.hadoop:*:jar</exclude>
- <exclude>org.spark-project:*:jar</exclude>
+ <exclude>org.apache.spark:*:jar</exclude>
</excludes>
</dependencySet>
</dependencySets>
6 changes: 3 additions & 3 deletions bagel/pom.xml
@@ -19,21 +19,21 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-parent</artifactId>
<version>0.8.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-bagel</artifactId>
<packaging>jar</packaging>
<name>Spark Project Bagel</name>
<url>http://spark-project.org/</url>

<dependencies>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-core</artifactId>
<version>${project.version}</version>
</dependency>
@@ -15,32 +15,31 @@
* limitations under the License.
*/

- package spark.bagel
+ package org.apache.spark.bagel

- import spark._
- import spark.SparkContext._
+ import org.apache.spark._
+ import org.apache.spark.SparkContext._

import scala.collection.mutable.ArrayBuffer
- import storage.StorageLevel
+ import org.apache.spark.storage.StorageLevel

object Bagel extends Logging {
val DEFAULT_STORAGE_LEVEL = StorageLevel.MEMORY_AND_DISK

/**
* Runs a Bagel program.
- * @param sc [[spark.SparkContext]] to use for the program.
+ * @param sc [[org.apache.spark.SparkContext]] to use for the program.
* @param vertices vertices of the graph represented as an RDD of (Key, Vertex) pairs. Often the Key will be
* the vertex id.
* @param messages initial set of messages represented as an RDD of (Key, Message) pairs. Often this will be an
* empty array, i.e. sc.parallelize(Array[K, Message]()).
- * @param combiner [[spark.bagel.Combiner]] combines multiple individual messages to a given vertex into one
+ * @param combiner [[org.apache.spark.bagel.Combiner]] combines multiple individual messages to a given vertex into one
* message before sending (which often involves network I/O).
- * @param aggregator [[spark.bagel.Aggregator]] performs a reduce across all vertices after each superstep,
+ * @param aggregator [[org.apache.spark.bagel.Aggregator]] performs a reduce across all vertices after each superstep,
* and provides the result to each vertex in the next superstep.
- * @param partitioner [[spark.Partitioner]] partitions values by key
+ * @param partitioner [[org.apache.spark.Partitioner]] partitions values by key
* @param numPartitions number of partitions across which to split the graph.
* Default is the default parallelism of the SparkContext
- * @param storageLevel [[spark.storage.StorageLevel]] to use for caching of intermediate RDDs in each superstep.
+ * @param storageLevel [[org.apache.spark.storage.StorageLevel]] to use for caching of intermediate RDDs in each superstep.
* Defaults to caching in memory.
* @param compute function that takes a Vertex, optional set of (possibly combined) messages to the Vertex,
* optional Aggregator and the current superstep,
@@ -98,7 +97,7 @@ object Bagel extends Logging {
verts
}

- /** Runs a Bagel program with no [[spark.bagel.Aggregator]] and the default storage level */
+ /** Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]] and the default storage level */
def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest, C: Manifest](
sc: SparkContext,
vertices: RDD[(K, V)],
@@ -110,7 +109,7 @@
compute: (V, Option[C], Int) => (V, Array[M])
): RDD[(K, V)] = run(sc, vertices, messages, combiner, numPartitions, DEFAULT_STORAGE_LEVEL)(compute)

- /** Runs a Bagel program with no [[spark.bagel.Aggregator]] */
+ /** Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]] */
def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest, C: Manifest](
sc: SparkContext,
vertices: RDD[(K, V)],
@@ -128,7 +127,7 @@
}

/**
- * Runs a Bagel program with no [[spark.bagel.Aggregator]], default [[spark.HashPartitioner]]
+ * Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]], default [[org.apache.spark.HashPartitioner]]
* and default storage level
*/
def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest, C: Manifest](
@@ -141,7 +140,7 @@
compute: (V, Option[C], Int) => (V, Array[M])
): RDD[(K, V)] = run(sc, vertices, messages, combiner, numPartitions, DEFAULT_STORAGE_LEVEL)(compute)

- /** Runs a Bagel program with no [[spark.bagel.Aggregator]] and the default [[spark.HashPartitioner]]*/
+ /** Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]] and the default [[org.apache.spark.HashPartitioner]]*/
def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest, C: Manifest](
sc: SparkContext,
vertices: RDD[(K, V)],
@@ -159,8 +158,8 @@
}

/**
- * Runs a Bagel program with no [[spark.bagel.Aggregator]], default [[spark.HashPartitioner]],
- * [[spark.bagel.DefaultCombiner]] and the default storage level
+ * Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]], default [[org.apache.spark.HashPartitioner]],
+ * [[org.apache.spark.bagel.DefaultCombiner]] and the default storage level
*/
def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest](
sc: SparkContext,
@@ -172,8 +171,8 @@
): RDD[(K, V)] = run(sc, vertices, messages, numPartitions, DEFAULT_STORAGE_LEVEL)(compute)

/**
- * Runs a Bagel program with no [[spark.bagel.Aggregator]], the default [[spark.HashPartitioner]]
- * and [[spark.bagel.DefaultCombiner]]
+ * Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]], the default [[org.apache.spark.HashPartitioner]]
+ * and [[org.apache.spark.bagel.DefaultCombiner]]
*/
def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest](
sc: SparkContext,
@@ -15,16 +15,14 @@
* limitations under the License.
*/

- package spark.bagel
+ package org.apache.spark.bagel

- import org.scalatest.{FunSuite, Assertions, BeforeAndAfter}
+ import org.scalatest.{BeforeAndAfter, FunSuite, Assertions}
import org.scalatest.concurrent.Timeouts
import org.scalatest.time.SpanSugar._

import scala.collection.mutable.ArrayBuffer

- import spark._
- import storage.StorageLevel
+ import org.apache.spark._
+ import org.apache.spark.storage.StorageLevel

class TestVertex(val active: Boolean, val age: Int) extends Vertex with Serializable
class TestMessage(val targetId: String) extends Message[String] with Serializable
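User code built against Bagel migrates the same way: only the package prefix changes, the API does not. A hedged sketch of a hypothetical downstream program after the rename (before this commit the imports would have read `import spark._` and `import spark.bagel.Bagel`):

    import org.apache.spark._
    import org.apache.spark.bagel.Bagel

    object BagelMigrationCheck {
      def main(args: Array[String]) {
        val sc = new SparkContext("local", "BagelMigrationCheck")
        // Bagel's entry points keep their signatures; only the package moved,
        // so e.g. the default storage level is still reachable as:
        println(Bagel.DEFAULT_STORAGE_LEVEL)
        sc.stop()
      }
    }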
2 changes: 1 addition & 1 deletion bin/start-master.sh
@@ -49,4 +49,4 @@ if [ "$SPARK_PUBLIC_DNS" = "" ]; then
fi
fi

"$bin"/spark-daemon.sh start spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT
"$bin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT
2 changes: 1 addition & 1 deletion bin/start-slave.sh
@@ -32,4 +32,4 @@ if [ "$SPARK_PUBLIC_DNS" = "" ]; then
fi
fi

"$bin"/spark-daemon.sh start spark.deploy.worker.Worker "$@"
"$bin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker "$@"
2 changes: 1 addition & 1 deletion bin/stop-master.sh
@@ -24,4 +24,4 @@ bin=`cd "$bin"; pwd`

. "$bin/spark-config.sh"

"$bin"/spark-daemon.sh stop spark.deploy.master.Master 1
"$bin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1
4 changes: 2 additions & 2 deletions bin/stop-slaves.sh
@@ -29,9 +29,9 @@ if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
fi

if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
"$bin"/spark-daemons.sh stop spark.deploy.worker.Worker 1
"$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker 1
else
for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
"$bin"/spark-daemons.sh stop spark.deploy.worker.Worker $(( $i + 1 ))
"$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 ))
done
fi
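
The daemon scripts above pass fully qualified class names straight through to spark-daemon.sh, so any locally maintained wrappers must switch to the new names as well; for example (hypothetical master host, single worker instance, mirroring the arguments start-slave.sh forwards):

    ./bin/spark-daemon.sh start org.apache.spark.deploy.worker.Worker 1 spark://master:7077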
4 changes: 2 additions & 2 deletions core/pom.xml
@@ -19,13 +19,13 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-parent</artifactId>
<version>0.8.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-core</artifactId>
<packaging>jar</packaging>
<name>Spark Project Core</name>
@@ -15,7 +15,7 @@
* limitations under the License.
*/

- package spark.network.netty;
+ package org.apache.spark.network.netty;

import io.netty.bootstrap.Bootstrap;
import io.netty.channel.Channel;
@@ -15,7 +15,7 @@
* limitations under the License.
*/

- package spark.network.netty;
+ package org.apache.spark.network.netty;

import io.netty.buffer.BufType;
import io.netty.channel.ChannelInitializer;
@@ -15,7 +15,7 @@
* limitations under the License.
*/

- package spark.network.netty;
+ package org.apache.spark.network.netty;

import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
@@ -15,7 +15,7 @@
* limitations under the License.
*/

- package spark.network.netty;
+ package org.apache.spark.network.netty;

import java.net.InetSocketAddress;

@@ -15,7 +15,7 @@
* limitations under the License.
*/

- package spark.network.netty;
+ package org.apache.spark.network.netty;

import io.netty.channel.ChannelInitializer;
import io.netty.channel.socket.SocketChannel;
@@ -15,7 +15,7 @@
* limitations under the License.
*/

- package spark.network.netty;
+ package org.apache.spark.network.netty;

import java.io.File;
import java.io.FileInputStream;
@@ -15,7 +15,7 @@
* limitations under the License.
*/

- package spark.network.netty;
+ package org.apache.spark.network.netty;


public interface PathResolver {