import scala.math.random
import org.apache.spark._
/**
 * Created by code-pc on 16/3/2.
 */
object test1 {

  def main(args: Array[String]) {
    val conf = new SparkConf().setAppName("Spark Pi").setMaster("local")
    val spark = new SparkContext(conf)

    // Number of partitions; default to 2 when no argument is given
    val slices = if (args.length > 0) args(0).toInt else 2
    val n = 100000 * slices

    // Monte Carlo estimate: sample random points in the unit square
    // and count how many fall inside the unit circle
    val count = spark.parallelize(1 to n, slices).map { i =>
      val x = random * 2 - 1
      val y = random * 2 - 1
      if (x * x + y * y < 1) 1 else 0
    }.reduce(_ + _)

    println("Pi is roughly " + 4.0 * count / n)
    spark.stop()
  }
}
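The estimate works because the points are drawn uniformly from the square [-1, 1] × [-1, 1], which has area 4, while the unit circle inside it has area π. The fraction of points that land inside the circle therefore satisfies count / n ≈ π / 4, so π ≈ 4 · count / n, which is exactly what the final println reports.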
Build the JAR package
File -> Project Structure -> Artifacts -> + -> JAR -> From modules with dependencies...
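If you prefer packaging from the command line instead of the IDE, a minimal sbt build definition along the following lines should also work. This is only a sketch: the project name and the Scala/Spark versions shown are assumptions and need to be adjusted to match your installation.

// Hypothetical build.sbt; name and versions are assumptions, adjust to your setup
name := "spark-pi"

version := "1.0"

scalaVersion := "2.11.8"

// "provided" keeps Spark itself out of the packaged JAR,
// since spark-submit / the cluster supplies it at runtime
libraryDependencies += "org.apache.spark" %% "spark-core" % "1.6.1" % "provided"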