import org.apache.spark.{
SparkConf, SparkContext}
object Test6 {
def main(args: Array[String]): Unit = {
val sparkconf = new SparkConf().setMaster("local[*]").setAppName("wordcount")
val sc =new SparkContext(sparkconf)
val rdd= sc.parallelize(List(1,2,5,7,8,9,3,4,4,5),
Spark 的去重算子
最新推荐文章于 2023-05-04 09:10:41 发布