From 6749b4ef03f9745aeb852c0ba3e1505a680892d1 Mon Sep 17 00:00:00 2001
From: hh <2397241381@qq.com>
Date: Sun, 5 May 2024 21:06:15 +0800
Subject: [PATCH] =?UTF-8?q?=E6=B5=8B=E8=AF=95spark?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../main/java/org/jeecg/sy/Controller.java    |  5 ++++
 .../src/main/java/org/jeecg/sy/Test.scala     | 28 +++++++++++++++++++
 2 files changed, 33 insertions(+)
 create mode 100644 system/start/src/main/java/org/jeecg/sy/Test.scala

diff --git a/system/start/src/main/java/org/jeecg/sy/Controller.java b/system/start/src/main/java/org/jeecg/sy/Controller.java
index 00304b6..722d79c 100644
--- a/system/start/src/main/java/org/jeecg/sy/Controller.java
+++ b/system/start/src/main/java/org/jeecg/sy/Controller.java
@@ -2,8 +2,11 @@ package org.jeecg.sy;
 
 import com.alibaba.fastjson.JSONArray;
 import com.alibaba.fastjson.JSONObject;
+import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.Row;
 import org.jeecg.common.api.vo.Result;
 import org.springframework.data.redis.core.RedisTemplate;
+import org.springframework.web.bind.annotation.GetMapping;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RestController;
 
@@ -33,6 +36,8 @@ public class Controller {
             list.set(i, list.get(list.size() - i - 1));
             list.set(list.size() - i - 1, temp);
         }
+
+        String df = Test$.MODULE$.getString();
         return Result.ok(list);
     }
 
diff --git a/system/start/src/main/java/org/jeecg/sy/Test.scala b/system/start/src/main/java/org/jeecg/sy/Test.scala
new file mode 100644
index 0000000..00e3032
--- /dev/null
+++ b/system/start/src/main/java/org/jeecg/sy/Test.scala
@@ -0,0 +1,28 @@
+package org.jeecg.sy
+
+import org.apache.spark.sql.SparkSession
+
+object Test {
+  /** Runs a tiny local-mode Spark job (count occurrences of 1..10) and returns the sentinel "66". */
+  def getString(): String = {
+    // Local-mode session for a smoke test; stopped in `finally` so that
+    // repeated calls from Controller do not leak Spark resources.
+    val spark = SparkSession.builder()
+      .appName("SparkDemo")
+      .master("local")
+      .getOrCreate()
+    try {
+      // Suppress Spark's verbose INFO logging for this smoke test.
+      spark.sparkContext.setLogLevel("WARN")
+      // Ten numbers, each occurring exactly once.
+      val data = 1 to 10
+      val distData = spark.sparkContext.parallelize(data)
+      // Classic word-count shape: count occurrences of each number.
+      val counts = distData.map(n => (n, 1)).reduceByKey(_ + _)
+      counts.collect().foreach(println)
+      // Sentinel value consumed (and currently only assigned) by Controller.
+      "66"
+    } finally {
+      spark.stop()
+    }
+  }
+}