springboot远程连接spark
springboot远程连接spark
1、导入依赖
-
<!-- Spark dependencies (core, SQL, and MLlib), all built for Scala 2.12 -->
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.12</artifactId>
    <version>3.2.2</version>
</dependency>
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-sql_2.12</artifactId>
    <version>3.2.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.spark/spark-mllib -->
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-mllib_2.12</artifactId>
    <version>3.2.2</version>
</dependency>
2、配置spark信息
-
建立一个配置文件,配置spark信息
-
import org.apache.spark.SparkConf; import org.apache.spark.sql.SparkSession; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; //将文件交于spring管理 @Configuration public class SparkConfig { //使用yml中的配置 @Value("${spark.master}") private String sparkMaster; @Value("${spark.appName}") private String sparkAppName; @Bean public SparkConf sparkConf() { SparkConf conf = new SparkConf(); conf.setMaster(sparkMaster); conf.setAppName(sparkAppName); return conf; } @Bean public SparkSession sparkSession() { return SparkSession.builder() .config(sparkConf()) .getOrCreate(); } }
-
3、controller和service
-
controller类
-
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import xyz.zzj.traffic_main_code.service.SparkService;

/**
 * REST controller that triggers the Spark job.
 *
 * <p>Exposes {@code GET /spark/run}, which delegates to
 * {@link SparkService#executeSparkJob()} and returns a confirmation string.
 */
@RestController
@RequestMapping("/spark")
public class SparkController {

    private final SparkService sparkService;

    // Constructor injection instead of a field @Autowired: the dependency is
    // final (immutable after construction) and the class is trivially testable.
    // Spring injects the single constructor automatically.
    public SparkController(SparkService sparkService) {
        this.sparkService = sparkService;
    }

    /**
     * Runs the Spark job synchronously.
     *
     * @return a fixed success message once the job has been executed
     */
    @GetMapping("/run")
    public String runSparkJob() {
        sparkService.executeSparkJob();
        return "Spark job executed successfully!";
    }
}
-
-
service
-
import org.springframework.stereotype.Service;

/**
 * Service layer responsible for executing the Spark job.
 *
 * <p>Currently a placeholder: it only announces that the job has started.
 * Real Spark logic (using the configured SparkSession) would go here.
 */
@Service
public class SparkService {

    /** Entry point invoked by the controller to kick off the Spark job. */
    public void executeSparkJob() {
        final String startMessage = "Spark job started";
        System.out.println(startMessage);
    }
}
-
4、运行
- 运行完后,打开浏览器
- http://localhost:8080/spark/run
- 观察spark任务（打开Spark Web UI查看）
- http://192.168.44.128:8099/