概述
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import java.util.Properties;
/**
 * Copies the rows of a MySQL table into a Hive table of the same name.
 *
 * <p>Reads {@code test.student} from MySQL over JDBC, registers it as a
 * temporary view, and runs an {@code INSERT OVERWRITE} into the Hive table
 * {@code test.student}.
 */
public class MysqlTohive {
    public static void main(String[] args) {
        // enableHiveSupport() is required: without it Spark uses an in-memory
        // catalog and the INSERT OVERWRITE into a Hive table fails.
        SparkSession spark = SparkSession.builder()
                .appName("sqlToHive")
                .enableHiveSupport()
                .getOrCreate();
        try {
            Properties dbConfPro = new Properties();
            // NOTE(review): credentials are hard-coded; move them to a config
            // file or program arguments before production use.
            dbConfPro.setProperty("user", "root");
            dbConfPro.setProperty("password", "root");
            // Pin the JDBC driver class explicitly (mysql-connector-java 5.x).
            dbConfPro.setProperty("driver", "com.mysql.jdbc.Driver");

            // A parenthesized subquery with an alias is how Spark's JDBC source
            // accepts an arbitrary SELECT in place of a plain table name.
            String query = "(select * from student) m";
            Dataset<Row> result = spark.read().jdbc(
                    "jdbc:mysql://192.168.8.103:3306/test?rewriteBatchedStatements=true",
                    query, dbConfPro);

            // Register under a name that does NOT collide with the Hive target:
            // a temp view named "student" would shadow test.student, making the
            // INSERT ... SELECT read from itself instead of the MySQL data.
            result.createOrReplaceTempView("student_src");
            spark.catalog().setCurrentDatabase("test");
            spark.sql("insert overwrite table student select * from student_src");
        } finally {
            // Always release the SparkSession (the original leaked it).
            spark.stop();
        }
    }
}
所需pom.xml
<!-- Maven dependencies for the MysqlTohive Spark job.
     All Spark artifacts share Scala binary version 2.11 and Spark 2.1.0 —
     keep these aligned when upgrading. -->
<dependencies>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.11</artifactId>
<version>2.1.0</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.11</artifactId>
<version>2.1.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.spark/spark-hive -->
<!-- NOTE(review): spark-hive is `provided` while spark-core/spark-sql are
     compile scope — inconsistent. The job needs Hive support at runtime, so
     ensure the cluster actually provides this artifact (or drop the scope). -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_2.11</artifactId>
<version>2.1.0</version>
<scope>provided</scope>
</dependency>
<!-- MySQL JDBC driver; 5.1.x registers as com.mysql.jdbc.Driver. -->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.27</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-core -->
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>2.12.1</version>
</dependency>
</dependencies>
最后
以上就是年轻小蜜蜂为你收集整理的「将 MySQL 查询的表数据插入到 Hive 库」的全部内容，希望这篇文章能帮你解决相关的程序开发问题。
如果觉得靠谱客网站的内容还不错,欢迎将靠谱客网站推荐给程序员好友。
本图文内容来源于网友提供,作为学习参考使用,或来自网络收集整理,版权属于原作者所有。
发表评论 取消回复