@@ -0,0 +1,28 @@
+import java.util.Properties
+
+import org.apache.spark.sql.SparkSession
+
+object ToSqlServer {
+  case class Student(id: Int, value: String)
+  def main(args: Array[String]): Unit = {
+    // Point Spark at a local Hadoop installation (winutils) when running on Windows.
+    System.setProperty("hadoop.home.dir", "E:\\Scala\\hadoop-2.6.5")
+    val ss = SparkSession.builder().appName("test").master("local").getOrCreate()
+    val sc = ss.sparkContext
+    // Read comma-separated lines and map each row to a Student.
+    val rdd1 = sc.textFile("./src/main/scala/data/outPut/sum22.txt")
+    val rdd2 = rdd1.map(_.split(","))
+    val rdd3 = rdd2.map(f => Student(f(0).toInt, f(1)))
+    // Convert the RDD to a DataFrame and query it through a temp view.
+    val df = ss.createDataFrame(rdd3)
+    df.createOrReplaceTempView("students")
+    val df1 = ss.sql("select * from students limit 10")
+    // JDBC connection properties for SQL Server.
+    val prop = new Properties()
+    prop.setProperty("user", "sa")
+    prop.setProperty("password", "saljhy!@#")
+    prop.setProperty("driver", "com.microsoft.sqlserver.jdbc.SQLServerDriver")
+    // SQL Server JDBC URLs select the database with ";databaseName=...", not "/...".
+    df1.write.mode("overwrite").jdbc("jdbc:sqlserver://192.168.50.32:1433;databaseName=Mall", "Sum", prop)
+  }
+}
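
A quick sanity check after the write is to read the table back over the same connection. A minimal sketch, assuming it is appended at the end of main and reuses the ss session and prop credentials defined above:

    // Read the freshly written table back and print a few rows to verify the load.
    val check = ss.read.jdbc("jdbc:sqlserver://192.168.50.32:1433;databaseName=Mall", "Sum", prop)
    check.show(10)

Note that mode("overwrite") drops and recreates the Sum table on every run; if the existing table definition should be preserved, the JDBC writer's "truncate" option can be set instead.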