import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Reads the `bill_flow_20160` table from MySQL over JDBC, counts rows per
 * (shop_id, store_id) pair, and prints up to 100 groups ordered by ascending
 * count.
 *
 * Created by zhangshuai on 2016/9/27.
 */
object Mysql_spark_scala {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("Mysql_spark_scala").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      val sqlContext = new SQLContext(sc)

      // JDBC connection settings.
      // NOTE(review): credentials and host are hard-coded — consider moving
      // them to configuration before production use.
      val url = "jdbc:mysql://192.168.1.106:3306/ws_shop_pos"
      val prop = new java.util.Properties
      prop.setProperty("user", "root")
      prop.setProperty("password", "root")

      // Load the table, group by shop/store, count rows per group, and
      // order the result by the generated "count" column (ascending).
      val list = sqlContext.read
        .jdbc(url, "bill_flow_20160", prop)
        .groupBy("shop_id", "store_id")
        .count
        .orderBy("count")
      list.show(100)
    } finally {
      // Fix: the original never stopped the SparkContext, leaking the local
      // Spark runtime even when the JDBC read failed.
      sc.stop()
    }
  }
}
import org.apache.spark.SparkConf; import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.sql.DataFrame; import org.apache.spark.sql.SQLContext; import java.util.Properties; /** * Created by zhangshuai on 2016/9/27. */ public class Mysql_spark { public static void main(String[] args) { SparkConf sparkConf=new SparkConf(); sparkConf.setAppName("Mysql_spark"); sparkConf.setMaster("local"); JavaSparkContext sc=null; sc = new JavaSparkContext(sparkConf); SQLContext sqlContext=new SQLContext(sc); String url="jdbc:mysql://192.168.1.106:3306/ws_shop_pos"; Properties connectionProperties = new Properties(); connectionProperties.setProperty("user", "root");// 设置用户名 connectionProperties.setProperty("password", "root");// 设置密码 DataFrame jdbcDF = sqlContext.read().jdbc(url, "bill_flow_20160", connectionProperties).groupBy("shop_id","store_id").count().orderBy("count"); jdbcDF.show(100); } }
欢迎来到这里!
我们正在构建一个小众社区,大家在这里相互信任,以平等 • 自由 • 奔放的价值观进行分享交流。最终,希望大家能够找到与自己志同道合的伙伴,共同成长。
注册 关于