Reading a Cassandra Data Source with Spark SQL

Copyright notice: please credit the source when reposting: https://blog.csdn.net/qiaojialin/article/details/80812106

pom dependencies

        <dependency>
            <groupId>com.datastax.spark</groupId>
            <artifactId>spark-cassandra-connector_2.11</artifactId>
            <version>2.0.0-M1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.11</artifactId>
            <version>2.1.1</version>
        </dependency>
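
Note that 2.0.0-M1 is a pre-release milestone of the connector; for Spark 2.1.x a final 2.0.x release of spark-cassandra-connector_2.11 is generally the safer match. The examples below also assume a test keyspace containing a words table in which word is the partition key and count is a clustering column, so that the predicates in the where clauses are valid CQL. A minimal setup sketch, assuming a single-node cluster on 127.0.0.1 and using the connector's CassandraConnector helper (the Setup object name is just for illustration):

import org.apache.spark.SparkConf
import com.datastax.spark.connector.cql.CassandraConnector

object Setup {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf(true)
      .set("spark.cassandra.connection.host", "127.0.0.1")
    // Open a driver session and create the keyspace and table
    // used by the read examples below
    CassandraConnector(conf).withSessionDo { session =>
      session.execute(
        "CREATE KEYSPACE IF NOT EXISTS test WITH replication = " +
          "{'class': 'SimpleStrategy', 'replication_factor': 1}")
      session.execute(
        "CREATE TABLE IF NOT EXISTS test.words " +
          "(word text, count int, PRIMARY KEY (word, count))")
    }
  }
}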

Scala version

import com.datastax.spark.connector.CassandraRow
import com.datastax.spark.connector.rdd.CassandraRDD
import org.apache.spark.{SparkConf, SparkContext}
import com.datastax.spark.connector._


object test {
  def main(args: Array[String]): Unit = {
    // Point the connector at the local Cassandra cluster
    val conf = new SparkConf(true)
      .set("spark.cassandra.connection.host", "127.0.0.1")
    val sc = new SparkContext("local", "test", conf)
    // Read test.words; the where clause is pushed down to Cassandra
    // as CQL instead of being filtered on the Spark side
    val rdd: CassandraRDD[CassandraRow] = sc
      .cassandraTable("test", "words")
      .where("word in ('foo', 'fo2') and count > 1 and count < 8")
    rdd.foreach(row => println(row))
  }
}
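
Despite the post's title, the snippet above uses the RDD API. Since spark-sql is already on the classpath, the same table can also be read through the DataFrame API; a minimal sketch, assuming the same local cluster and the test.words table (the SqlTest object name is just for illustration):

import org.apache.spark.sql.SparkSession

object SqlTest {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local")
      .appName("test")
      .config("spark.cassandra.connection.host", "127.0.0.1")
      .getOrCreate()

    // The connector exposes the org.apache.spark.sql.cassandra
    // data source; keyspace and table are passed as options
    val df = spark.read
      .format("org.apache.spark.sql.cassandra")
      .options(Map("keyspace" -> "test", "table" -> "words"))
      .load()

    // Supported filters are pushed down to Cassandra where possible
    df.filter("count > 1 and count < 8").show()
  }
}

Registering the DataFrame as a temporary view (df.createOrReplaceTempView("words")) then allows plain spark.sql("...") queries against it.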

Java version

https://github.com/datastax/spark-cassandra-connector/blob/master/doc/7_java_api.md

import com.datastax.spark.connector.japi.CassandraRow;
import com.datastax.spark.connector.japi.rdd.CassandraJavaRDD;
import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import static com.datastax.spark.connector.japi.CassandraJavaUtil.javaFunctions;

public class test {
    public static void main(String[] args) {
        // Point the connector at the local Cassandra cluster
        SparkConf conf = new SparkConf(true).set("spark.cassandra.connection.host", "127.0.0.1");
        SparkContext sc = new SparkContext("local", "test", conf);
        // Read test.words as an RDD of CassandraRow
        CassandraJavaRDD<CassandraRow> rdd = javaFunctions(sc).cassandraTable("test", "words");
        // The where clause is pushed down to Cassandra as CQL
        CassandraJavaRDD<CassandraRow> result = rdd.where("word in ('foo', 'fo2') and count > 1 and count < 8");
        result.foreach(row -> System.out.println(row));
    }
}
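
One caveat for both versions: foreach runs on the executors, so on a real cluster the println output lands in the executor logs rather than the driver console; in local mode, as here, everything runs in a single process. For small result sets, collecting to the driver first (for example result.collect()) is the usual way to inspect rows.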

Welcome to follow my personal WeChat official account: 数据库漫游指南

