Chapter 3 Create Project and Initialize Business Data (Process Record)

Project declarations and dependencies

ECommerceRecommendSystem [pom.xml]

  • Public declarations, dependencies, plug-ins

properties declaration

  • log4g : a framework for processing logs (specific implementation of logs)
  • sel4g : Simple log facade (simple log interface)
  • mongodb-spark : MongoDB and Spark interface
  • casbah : MongoDB driver on scala (the latest one is MongoScalaDriver)
  • redis、kafka、spark、scala
  • jblas: java linear algebra library (matrix operations)

dependences dependencies

  • dependencies: declaration + introduction
  • dependencyManagement: declare, not import
<dependencies>
    <!-- 引入共同的日志管理工具 -->
    <dependency>
        <groupId>org.slf4j</groupId>
        <artifactId>jcl-over-slf4j</artifactId>
        <version>${
    
    slf4j.version}</version>
    </dependency>
    <dependency>
        <groupId>org.slf4j</groupId>
        <artifactId>slf4j-api</artifactId>
        <version>${
    
    slf4j.version}</version>
    </dependency>
    <dependency>
        <groupId>org.slf4j</groupId>
        <artifactId>slf4j-log4j12</artifactId>
        <version>${
    
    slf4j.version}</version>
    </dependency>
    <dependency>
        <groupId>log4j</groupId>
        <artifactId>log4j</artifactId>
        <version>${
    
    log4j.version}</version>
    </dependency>
</dependencies>

plugin plugin

  • plugin: declaration + introduction

  • pluginManagement: declare, do not import

  • There may be a problem with scala references (recommender)
    Insert image description here

  • Pay attention to the version number required


Data loading module

  • Object: singleton project

package com.aguigu

import com.mongodb.MongoClientURI
import com.mongodb.casbah.Imports.MongoClientURI
import com.mongodb.casbah.MongoClient
import com.mongodb.casbah.commons.MongoDBObject
import org.apache.spark.SparkConf
import org.apache.spark.sql.{
    
    DataFrame, SparkSession}

/** 样例类
 * Product数据集
 * 3982                           商品ID
 * Fuhlen 富勒 M8眩光舞者时尚节能     商品名称
 * 1057,439,736                   商品分类ID,不需要
 * B009EJN4T2                      亚马逊ID,不需要
 * https://images-cn-4.ssl-image   商品的图片URL
 * 外设产品|鼠标|电脑/办公           商品分类
 * 富勒|鼠标|电子产品|好用|外观漂亮   商品UGC标签
 */
case class Product( productId:Int, name:String, URL:String, categories:String, tags:String )

/**
 * Rating数据集
 * 4867        用户ID
 * 457976      商品ID
 * 5.0         评分
 * 1395676800  时间戳
 */
case class Rating( userId: Int, productId: Int, score: Double, timestamp: Int )

/**
 * MongoDB连接配置
 * @param uri    MongoDB的连接uri
 * @param db     要操作的db
 */
case class MongoConfig( uri: String, db: String )

object DataLoader {
    
    
  // 常量
  val PRODUCT_DATA_PATH = "/Users/liuhao/MyProject/ECommerceRecommendSystem/ECommerceRecommendSystem/recommender/DataLoader/src/main/resources/products.csv"
  val RATING_DATA_PATH = "/Users/liuhao/MyProject/ECommerceRecommendSystem/ECommerceRecommendSystem/recommender/DataLoader/src/main/resources/ratings.csv"
  // 定义mongodb中存储的表名
  val MONGODB_PRODUCT_COLLECTION = "Product"
  val MONGODB_RATING_COLLECTION = "Rating"

  def main(args: Array[String]): Unit = {
    
    
    /**
     * 配置项
     */
    val config = Map(
      "spark.cores" -> "local[*]", // 所有逻辑核占用
      "mongo.uri" -> "mongodb://localhost:27017/recommender", // MongoDB数据库连接
      "mongo.db" -> "recommender"
    )

    /**
     * 创建Spark相关:sparkConf, sparkSession
     */
    // 创建 spark config
    val sparkConf = new SparkConf().setMaster(config("spark.cores")).setAppName("DataLoader")

    // 创建 spark session
    val spark = SparkSession.builder().config(sparkConf).getOrCreate()

    /**
     * 加载数据
     */
    import spark.implicits._

    val productRDD = spark.sparkContext.textFile(PRODUCT_DATA_PATH)
    // RDD => DataFrame
    val productDF = productRDD.map( item => {
    
    
      // product数据通过'^'分割
      val attr = item.split("\\^")
      // 转换成product类
      Product( attr(0).toInt, attr(1).trim, attr(4).trim, attr(5).trim, attr(6).trim )
    }).toDF()

    val ratingRDD = spark.sparkContext.textFile(RATING_DATA_PATH)
    val ratingDF = ratingRDD.map(item => {
    
    
      val attr = item.split(",")
      Rating(attr(0).toInt, attr(1).toInt, attr(2).toDouble, attr(3).toInt)
    }).toDF()

    implicit val mongoConfig = MongoConfig( config("mongo.uri"), config("mongo.db"))  // 隐式配置
    storeDataInMongoDB( productDF, ratingDF )

    spark.stop()
  }

  def storeDataInMongoDB( productDF:DataFrame, ratingDF:DataFrame)(implicit mongoConfig: MongoConfig): Unit ={
    
    
    // 新建mongodb连接(casbah),客户端
    val mongoClinet = MongoClient(MongoClientURI(mongoConfig.uri) )
    // 定义要操作的mongodb表,理解:db.product
    val productCollection = mongoClinet( mongoConfig.db )( MONGODB_PRODUCT_COLLECTION )
    val ratingCollection = mongoClinet( mongoConfig.db )( MONGODB_RATING_COLLECTION )

    // 方式1:如果表已存在,则删除
    productCollection.dropCollection()
    ratingCollection.dropCollection()

    // 方式2:将当前数据存入对应表
    productDF.write
      .option("uri", mongoConfig.uri)
      .option("collection", MONGODB_PRODUCT_COLLECTION)
      .mode("overwrite")
      .format("com.mongodb.spark.sql")
      .save()

    ratingDF.write
      .option("uri", mongoConfig.uri)
      .option("collection", MONGODB_RATING_COLLECTION)
      .mode("overwrite")
      .format("com.mongodb.spark.sql")
      .save()

    // 对表创建索引
    productCollection.createIndex( MongoDBObject( "productId" -> 1 ) )
    ratingCollection.createIndex( MongoDBObject( "productId" -> 1 ) )
    ratingCollection.createIndex( MongoDBObject( "userId" -> 1 ) )

    mongoClinet.close()

  }
}

Guess you like

Origin blog.csdn.net/Lenhart001/article/details/131542562