ALS (Alternating Least Squares): a Spark MLlib movie recommendation example


spark-submit --class com.ones.soc.cf.MoiveRecommender --master yarn --num-executors 3 --driver-memory 5g --executor-memory 4g /root/bigData.jar 2 5 0.01 /ones/mldata/1u.user /ones/mldata/1u.data /ones/result/1
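The six positional arguments after the jar map to what run() reads below: rank (2), numIterations (5), lambda (0.01), the user-profile input (/ones/mldata/1u.user), the ratings input (/ones/mldata/1u.data), and the HDFS output directory (/ones/result/1).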



package com.ones.soc.cf


import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.mllib.recommendation._
import org.apache.spark.rdd.RDD
import java.util.ArrayList
/**
  * Created by tom
  */
object MoiveRecommender {

  val numRecommender = 10

  case class Params(
                     input: String = null,
                     numIterations: Int = 20,
                     lambda: Double = 1.0,
                     rank: Int = 10,
                     numUserBlocks: Int = -1,
                     numProductBlocks: Int = -1,
                     implicitPrefs: Boolean = false,
                     userDataInput: String = null)
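
  // Note: Params documents the tunable parameters, but run() below reads them
  // positionally from args rather than through this case class.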

  def main(args: Array[String]) {
    run(args)
  }

  def run(args: Array[String]) {
    val confighdfs = new Configuration()
    val fs = FileSystem.get(confighdfs)
    if (args(5) != null && args(5).trim().length > 1) {
      val output = new Path(args(5))
      if (fs.exists(output)) { // delete the output directory if it already exists
        fs.delete(output, true)
      }
    }

    // Read the model parameters and I/O paths from the positional arguments
    val rank = args(0).toInt
    val numIterations = args(1).toInt
    val lambda = args(2).toDouble
    val userDataInput = args(3)
    val input = args(4)
    val outpath = args(5)
    val numUserBlocks = -1
    val numProductBlocks = -1
    val implicitPrefs = false

    // Local mode: point Spark at a local installation
    val conf = new SparkConf().setAppName("Movie Recommendation")
    //.setSparkHome("D:\\work\\hadoop_lib\\spark-1.1.0-bin-hadoop2.4\\spark-1.1.0-bin-hadoop2.4")
    //conf.setMaster("local[*]")

    // Cluster mode: read the Spark cluster's environment variables
    //val conf = new SparkConf().setAppName("Movie Recommendation")
    val context = new SparkContext(conf)
    // Load the ratings data
    val data = context.textFile(input)
    /**
      * MovieLens ratings are on a scale of 1-5:
      * 5: Must see
      * 4: Will enjoy
      * 3: It's okay
      * 2: Fairly bad
      * 1: Awful
      */
    val ratings = data.map(_.split("\t") match {
      case Array(user, item, rate, time) => Rating(user.toInt, item.toInt, rate.toDouble)
    })
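
    // Each line of u.data is tab-separated as user, item, rating, timestamp
    // (e.g. "196\t242\t3\t881250949" in MovieLens 100k); the timestamp is dropped.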

    // Use ALS to build the recommendation model
    // (the simple API also works: val model = ALS.train(ratings, rank, numIterations))
    val model = new ALS()
      .setRank(rank)
      .setIterations(numIterations)
      .setLambda(lambda)
      .setImplicitPrefs(implicitPrefs)
      .setUserBlocks(numUserBlocks)
      .setProductBlocks(numProductBlocks)
      .run(ratings)
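
    // Note: with implicitPrefs = true, MLlib ALS treats the input values as
    // implicit feedback (confidence weights) rather than explicit 1-5 ratings.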

    // Predict and save the recommendations
    predictMovie(userDataInput, context, model, fs, outpath)
    // Evaluate the model
    evaluateModel(ratings, model)
    // Clean up
    context.stop()
  }

  /**
    * Model evaluation
    */
  private def evaluateModel(ratings: RDD[Rating], model: MatrixFactorizationModel) {

    // Extract the (user, product) pairs from the training data
    val usersProducts = ratings.map {
      case Rating(user, product, rate) => (user, product)
    }

    // Predict a rating for every (user, product) pair
    val predictions = model.predict(usersProducts).map {
      case Rating(user, product, rate) => ((user, product), rate)
    }

    // Join the true ratings with the predicted ratings
    val ratesAndPreds = ratings.map {
      case Rating(user, product, rate) => ((user, product), rate)
    }.join(predictions)

    // Compute the mean squared error
    val MSE = ratesAndPreds.map {
      case ((user, product), (r1, r2)) =>
        val err = r1 - r2
        err * err
    }.mean()

    // Print the mean squared error
    println("Mean Squared Error = " + MSE)
  }

  /**
    * Predict data and save
    */
  private def predictMovie(userDataInput: String, context: SparkContext, model: MatrixFactorizationModel, fs: FileSystem, outpath: String) {

    val recommenders = new ArrayList[java.util.Map[String, String]]()
    val sb = new StringBuilder

    // Read the users for whom movies should be recommended
    val userData = context.textFile(userDataInput) //u.user
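
    // Each u.user line is pipe-delimited as id|age|gender|occupation|zip
    // (e.g. "1|24|M|technician|85711"); only the id is used below.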

    userData.map(_.split("\\|") match {
      case Array(id, age, sex, job, zip) => id
    }).collect().foreach { id =>
      // Recommend the top-N movies for this user
      val rs = model.recommendProducts(id.toInt, numRecommender)
      var value = ""
      var key = 0

      rs.foreach { r =>
        key = r.user
        value = value + r.product + ":" + r.rating + ","
      }
      sb.append("user=" + key + "\t" + "value=" + value).append("\r\n")
      // On success, build the put object for later insertion into HBase
      /*
      if (!value.equals("")) {
        val put = new java.util.HashMap[String, String]()
        put.put("rowKey", key.toString)
        put.put("t:info", value)
        recommenders.add(put)
      }
      */
    }
    outputHdfs(fs, sb.toString(), outpath)

    // Save to the [recommender] table in HBase.
    // recommenders is the java ArrayList built above; write your own HBase helper
    // class in Java or Scala (omitted in the original; a minimal sketch follows
    // after this listing).
    //HbaseUtil.saveListMap("recommender", recommenders)
  }
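
  // Note: a possible alternative on Spark 1.4+ (not what the original code uses):
  // model.recommendProductsForUsers(numRecommender) returns an
  // RDD[(Int, Array[Rating])] with the top-N products for every user in one
  // distributed call, avoiding the collect-and-loop on the driver above.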


  def outputHdfs(fs: FileSystem, text: String, textdir: String): Unit = {
    try {
      // Overwrite result.txt under the output directory
      val fsDataOutputStream = fs.create(new Path(textdir + "/result.txt"), true)
      val s = text.getBytes("UTF-8")
      fsDataOutputStream.write(s, 0, s.length)
      fsDataOutputStream.hflush()
      fsDataOutputStream.close()
    } catch {
      case e: Exception => e.printStackTrace() // do not swallow write failures silently
    }
  }
}
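
For completeness, here is a minimal sketch of the HbaseUtil.saveListMap helper referenced in the commented-out code above. The original article omits it, so this is a hypothetical implementation: it assumes the HBase 1.x client API (ConnectionFactory/Table) and the "rowKey" plus "family:qualifier" map convention built in predictMovie.


package com.ones.soc.cf

import java.util.{List => JList, Map => JMap}
import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
import org.apache.hadoop.hbase.client.{ConnectionFactory, Put}
import org.apache.hadoop.hbase.util.Bytes
import scala.collection.JavaConverters._

object HbaseUtil {

  // Persist each map as one HBase row: the "rowKey" entry becomes the row key,
  // and every other "family:qualifier" entry becomes a column value.
  def saveListMap(tableName: String, rows: JList[JMap[String, String]]): Unit = {
    val conn = ConnectionFactory.createConnection(HBaseConfiguration.create())
    val table = conn.getTable(TableName.valueOf(tableName))
    try {
      val puts = rows.asScala.map { m =>
        val put = new Put(Bytes.toBytes(m.get("rowKey")))
        m.asScala.foreach { case (k, v) =>
          if (k != "rowKey") {
            val Array(family, qualifier) = k.split(":", 2)
            put.addColumn(Bytes.toBytes(family), Bytes.toBytes(qualifier), Bytes.toBytes(v))
          }
        }
        put
      }
      table.put(puts.asJava)
    } finally {
      table.close()
      conn.close()
    }
  }
}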

