// test.scala — spark-shell script: trains an ALS recommender on MySQL ratings and POSTs recommendations
import org.apache.spark.ml.evaluation.RegressionEvaluator
import org.apache.spark.ml.recommendation.ALS
import java.util.Properties
import org.apache.spark.sql.SQLContext
import org.apache.http.impl.client.HttpClientBuilder
import org.apache.http.HttpHeaders
import org.apache.http.entity.StringEntity
import org.apache.commons.io.IOUtils
import org.apache.http.client.methods.HttpPost
import spark.implicits._
import scala.util.parsing.json._
import com.google.gson.Gson;


// Load the `ratings` table over JDBC and split it 80/20 into training/test sets.
// Uses the spark-shell's SparkSession (`spark`) directly; `SQLContext` has been
// deprecated since Spark 2.0 and this file already relies on `spark.implicits._`.
// NOTE(review): the JDBC URL, user and password are hard-coded — move them into
// configuration (e.g. --conf / a properties file) before this leaves local dev.
val ratingDB = spark.read
  .format("jdbc")
  .option("url", "jdbc:mysql://localhost:3306/database_development?characterEncoding=UTF-8&serverTimezone=UTC")
  .option("dbtable", "ratings")
  .option("user", "root")
  .option("password", "asdasd2134")
  .load()
// A fixed seed makes the split — and the RMSE printed below — reproducible across runs.
val Array(training, test) = ratingDB.randomSplit(Array(0.8, 0.2), seed = 42L)


// Configure and train an ALS collaborative-filtering model on the training split.
// Hyper-parameters: 5 iterations, regularisation strength 0.01; the user/item/
// rating columns name the corresponding columns of the loaded ratings DataFrame.
val als = {
  val estimator = new ALS()
  estimator.setMaxIter(5)
  estimator.setRegParam(0.01)
  estimator.setUserCol("user_id")
  estimator.setItemCol("movie_id")
  estimator.setRatingCol("rating")
  estimator
}
val model = als.fit(training)


// Score the held-out split and report root-mean-square error.
// "drop" discards rows where ALS cannot produce a prediction (users/items
// unseen during training yield NaN), so the metric covers only scorable rows.
model.setColdStartStrategy("drop")
val predictions = model.transform(test)

val evaluator = new RegressionEvaluator()
evaluator.setMetricName("rmse")
evaluator.setLabelCol("rating")
evaluator.setPredictionCol("prediction")

val rmse = evaluator.evaluate(predictions)
println(s"Root-mean-square error = $rmse")


// Build the top-10 recommendation payload for a single user.
// FIX: the original referenced `user_id`, an identifier never defined anywhere
// in this file — declare the target user explicitly.
val userId = 1 // TODO: set the real target user id (was an undefined `user_id` reference)
val dF = Seq(userId).toDF("user_id")
val userSubsetRecs = model.recommendForUserSubset(dF, 10)
// FIX: only one row is needed (the original fetched two and used the first),
// and headOption avoids an ArrayIndexOutOfBoundsException when the user has
// no recommendations — we send an empty JSON array instead.
val sendData = userSubsetRecs.select("recommendations").toJSON
  .take(1)
  .headOption
  .getOrElse("[]")

// POST the recommendations JSON to the local consumer service and echo the reply.
// FIX: `HttpClientBuilder.build()` returns a CloseableHttpClient and `execute`
// a CloseableHttpResponse — both hold pooled connections and were never closed;
// try/finally now releases them even when the request fails. The entity and the
// response body also use an explicit UTF-8 charset instead of the platform
// default, matching the declared application/json content type.
val client = HttpClientBuilder.create().build()
try {
  val post = new HttpPost("http://localhost:3002")
  post.addHeader(HttpHeaders.CONTENT_TYPE, "application/json")
  post.setEntity(new StringEntity(sendData, "UTF-8"))
  val response = client.execute(post)
  try {
    val entity = response.getEntity()
    println(Seq(response.getStatusLine.getStatusCode(), response.getStatusLine.getReasonPhrase()))
    println(IOUtils.toString(entity.getContent(), "UTF-8"))
  } finally {
    response.close()
  }
} finally {
  client.close()
}