신은섭(Shin Eun Seop)
Committed by GitHub

Merge pull request #16 from Java-Cesco/calcDelta

Calc delta
File mode changed
...@@ -9,8 +9,5 @@ ...@@ -9,8 +9,5 @@
9 <module name="Detecting_fraud_clicks" /> 9 <module name="Detecting_fraud_clicks" />
10 </profile> 10 </profile>
11 </annotationProcessing> 11 </annotationProcessing>
12 - <bytecodeTargetLevel target="9">
13 - <module name="Detecting_fraud_clicks" target="1.8" />
14 - </bytecodeTargetLevel>
15 </component> 12 </component>
16 </project> 13 </project>
...\ No newline at end of file ...\ No newline at end of file
......
1 +<?xml version="1.0" encoding="UTF-8"?>
2 +<project version="4">
3 + <component name="MarkdownExportedFiles">
4 + <htmlFiles />
5 + <imageFiles />
6 + <otherFiles />
7 + </component>
8 +</project>
...\ No newline at end of file ...\ No newline at end of file
1 <?xml version="1.0" encoding="UTF-8"?> 1 <?xml version="1.0" encoding="UTF-8"?>
2 <project version="4"> 2 <project version="4">
3 - <component name="JavaScriptSettings"> 3 + <component name="ExternalStorageConfigurationManager" enabled="true" />
4 - <option name="languageLevel" value="ES6" /> 4 + <component name="MavenProjectsManager">
5 + <option name="originalFiles">
6 + <list>
7 + <option value="$PROJECT_DIR$/pom.xml" />
8 + </list>
9 + </option>
10 + </component>
11 + <component name="ProjectRootManager" version="2" languageLevel="JDK_1_8" project-jdk-name="1.8" project-jdk-type="JavaSDK">
12 + <output url="file://$PROJECT_DIR$/out" />
5 </component> 13 </component>
6 <component name="MavenProjectsManager"> 14 <component name="MavenProjectsManager">
7 <option name="originalFiles"> 15 <option name="originalFiles">
......
1 -<?xml version="1.0" encoding="UTF-8"?>
2 -<project version="4">
3 - <component name="ProjectModuleManager">
4 - <modules>
5 - <module fileurl="file://$PROJECT_DIR$/.idea/Detecting_fraud_clicks.iml" filepath="$PROJECT_DIR$/.idea/Detecting_fraud_clicks.iml" />
6 - </modules>
7 - </component>
8 -</project>
...\ No newline at end of file ...\ No newline at end of file
1 <?xml version="1.0" encoding="UTF-8"?> 1 <?xml version="1.0" encoding="UTF-8"?>
2 <project version="4"> 2 <project version="4">
3 <component name="VcsDirectoryMappings"> 3 <component name="VcsDirectoryMappings">
4 - <mapping directory="" vcs="Git" /> 4 + <mapping directory="$PROJECT_DIR$" vcs="Git" />
5 </component> 5 </component>
6 </project> 6 </project>
...\ No newline at end of file ...\ No newline at end of file
......
File mode changed
File mode changed
File mode changed
1 -import org.apache.spark.SparkConf;
2 -import org.apache.spark.api.java.JavaRDD;
3 -import org.apache.spark.api.java.JavaSparkContext;
4 -import scala.Tuple2;
5 -
6 -import java.util.Arrays;
7 -import java.util.List;
8 -
9 -public class MapExample {
10 -
11 - static SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("Cesco");
12 - static JavaSparkContext sc = new JavaSparkContext(conf);
13 -
14 - public static void main(String[] args) throws Exception {
15 -
16 - // Parallelized with 2 partitions
17 - JavaRDD<String> x = sc.parallelize(
18 - Arrays.asList("spark", "rdd", "example", "sample", "example"),
19 - 2);
20 -
21 - // Word Count Map Example
22 - JavaRDD<Tuple2<String, Integer>> y1 = x.map(e -> new Tuple2<>(e, 1));
23 - List<Tuple2<String, Integer>> list1 = y1.collect();
24 -
25 - // Another example of making tuple with string and it's length
26 - JavaRDD<Tuple2<String, Integer>> y2 = x.map(e -> new Tuple2<>(e, e.length()));
27 - List<Tuple2<String, Integer>> list2 = y2.collect();
28 -
29 - System.out.println(list1);
30 - }
31 -}
1 +import org.apache.spark.SparkConf;
2 +import org.apache.spark.api.java.JavaSparkContext;
3 +import org.apache.spark.sql.Dataset;
4 +import org.apache.spark.sql.Row;
5 +import org.apache.spark.sql.SparkSession;
6 +import org.apache.spark.sql.expressions.Window;
7 +import org.apache.spark.sql.expressions.WindowSpec;
8 +
9 +import javax.xml.crypto.Data;
10 +
11 +import static org.apache.spark.sql.functions.*;
12 +
13 +public class calForwardTimeDelta {
14 + static SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("Cesco");
15 + static JavaSparkContext sc = new JavaSparkContext(conf);
16 +
17 + public static void main(String[] args) throws Exception{
18 + //Create Session
19 + SparkSession spark = SparkSession
20 + .builder()
21 + .appName("Detecting Fraud Clicks")
22 + .getOrCreate();
23 +
24 + //run methods hereu
25 + calcDelta(spark);
26 + }
27 +
28 + private static void calcDelta(SparkSession spark){
29 + // put the path the file you gonna deal with being placed
30 + String filepath = "train_sample.csv";
31 +
32 + // create Dataset from files
33 + Dataset<Row> logDF = spark.read()
34 + .format("csv")
35 + .option("inferSchema", "true")
36 + .option("header","true")
37 + .load(filepath);
38 +
39 + // cast timestamp(click_time, attributed_time) type to long type
40 +
41 + //add column for long(click_time)
42 + Dataset<Row> newDF = logDF.withColumn("utc_click_time", logDF.col("click_time").cast("long"));
43 + //add column for long(attributed_time)
44 + newDF = newDF.withColumn("utc_attributed_time", logDF.col("attributed_time").cast("long"));
45 + //drop timestamp type columns
46 + newDF = newDF.drop("click_time").drop("attributed_time");
47 + newDF.createOrReplaceTempView("logs");
48 +
49 + WindowSpec w = Window.partitionBy ("ip")
50 + .orderBy("utc_click_time");
51 +
52 + newDF = newDF.withColumn("lag(utc_click_time)", lag("utc_click_time",1).over(w));
53 + newDF.where("ip=10").show();
54 + newDF = newDF.withColumn("delta", when(col("lag(utc_click_time)").isNull(),lit(0)).otherwise(col("utc_click_time")).minus(when(col("lag(utc_click_time)").isNull(),lit(0)).otherwise(col("lag(utc_click_time)"))));
55 + //newDF = newDF.withColumn("delta", datediff());
56 + newDF = newDF.drop("lag(utc_click_time)");
57 + newDF = newDF.orderBy("ip");
58 +
59 + newDF.show();
60 + }
61 +
62 +}
/**
 * Minimal sanity-check class: holds a single int field initialized to 0
 * and prints it to stdout.
 *
 * NOTE(review): class name should be UpperCamelCase; kept as-is to preserve
 * the public interface.
 */
public class valid {
    private int x;

    valid() {
        this.x = 0;
    }

    // Print the current value of x to stdout.
    void printX() {
        System.out.println(this.x);
    }

    public static void main(String[] args) {
        new valid().printX();
    }
}
File mode changed