@@ -50,30 +50,28 @@ public void testSparkRead() {
 }

 // Execute the TPC-H queries
-var plan = spark.sql(
-    """
-    select
-        l_returnflag,
-        l_linestatus,
-        sum(l_quantity) as sum_qty,
-        sum(l_extendedprice) as sum_base_price,
-        sum(l_extendedprice * (1 - l_discount)) as sum_disc_price,
-        sum(l_extendedprice * (1 - l_discount) * (1 + l_tax)) as sum_charge,
-        avg(l_quantity) as avg_qty,
-        avg(l_extendedprice) as avg_price,
-        avg(l_discount) as avg_disc,
-        count(*) as count_order
-    from
-        lineitem
-    where
-        l_shipdate <= date '1998-09-02'
-    group by
-        l_returnflag,
-        l_linestatus
-    order by
-        l_returnflag,
-        l_linestatus
-    """);
+var q1 = "select\n" + "  l_returnflag,\n"
+    + "  l_linestatus,\n"
+    + "  sum(l_quantity) as sum_qty,\n"
+    + "  sum(l_extendedprice) as sum_base_price,\n"
+    + "  sum(l_extendedprice * (1 - l_discount)) as sum_disc_price,\n"
+    + "  sum(l_extendedprice * (1 - l_discount) * (1 + l_tax)) as sum_charge,\n"
+    + "  avg(l_quantity) as avg_qty,\n"
+    + "  avg(l_extendedprice) as avg_price,\n"
+    + "  avg(l_discount) as avg_disc,\n"
+    + "  count(*) as count_order\n"
+    + "from\n"
+    + "  lineitem\n"
+    + "where\n"
+    + "  l_shipdate <= date '1998-09-02'\n"
+    + "group by\n"
+    + "  l_returnflag,\n"
+    + "  l_linestatus\n"
+    + "order by\n"
+    + "  l_returnflag,\n"
+    + "  l_linestatus\n";
+
+var plan = spark.sql(q1);

 long start = System.nanoTime();
 var results = plan.collectAsList();