-
Notifications
You must be signed in to change notification settings - Fork 398
Expand file tree
/
Copy pathSparkToStringVisitor.java
More file actions
121 lines (113 loc) · 3.9 KB
/
SparkToStringVisitor.java
File metadata and controls
121 lines (113 loc) · 3.9 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
package sqlancer.spark;
import sqlancer.common.ast.newast.NewToStringVisitor;
import sqlancer.common.ast.newast.TableReferenceNode;
import sqlancer.spark.ast.SparkCastOperation;
import sqlancer.spark.ast.SparkConstant;
import sqlancer.spark.ast.SparkExpression;
import sqlancer.spark.ast.SparkJoin;
import sqlancer.spark.ast.SparkSelect;
/**
 * Renders Spark SQL AST nodes into their textual SQL form. Generic node kinds
 * (columns, operators, literals handled upstream) are rendered by
 * {@link NewToStringVisitor}; this subclass handles the Spark-specific nodes.
 */
public class SparkToStringVisitor extends NewToStringVisitor<SparkExpression> {

    /**
     * Dispatches a Spark-specific node to its dedicated renderer.
     *
     * @param expr the node to append to the internal buffer
     * @throws AssertionError if the concrete node type has no renderer here
     */
    @Override
    public void visitSpecific(SparkExpression expr) {
        if (expr instanceof SparkConstant) {
            visit((SparkConstant) expr);
        } else if (expr instanceof SparkSelect) {
            visit((SparkSelect) expr);
        } else if (expr instanceof SparkJoin) {
            visit((SparkJoin) expr);
        } else if (expr instanceof SparkCastOperation) {
            visit((SparkCastOperation) expr);
        } else {
            throw new AssertionError(expr.getClass());
        }
    }

    // Constants know how to print themselves; delegate to their toString().
    private void visit(SparkConstant constant) {
        sb.append(constant.toString());
    }

    // Renders a SELECT statement clause by clause; each optional clause is
    // emitted only when present on the AST node.
    private void visit(SparkSelect select) {
        sb.append("SELECT ");
        if (select.isDistinct()) {
            sb.append("DISTINCT ");
        }
        visit(select.getFetchColumns());
        sb.append(" FROM ");
        visit(select.getFromList());
        // NOTE(review): a comma immediately before a JOIN keyword ("FROM t1,
        // INNER JOIN t2") is not valid SQL. This separator is only correct if
        // each join entry renders its own left-hand table, which the
        // visit(SparkJoin) below does not appear to do — confirm against the
        // SparkJoin AST definition and the query generator.
        if (!select.getFromList().isEmpty() && !select.getJoinList().isEmpty()) {
            sb.append(", ");
        }
        if (!select.getJoinList().isEmpty()) {
            visit(select.getJoinList());
        }
        if (select.getWhereClause() != null) {
            sb.append(" WHERE ");
            visit(select.getWhereClause());
        }
        if (!select.getGroupByExpressions().isEmpty()) {
            sb.append(" GROUP BY ");
            visit(select.getGroupByExpressions());
        }
        if (select.getHavingClause() != null) {
            sb.append(" HAVING ");
            visit(select.getHavingClause());
        }
        if (!select.getOrderByClauses().isEmpty()) {
            sb.append(" ORDER BY ");
            visit(select.getOrderByClauses());
        }
        if (select.getLimitClause() != null) {
            sb.append(" LIMIT ");
            visit(select.getLimitClause());
        }
        // Spark supports OFFSET, though strictly usually with LIMIT or in newer
        // versions
        if (select.getOffsetClause() != null) {
            sb.append(" OFFSET ");
            visit(select.getOffsetClause());
        }
    }

    // Renders the join keyword, the right-hand table, and the optional ON
    // predicate. NOTE(review): the left-hand table is never emitted here —
    // presumably it lives in the select's FROM list; verify, since combined
    // with the ", " separator above the overall FROM clause may be malformed.
    private void visit(SparkJoin join) {
        switch (join.getJoinType()) {
        case INNER:
            sb.append(" INNER JOIN ");
            break;
        case LEFT_OUTER:
            sb.append(" LEFT JOIN ");
            break;
        case RIGHT_OUTER:
            sb.append(" RIGHT JOIN ");
            break;
        case FULL_OUTER:
            sb.append(" FULL JOIN ");
            break;
        case LEFT_SEMI:
            sb.append(" LEFT SEMI JOIN ");
            break;
        // Spark also supports LEFT ANTI, which Hive might lack in some older versions
        case LEFT_ANTI:
            sb.append(" LEFT ANTI JOIN ");
            break;
        case CROSS:
            sb.append(" CROSS JOIN ");
            break;
        default:
            throw new UnsupportedOperationException("Join type not supported in Spark visitor: " + join.getJoinType());
        }
        // Safe by construction: the generator only stores table references on
        // the right-hand side of a join; suppressed at the smallest scope.
        @SuppressWarnings("unchecked")
        TableReferenceNode<SparkExpression, SparkSchema.SparkTable> rightTable = (TableReferenceNode<SparkExpression, SparkSchema.SparkTable>) join
                .getRightTable();
        visit(rightTable);
        if (join.getOnClause() != null) {
            sb.append(" ON ");
            visit(join.getOnClause());
        }
    }

    // Renders "CAST(expr AS type)" using Spark's CAST syntax.
    private void visit(SparkCastOperation cast) {
        sb.append("CAST(");
        visit(cast.getExpression());
        sb.append(" AS ");
        sb.append(cast.getType());
        sb.append(")");
    }

    /**
     * Convenience entry point: renders {@code expr} with a fresh visitor.
     *
     * @param expr the expression to render
     * @return the SQL text accumulated by the visitor
     */
    public static String asString(SparkExpression expr) {
        SparkToStringVisitor visitor = new SparkToStringVisitor();
        visitor.visit(expr);
        return visitor.get();
    }
}