part1
dengzhhu653 committed Dec 11, 2023
1 parent 7e5b5de commit e7e12c6
Showing 5 changed files with 115 additions and 96 deletions.
@@ -1705,7 +1705,8 @@ private String generateJDOFilter(org.apache.hadoop.hive.metastore.api.Table tabl
assert table != null;
ExpressionTree.FilterBuilder filterBuilder = new ExpressionTree.FilterBuilder(true);
Map<String, Object> params = new HashMap<>();
exprTree.generateJDOFilterFragment(conf, params, filterBuilder, table.getPartitionKeys());
exprTree.accept(new ExpressionTree.JDOFilterGenerator(conf,
table.getPartitionKeys(), filterBuilder, params));
StringBuilder stringBuilder = new StringBuilder(filterBuilder.getFilter());
// replace leading &&
stringBuilder.replace(0, 4, "");
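This hunk swaps the removed one-shot generateJDOFilterFragment call for the visitor pattern: the caller now constructs a JDOFilterGenerator and hands it to ExpressionTree.accept. A minimal sketch of the new calling convention, assuming a parsed exprTree plus the conf and table already in scope in this method:

    ExpressionTree.FilterBuilder filterBuilder = new ExpressionTree.FilterBuilder(true);
    Map<String, Object> params = new HashMap<>();
    // The generator appends the parameterized JDOQL fragment to filterBuilder and
    // fills params with hive_filter_param_N -> constant bindings as it walks the tree.
    exprTree.accept(new ExpressionTree.JDOFilterGenerator(conf,
        table.getPartitionKeys(), filterBuilder, params));
    if (!filterBuilder.hasError()) {
      String jdoFilter = filterBuilder.getFilter();
    }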
@@ -1323,7 +1323,7 @@ public static FilterType fromClass(Object value){
@Override
public void visit(LeafNode node) throws MetaException {
int partColCount = partitionKeys.size();
int partColIndex = node.getPartColIndexForFilter(partitionKeys, filterBuffer);
int partColIndex = LeafNode.getPartColIndexForFilter(node.keyName, partitionKeys, filterBuffer);
if (filterBuffer.hasError()) {
return;
}
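The lookup helper invoked here, getPartColIndexForFilter, is now a static method on LeafNode keyed by the column name, so visitors outside the node class (such as this one) can reuse it. A hedged usage sketch with illustrative partition keys:

    // The match is case-insensitive; a miss returns -1 and reports the error
    // through the FilterBuilder instead of throwing.
    List<FieldSchema> partitionKeys = Arrays.asList(
        new FieldSchema("ds", "string", null),
        new FieldSchema("hr", "string", null));
    ExpressionTree.FilterBuilder buf = new ExpressionTree.FilterBuilder(true);
    int idx = ExpressionTree.LeafNode.getPartColIndexForFilter("HR", partitionKeys, buf);
    // idx == 1; an unknown key such as "bucket" would leave buf.hasError() == true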
@@ -4905,7 +4905,8 @@ private String makeQueryFilterString(String catName, String dbName, Table table,
params.put("catName", catName);
}

tree.generateJDOFilterFragment(getConf(), params, queryBuilder, table != null ? table.getPartitionKeys() : null);
tree.accept(new ExpressionTree.JDOFilterGenerator(getConf(),
table != null ? table.getPartitionKeys() : null, queryBuilder, params));
if (queryBuilder.hasError()) {
assert !isValidatedFilter;
LOG.debug("JDO filter pushdown cannot be used: {}", queryBuilder.getErrorMessage());
@@ -4925,7 +4926,7 @@ private String makeQueryFilterString(String catName, String dbName, String tblNa
params.put("t1", tblName);
params.put("t2", dbName);
params.put("t3", catName);
tree.generateJDOFilterFragment(getConf(), params, queryBuilder, partitionKeys);
tree.accept(new ExpressionTree.JDOFilterGenerator(getConf(), partitionKeys, queryBuilder, params));
if (queryBuilder.hasError()) {
assert !isValidatedFilter;
LOG.debug("JDO filter pushdown cannot be used: {}", queryBuilder.getErrorMessage());
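Both makeQueryFilterString overloads in this file now funnel through the same visitor; only the partition-key argument differs. Per the generator code below, a null key list switches it from partition filtering to table-name filtering. A hedged sketch of that table path, variable names assumed:

    Map<String, Object> params = new HashMap<>();
    params.put("t1", tblName);
    params.put("t2", dbName);
    params.put("t3", catName);
    ExpressionTree.FilterBuilder queryBuilder = new ExpressionTree.FilterBuilder(true);
    // Passing null partition keys routes JDOFilterGenerator.visit(LeafNode)
    // into generateJDOFilterOverTables(params, queryBuilder).
    tree.accept(new ExpressionTree.JDOFilterGenerator(getConf(), null, queryBuilder, params));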
@@ -219,34 +219,6 @@ protected void accept(TreeVisitor visitor) throws MetaException {
visitor.visit(this);
}

/**
* Generates a JDO filter statement
* @param params
* A map of parameter key to values for the filter statement.
* @param filterBuffer The filter builder that is used to build filter.
* @param partitionKeys
* @throws MetaException
*/
public void generateJDOFilter(Configuration conf,
Map<String, Object> params, FilterBuilder filterBuffer, List<FieldSchema> partitionKeys) throws MetaException {
if (filterBuffer.hasError()) return;
if (lhs != null) {
filterBuffer.append (" (");
lhs.generateJDOFilter(conf, params, filterBuffer, partitionKeys);

if (rhs != null) {
if( andOr == LogicalOperator.AND ) {
filterBuffer.append(" && ");
} else {
filterBuffer.append(" || ");
}

rhs.generateJDOFilter(conf, params, filterBuffer, partitionKeys);
}
filterBuffer.append (") ");
}
}

@Override
public String toString() {
return "TreeNode{" +
@@ -263,26 +235,124 @@ public String toString() {
public static class LeafNode extends TreeNode {
public String keyName;
public Operator operator;
/** Constant expression side of the operator. Can currently be a String or a Long. */
/**
* Constant expression side of the operator. Can currently be a String or a Long.
*/
public Object value;
public boolean isReverseOrder = false;
private static final String PARAM_PREFIX = "hive_filter_param_";

@Override
protected void accept(TreeVisitor visitor) throws MetaException {
visitor.visit(this);
}

@Override
public void generateJDOFilter(Configuration conf, Map<String, Object> params,
FilterBuilder filterBuilder, List<FieldSchema> partitionKeys) throws MetaException {
public String toString() {
return "LeafNode{" +
"keyName='" + keyName + '\'' +
", operator='" + operator + '\'' +
", value=" + value +
(isReverseOrder ? ", isReverseOrder=true" : "") +
'}';
}

/**
* Get partition column index in the table partition column list that
* corresponds to the key that is being filtered on by this tree node.
* @param partitionKeys list of partition keys.
* @param filterBuilder filter builder used to report error, if any.
* @return The index.
*/
public static int getPartColIndexForFilter(String partitionKeyName,
List<FieldSchema> partitionKeys, FilterBuilder filterBuilder) throws MetaException {
assert (partitionKeys.size() > 0);
int partitionColumnIndex;
for (partitionColumnIndex = 0; partitionColumnIndex < partitionKeys.size();
++partitionColumnIndex) {
if (partitionKeys.get(partitionColumnIndex).getName().equalsIgnoreCase(partitionKeyName)) {
break;
}
}
if( partitionColumnIndex == partitionKeys.size()) {
filterBuilder.setError("Specified key <" + partitionKeyName +
"> is not a partitioning key for the table");
return -1;
}

return partitionColumnIndex;
}
}

/**
* Generate the JDOQL filter for the given expression tree
*/
public static class JDOFilterGenerator extends TreeVisitor {

private static final String PARAM_PREFIX = "hive_filter_param_";

private Configuration conf;
private List<FieldSchema> partitionKeys;
// the filter builder to append to.
private FilterBuilder filterBuilder;
// the input map which is updated with the parameterized values.
// Keys are the parameter names and values are the parameter values
private Map<String, Object> params;
private boolean onParsing = false;
private String keyName;
private Object value;
private Operator operator;
private boolean isReverseOrder;

public JDOFilterGenerator(Configuration conf, List<FieldSchema> partitionKeys,
FilterBuilder filterBuilder, Map<String, Object> params) {
this.conf = conf;
this.partitionKeys = partitionKeys;
this.filterBuilder = filterBuilder;
this.params = params;
}

private void beforeParsing() throws MetaException {
if (!onParsing && !filterBuilder.getFilter().isEmpty()) {
filterBuilder.append(" && ");
}
onParsing = true;
}

@Override
protected void beginTreeNode(TreeNode node) throws MetaException {
beforeParsing();
filterBuilder.append("( ");
}

@Override
protected void midTreeNode(TreeNode node) throws MetaException {
filterBuilder.append((node.getAndOr() == LogicalOperator.AND) ? " && " : " || ");
}

@Override
protected void endTreeNode(TreeNode node) throws MetaException {
filterBuilder.append(") ");
}

@Override
protected void visit(LeafNode node) throws MetaException {
beforeParsing();
keyName = node.keyName;
operator = node.operator;
value = node.value;
isReverseOrder = node.isReverseOrder;
if (partitionKeys != null) {
generateJDOFilterOverPartitions(conf, params, filterBuilder, partitionKeys);
} else {
generateJDOFilterOverTables(params, filterBuilder);
}
}

@Override
protected boolean shouldStop() {
return filterBuilder.hasError();
}

//can only support "=" and "!=" for now, because our JDO lib is buggy when
// using objects from map.get()
private static final Set<Operator> TABLE_FILTER_OPS = Sets.newHashSet(
@@ -360,7 +430,7 @@ private void generateJDOFilterGeneral(Map<String, Object> params,
private void generateJDOFilterOverPartitions(Configuration conf,
Map<String, Object> params, FilterBuilder filterBuilder, List<FieldSchema> partitionKeys) throws MetaException {
int partitionColumnCount = partitionKeys.size();
int partitionColumnIndex = getPartColIndexForFilter(partitionKeys, filterBuilder);
int partitionColumnIndex = LeafNode.getPartColIndexForFilter(keyName, partitionKeys, filterBuilder);
if (filterBuilder.hasError()) return;

boolean canPushDownIntegral =
@@ -434,32 +504,6 @@ public boolean canJdoUseStringsWithIntegral() {
|| (operator == Operator.NOTEQUALS2);
}

/**
* Get partition column index in the table partition column list that
* corresponds to the key that is being filtered on by this tree node.
* @param partitionKeys list of partition keys.
* @param filterBuilder filter builder used to report error, if any.
* @return The index.
*/
public int getPartColIndexForFilter(
List<FieldSchema> partitionKeys, FilterBuilder filterBuilder) throws MetaException {
assert (partitionKeys.size() > 0);
int partitionColumnIndex;
for (partitionColumnIndex = 0; partitionColumnIndex < partitionKeys.size();
++partitionColumnIndex) {
if (partitionKeys.get(partitionColumnIndex).getName().equalsIgnoreCase(keyName)) {
break;
}
}
if( partitionColumnIndex == partitionKeys.size()) {
filterBuilder.setError("Specified key <" + keyName +
"> is not a partitioning key for the table");
return -1;
}

return partitionColumnIndex;
}

/**
* Validates and gets the query parameter for JDO filter pushdown based on the column
* and the constant stored in this node.
@@ -499,16 +543,6 @@ private String getJdoFilterPushdownParam(int partColIndex,

return isStringValue ? (String)val : Long.toString((Long)val);
}

@Override
public String toString() {
return "LeafNode{" +
"keyName='" + keyName + '\'' +
", operator='" + operator + '\'' +
", value=" + value +
(isReverseOrder ? ", isReverseOrder=true" : "") +
'}';
}
}

public void accept(TreeVisitor treeVisitor) throws MetaException {
@@ -618,21 +652,4 @@ public void addLeafNode(LeafNode newNode) {
nodeStack.push(newNode);
}

/** Generate the JDOQL filter for the given expression tree
* @param params the input map which is updated with the
* the parameterized values. Keys are the parameter names and values
* are the parameter values
* @param filterBuilder the filter builder to append to.
* @param partitionKeys
*/
public void generateJDOFilterFragment(Configuration conf,
Map<String, Object> params, FilterBuilder filterBuilder, List<FieldSchema> partitionKeys) throws MetaException {
if (root == null) {
return;
}

filterBuilder.append(" && ( ");
root.generateJDOFilter(conf, params, filterBuilder, partitionKeys);
filterBuilder.append(" )");
}
}
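Taken together, JDOFilterGenerator reproduces what the removed generateJDOFilter/generateJDOFilterFragment pair produced, but as a TreeVisitor walk: beginTreeNode opens "( ", midTreeNode joins the children with " && " or " || ", endTreeNode closes ") ", and visit(LeafNode) emits each comparison, with beforeParsing prepending " && " only when the builder already holds an earlier fragment. A hedged end-to-end sketch for a tree equivalent to ds = "2023-12-11" and hr = "01" (tree construction elided):

    ExpressionTree.FilterBuilder builder = new ExpressionTree.FilterBuilder(true);
    Map<String, Object> params = new HashMap<>();
    tree.accept(new ExpressionTree.JDOFilterGenerator(conf, table.getPartitionKeys(),
        builder, params));
    // builder.getFilter() now holds the parenthesized, parameterized JDOQL
    // fragment; for string partition columns, params maps hive_filter_param_0
    // and hive_filter_param_1 to "2023-12-11" and "01" respectively (the exact
    // fragment shape comes from generateJDOFilterOverPartitions, shown only
    // partially above).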
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.hive.metastore.parser;

import java.sql.Date;
import java.sql.Timestamp;
import java.time.format.DateTimeParseException;
import java.util.ArrayList;
import java.util.List;
@@ -231,25 +229,27 @@ public Long visitIntegerLiteral(PartitionFilterParser.IntegerLiteralContext ctx)
}

@Override
public Date visitDateLiteral(PartitionFilterParser.DateLiteralContext ctx) {
public String visitDateLiteral(PartitionFilterParser.DateLiteralContext ctx) {
PartitionFilterParser.DateContext date = ctx.date();
String dateValue = unquoteString(date.value.getText());
try {
return MetaStoreUtils.convertStringToDate(dateValue);
MetaStoreUtils.convertStringToDate(dateValue);
} catch (DateTimeParseException e) {
throw new ParseCancellationException(e.getMessage());
}
return dateValue;
}

@Override
public Timestamp visitTimestampLiteral(PartitionFilterParser.TimestampLiteralContext ctx) {
public String visitTimestampLiteral(PartitionFilterParser.TimestampLiteralContext ctx) {
PartitionFilterParser.TimestampContext timestamp = ctx.timestamp();
String timestampValue = unquoteString(timestamp.value.getText());
try {
return MetaStoreUtils.convertStringToTimestamp(timestampValue);
MetaStoreUtils.convertStringToTimestamp(timestampValue);
} catch (DateTimeParseException e) {
throw new ParseCancellationException(e.getMessage());
}
return timestampValue;
}

@Override
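The parser-visitor hunks above keep literal validation but stop materializing java.sql.Date/Timestamp values (hence the dropped imports): the literal is parsed only to reject malformed input, and the original string is what enters the expression tree. A minimal sketch of the pattern, mirroring visitDateLiteral:

    String dateValue = unquoteString(date.value.getText()); // e.g. "2023-12-11"
    try {
      // Parse purely for validation; the resulting Date is discarded.
      MetaStoreUtils.convertStringToDate(dateValue);
    } catch (DateTimeParseException e) {
      throw new ParseCancellationException(e.getMessage());
    }
    return dateValue; // the tree now carries the string form of the literal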
