diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
index 3bb93ed60d1c..59df850d34d5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
@@ -1705,7 +1705,8 @@ private String generateJDOFilter(org.apache.hadoop.hive.metastore.api.Table tabl
     assert table != null;
     ExpressionTree.FilterBuilder filterBuilder = new ExpressionTree.FilterBuilder(true);
     Map<String, Object> params = new HashMap<>();
-    exprTree.generateJDOFilterFragment(conf, params, filterBuilder, table.getPartitionKeys());
+    exprTree.accept(new ExpressionTree.JDOFilterGenerator(conf,
+        table.getPartitionKeys(), filterBuilder, params));
     StringBuilder stringBuilder = new StringBuilder(filterBuilder.getFilter());
     // replace leading &&
     stringBuilder.replace(0, 4, "");
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
index 97956660791a..b6004640e7aa 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
@@ -1323,7 +1323,7 @@ public static FilterType fromClass(Object value){
     @Override
     public void visit(LeafNode node) throws MetaException {
       int partColCount = partitionKeys.size();
-      int partColIndex = node.getPartColIndexForFilter(partitionKeys, filterBuffer);
+      int partColIndex = LeafNode.getPartColIndexForFilter(node.keyName, partitionKeys, filterBuffer);
       if (filterBuffer.hasError()) {
         return;
       }
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 436ebd932acc..cfbf63e2c81e 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -4905,7 +4905,8 @@ private String makeQueryFilterString(String catName, String dbName, Table table,
       params.put("catName", catName);
     }
 
-    tree.generateJDOFilterFragment(getConf(), params, queryBuilder, table != null ? table.getPartitionKeys() : null);
+    tree.accept(new ExpressionTree.JDOFilterGenerator(getConf(),
+        table != null ? table.getPartitionKeys() : null, queryBuilder, params));
     if (queryBuilder.hasError()) {
       assert !isValidatedFilter;
       LOG.debug("JDO filter pushdown cannot be used: {}", queryBuilder.getErrorMessage());
@@ -4925,7 +4926,7 @@ private String makeQueryFilterString(String catName, String dbName, String tblNa
     params.put("t1", tblName);
     params.put("t2", dbName);
     params.put("t3", catName);
-    tree.generateJDOFilterFragment(getConf(), params, queryBuilder, partitionKeys);
+    tree.accept(new ExpressionTree.JDOFilterGenerator(getConf(), partitionKeys, queryBuilder, params));
     if (queryBuilder.hasError()) {
       assert !isValidatedFilter;
       LOG.debug("JDO filter pushdown cannot be used: {}", queryBuilder.getErrorMessage());
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java
index f2f91cbedfb7..e03273060de3 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java
@@ -219,34 +219,6 @@ protected void accept(TreeVisitor visitor) throws MetaException {
       visitor.visit(this);
     }
 
-    /**
-     * Generates a JDO filter statement
-     * @param params
-     *          A map of parameter key to values for the filter statement.
-     * @param filterBuffer The filter builder that is used to build filter.
-     * @param partitionKeys
-     * @throws MetaException
-     */
-    public void generateJDOFilter(Configuration conf,
-        Map<String, Object> params, FilterBuilder filterBuffer, List<FieldSchema> partitionKeys) throws MetaException {
-      if (filterBuffer.hasError()) return;
-      if (lhs != null) {
-        filterBuffer.append (" (");
-        lhs.generateJDOFilter(conf, params, filterBuffer, partitionKeys);
-
-        if (rhs != null) {
-          if( andOr == LogicalOperator.AND ) {
-            filterBuffer.append(" && ");
-          } else {
-            filterBuffer.append(" || ");
-          }
-
-          rhs.generateJDOFilter(conf, params, filterBuffer, partitionKeys);
-        }
-        filterBuffer.append (") ");
-      }
-    }
-
     @Override
     public String toString() {
       return "TreeNode{" +
@@ -263,10 +235,11 @@ public String toString() {
   public static class LeafNode extends TreeNode {
     public String keyName;
     public Operator operator;
-    /** Constant expression side of the operator. Can currently be a String or a Long. */
+    /**
+     * Constant expression side of the operator. Can currently be a String or a Long.
+     */
     public Object value;
     public boolean isReverseOrder = false;
-    private static final String PARAM_PREFIX = "hive_filter_param_";
 
     @Override
     protected void accept(TreeVisitor visitor) throws MetaException {
@@ -274,8 +247,100 @@ protected void accept(TreeVisitor visitor) throws MetaException {
     }
 
     @Override
-    public void generateJDOFilter(Configuration conf, Map<String, Object> params,
-        FilterBuilder filterBuilder, List<FieldSchema> partitionKeys) throws MetaException {
+    public String toString() {
+      return "LeafNode{" +
+          "keyName='" + keyName + '\'' +
+          ", operator='" + operator + '\'' +
+          ", value=" + value +
+          (isReverseOrder ? ", isReverseOrder=true" : "") +
+          '}';
+    }
+
+    /**
+     * Get partition column index in the table partition column list that
+     * corresponds to the key that is being filtered on by this tree node.
+     * @param partitionKeys list of partition keys.
+     * @param filterBuilder filter builder used to report error, if any.
+     * @return The index.
+     */
+    public static int getPartColIndexForFilter(String partitionKeyName,
+        List<FieldSchema> partitionKeys, FilterBuilder filterBuilder) throws MetaException {
+      assert (partitionKeys.size() > 0);
+      int partitionColumnIndex;
+      for (partitionColumnIndex = 0; partitionColumnIndex < partitionKeys.size();
+          ++partitionColumnIndex) {
+        if (partitionKeys.get(partitionColumnIndex).getName().equalsIgnoreCase(partitionKeyName)) {
+          break;
+        }
+      }
+      if( partitionColumnIndex == partitionKeys.size()) {
+        filterBuilder.setError("Specified key <" + partitionKeyName +
+            "> is not a partitioning key for the table");
+        return -1;
+      }
+
+      return partitionColumnIndex;
+    }
+  }
+
+  /**
+   * Generate the JDOQL filter for the given expression tree
+   */
+  public static class JDOFilterGenerator extends TreeVisitor {
+
+    private static final String PARAM_PREFIX = "hive_filter_param_";
+
+    private Configuration conf;
+    private List<FieldSchema> partitionKeys;
+    // the filter builder to append to.
+    private FilterBuilder filterBuilder;
+    // the input map which is updated with the parameterized values.
+    // Keys are the parameter names and values are the parameter values
+    private Map<String, Object> params;
+    private boolean onParsing = false;
+    private String keyName;
+    private Object value;
+    private Operator operator;
+    private boolean isReverseOrder;
+
+    public JDOFilterGenerator(Configuration conf, List<FieldSchema> partitionKeys,
+        FilterBuilder filterBuilder, Map<String, Object> params) {
+      this.conf = conf;
+      this.partitionKeys = partitionKeys;
+      this.filterBuilder = filterBuilder;
+      this.params = params;
+    }
+
+    private void beforeParsing() throws MetaException {
+      if (!onParsing && !filterBuilder.getFilter().isEmpty()) {
+        filterBuilder.append(" && ");
+      }
+      onParsing = true;
+    }
+
+    @Override
+    protected void beginTreeNode(TreeNode node) throws MetaException {
+      beforeParsing();
+      filterBuilder.append("( ");
+    }
+
+    @Override
+    protected void midTreeNode(TreeNode node) throws MetaException {
+      filterBuilder.append((node.getAndOr() == LogicalOperator.AND) ? " && " : " || ");
+    }
+
+    @Override
+    protected void endTreeNode(TreeNode node) throws MetaException {
+      filterBuilder.append(") ");
+    }
+
+    @Override
+    protected void visit(LeafNode node) throws MetaException {
+      beforeParsing();
+      keyName = node.keyName;
+      operator = node.operator;
+      value = node.value;
+      isReverseOrder = node.isReverseOrder;
       if (partitionKeys != null) {
         generateJDOFilterOverPartitions(conf, params, filterBuilder, partitionKeys);
       } else {
@@ -283,6 +348,11 @@ public void generateJDOFilter(Configuration conf, Map<String, Object> params,
         generateJDOFilterGeneral(params, filterBuilder);
       }
     }
+    @Override
+    protected boolean shouldStop() {
+      return filterBuilder.hasError();
+    }
+
     //can only support "=" and "!=" for now, because our JDO lib is buggy when
     // using objects from map.get()
     private static final Set<Operator> TABLE_FILTER_OPS = Sets.newHashSet(
@@ -360,7 +430,7 @@ private void generateJDOFilterGeneral(Map<String, Object> params,
     private void generateJDOFilterOverPartitions(Configuration conf, Map<String, Object> params,
        FilterBuilder filterBuilder, List<FieldSchema> partitionKeys) throws MetaException {
       int partitionColumnCount = partitionKeys.size();
-      int partitionColumnIndex = getPartColIndexForFilter(partitionKeys, filterBuilder);
+      int partitionColumnIndex = LeafNode.getPartColIndexForFilter(keyName, partitionKeys, filterBuilder);
       if (filterBuilder.hasError()) return;
 
       boolean canPushDownIntegral =
@@ -434,32 +504,6 @@ public boolean canJdoUseStringsWithIntegral() {
           || (operator == Operator.NOTEQUALS2);
     }
 
-    /**
-     * Get partition column index in the table partition column list that
-     * corresponds to the key that is being filtered on by this tree node.
-     * @param partitionKeys list of partition keys.
-     * @param filterBuilder filter builder used to report error, if any.
-     * @return The index.
-     */
-    public int getPartColIndexForFilter(
-        List<FieldSchema> partitionKeys, FilterBuilder filterBuilder) throws MetaException {
-      assert (partitionKeys.size() > 0);
-      int partitionColumnIndex;
-      for (partitionColumnIndex = 0; partitionColumnIndex < partitionKeys.size();
-          ++partitionColumnIndex) {
-        if (partitionKeys.get(partitionColumnIndex).getName().equalsIgnoreCase(keyName)) {
-          break;
-        }
-      }
-      if( partitionColumnIndex == partitionKeys.size()) {
-        filterBuilder.setError("Specified key <" + keyName +
-            "> is not a partitioning key for the table");
-        return -1;
-      }
-
-      return partitionColumnIndex;
-    }
-
     /**
      * Validates and gets the query parameter for JDO filter pushdown based on the column
      * and the constant stored in this node.
@@ -499,16 +543,6 @@ private String getJdoFilterPushdownParam(int partColIndex,
       return isStringValue ? (String)val : Long.toString((Long)val);
     }
-
-    @Override
-    public String toString() {
-      return "LeafNode{" +
-          "keyName='" + keyName + '\'' +
-          ", operator='" + operator + '\'' +
-          ", value=" + value +
-          (isReverseOrder ? ", isReverseOrder=true" : "") +
-          '}';
-    }
   }
 
   public void accept(TreeVisitor treeVisitor) throws MetaException {
@@ -618,21 +652,4 @@ public void addLeafNode(LeafNode newNode) {
     nodeStack.push(newNode);
   }
 
-  /** Generate the JDOQL filter for the given expression tree
-   * @param params the input map which is updated with the
-   *          the parameterized values. Keys are the parameter names and values
-   *          are the parameter values
-   * @param filterBuilder the filter builder to append to.
-   * @param partitionKeys
-   */
-  public void generateJDOFilterFragment(Configuration conf,
-      Map<String, Object> params, FilterBuilder filterBuilder, List<FieldSchema> partitionKeys) throws MetaException {
-    if (root == null) {
-      return;
-    }
-
-    filterBuilder.append(" && ( ");
-    root.generateJDOFilter(conf, params, filterBuilder, partitionKeys);
-    filterBuilder.append(" )");
-  }
 }
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/parser/PartFilterVisitor.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/parser/PartFilterVisitor.java
index 5d68d593c838..1ce0b27eabad 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/parser/PartFilterVisitor.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/parser/PartFilterVisitor.java
@@ -17,8 +17,6 @@
  */
 package org.apache.hadoop.hive.metastore.parser;
 
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.time.format.DateTimeParseException;
 import java.util.ArrayList;
 import java.util.List;
@@ -231,25 +229,27 @@ public Long visitIntegerLiteral(PartitionFilterParser.IntegerLiteralContext ctx)
   }
 
   @Override
-  public Date visitDateLiteral(PartitionFilterParser.DateLiteralContext ctx) {
+  public String visitDateLiteral(PartitionFilterParser.DateLiteralContext ctx) {
     PartitionFilterParser.DateContext date = ctx.date();
     String dateValue = unquoteString(date.value.getText());
     try {
-      return MetaStoreUtils.convertStringToDate(dateValue);
+      MetaStoreUtils.convertStringToDate(dateValue);
    } catch (DateTimeParseException e) {
       throw new ParseCancellationException(e.getMessage());
     }
+    return dateValue;
   }
 
   @Override
-  public Timestamp visitTimestampLiteral(PartitionFilterParser.TimestampLiteralContext ctx) {
+  public String visitTimestampLiteral(PartitionFilterParser.TimestampLiteralContext ctx) {
     PartitionFilterParser.TimestampContext timestamp = ctx.timestamp();
     String timestampValue = unquoteString(timestamp.value.getText());
     try {
-      return MetaStoreUtils.convertStringToTimestamp(timestampValue);
+      MetaStoreUtils.convertStringToTimestamp(timestampValue);
     } catch (DateTimeParseException e) {
       throw new ParseCancellationException(e.getMessage());
     }
+    return timestampValue;
   }
 
   @Override
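
Not part of the patch: a minimal sketch of how a caller migrates from the removed generateJDOFilterFragment to the JDOFilterGenerator visitor introduced above, mirroring the ObjectStore and SessionHiveMetaStoreClient hunks. The wrapper class and method name here are hypothetical; only the ExpressionTree, FilterBuilder, and JDOFilterGenerator APIs that appear in the hunks are assumed.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.parser.ExpressionTree;

// Hypothetical helper, for illustration only.
class JdoFilterExample {
  // Builds a JDOQL filter fragment for an already-parsed partition filter tree.
  static String buildFilter(Configuration conf, ExpressionTree tree,
      List<FieldSchema> partitionKeys) throws MetaException {
    ExpressionTree.FilterBuilder filterBuilder = new ExpressionTree.FilterBuilder(true);
    Map<String, Object> params = new HashMap<>();
    // Old call site (removed by this patch):
    //   tree.generateJDOFilterFragment(conf, params, filterBuilder, partitionKeys);
    // New call site: walk the tree with the visitor; it prepends " && " itself only
    // when the builder already holds a filter, instead of always wrapping in " && ( ... )".
    tree.accept(new ExpressionTree.JDOFilterGenerator(conf, partitionKeys, filterBuilder, params));
    if (filterBuilder.hasError()) {
      // JDO pushdown cannot be used; the real callers log and fall back instead.
      throw new MetaException("JDO filter pushdown cannot be used: " + filterBuilder.getErrorMessage());
    }
    return filterBuilder.getFilter();
  }
}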