Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Feature] support create/drop/query iceberg view for hive catalog #55448

Merged
merged 1 commit into from
Jan 27, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -188,8 +188,8 @@ public void renameTable(String dbName, String tblName, String newTblName) throws
}

@Override
public boolean createView(ConnectorViewDefinition connectorViewDefinition, boolean replace) {
return delegate.createView(connectorViewDefinition, replace);
public boolean createView(String catalogName, ConnectorViewDefinition connectorViewDefinition, boolean replace) {
return delegate.createView(catalogName, connectorViewDefinition, replace);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@

package com.starrocks.connector.iceberg;

import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.starrocks.catalog.ArrayType;
import com.starrocks.catalog.Column;
Expand All @@ -25,8 +26,11 @@
import com.starrocks.catalog.StructField;
import com.starrocks.catalog.StructType;
import com.starrocks.catalog.Type;
import com.starrocks.connector.ConnectorViewDefinition;
import com.starrocks.connector.exception.StarRocksConnectorException;
import com.starrocks.connector.hive.RemoteFileInputFormat;
import com.starrocks.qe.ConnectContext;
import com.starrocks.server.GlobalStateMgr;
import com.starrocks.thrift.TIcebergColumnStats;
import com.starrocks.thrift.TIcebergDataFile;
import com.starrocks.thrift.TIcebergSchema;
Expand Down Expand Up @@ -425,4 +429,24 @@ public static List<StructField> getPartitionColumns(List<PartitionField> fields,
public static Namespace convertDbNameToNamespace(String dbName) {
return Namespace.of(dbName.split("\\."));
}

/**
 * Builds the property map stored on a newly created Iceberg view: the creating
 * query id, the StarRocks catalog name, the FE version, and (when present) the
 * view comment.
 *
 * @throws StarRocksConnectorException if no connect context is bound to the
 *         current thread (the query id cannot be determined without one)
 */
public static Map<String, String> buildViewProperties(ConnectorViewDefinition definition, String catalogName) {
    ConnectContext connectContext = ConnectContext.get();
    if (connectContext == null) {
        throw new StarRocksConnectorException("not found connect context when building iceberg view properties");
    }

    String queryId = connectContext.getQueryId().toString();

    // Must be a mutable map: the previous ImmutableMap.of(...) threw
    // UnsupportedOperationException on the conditional put() below whenever
    // the view definition carried a comment.
    Map<String, String> properties = new java.util.HashMap<>();
    properties.put("queryId", queryId);
    properties.put("starrocksCatalog", catalogName);
    properties.put("starrocksVersion", GlobalStateMgr.getCurrentState().getNodeMgr().getMySelf().getFeVersion());

    if (!Strings.isNullOrEmpty(definition.getComment())) {
        properties.put(IcebergMetadata.COMMENT, definition.getComment());
    }

    return properties;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import com.starrocks.connector.exception.StarRocksConnectorException;
import com.starrocks.memory.MemoryTrackable;
import org.apache.iceberg.FileScanTask;
import org.apache.iceberg.MetadataTableType;
import org.apache.iceberg.MetadataTableUtils;
import org.apache.iceberg.PartitionSpec;
import org.apache.iceberg.PartitionsTable;
Expand All @@ -35,10 +36,12 @@
import org.apache.iceberg.Table;
import org.apache.iceberg.TableScan;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.exceptions.NoSuchTableException;
import org.apache.iceberg.io.CloseableIterable;
import org.apache.iceberg.util.StructProjection;
import org.apache.iceberg.view.View;
import org.apache.iceberg.view.ViewBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

Expand All @@ -49,6 +52,10 @@
import java.util.Map;
import java.util.concurrent.ExecutorService;

import static com.google.common.base.Preconditions.checkArgument;
import static com.starrocks.connector.iceberg.IcebergApiConverter.buildViewProperties;
import static com.starrocks.connector.iceberg.IcebergApiConverter.convertDbNameToNamespace;
import static com.starrocks.connector.iceberg.IcebergMetadata.LOCATION_PROPERTY;
import static org.apache.iceberg.StarRocksIcebergTableScan.newTableScanContext;

public interface IcebergCatalog extends MemoryTrackable {
Expand Down Expand Up @@ -99,7 +106,31 @@ default boolean tableExists(String dbName, String tableName) throws StarRocksCon
}
}

default boolean createView(ConnectorViewDefinition connectorViewDefinition, boolean replace) {
default boolean createView(String catalogName, ConnectorViewDefinition connectorViewDefinition, boolean replace) {
return createViewDefault(connectorViewDefinition.getDatabaseName(), connectorViewDefinition, replace);
}

/**
 * Shared view-creation path: converts the StarRocks view definition into an
 * Iceberg view via the catalog-specific {@code getViewBuilder} and creates
 * (or replaces) it under the database's default location.
 */
default boolean createViewDefault(String catalogName, ConnectorViewDefinition definition, boolean replace) {
    Namespace namespace = convertDbNameToNamespace(definition.getDatabaseName());
    String viewName = definition.getViewName();

    ViewBuilder builder = getViewBuilder(TableIdentifier.of(namespace, viewName))
            .withSchema(IcebergApiConverter.toIcebergApiSchema(definition.getColumns()))
            .withQuery("starrocks", definition.getInlineViewDef())
            .withDefaultNamespace(namespace)
            .withDefaultCatalog(definition.getCatalogName())
            .withProperties(buildViewProperties(definition, catalogName))
            .withLocation(defaultTableLocation(namespace, viewName));

    if (replace) {
        builder.createOrReplace();
    } else {
        builder.create();
    }

    return true;
}

/**
 * Returns a builder for creating/replacing the Iceberg view named by
 * {@code identifier}. The base implementation rejects the operation;
 * catalogs that support Iceberg views (e.g. the Hive catalog) override this.
 */
default ViewBuilder getViewBuilder(TableIdentifier identifier) {
    throw new StarRocksConnectorException("This catalog doesn't support creating views");
}

Expand Down Expand Up @@ -132,10 +163,18 @@ default StarRocksIcebergTableScan getTableScan(Table table, StarRocksIcebergTabl
}

/**
 * Derives the default storage location for a table/view from the enclosing
 * database's "location" property.
 *
 * @throws IllegalArgumentException if the namespace has no location set
 */
default String defaultTableLocation(Namespace ns, String tableName) {
    Map<String, String> properties = loadNamespaceMetadata(ns);
    String databaseLocation = properties.get(LOCATION_PROPERTY);
    checkArgument(databaseLocation != null, "location must be set for %s.%s", ns, tableName);

    // Normalize the trailing slash before appending the table name.
    if (databaseLocation.endsWith("/")) {
        return databaseLocation + tableName;
    } else {
        return databaseLocation + "/" + tableName;
    }
}

default Map<String, Object> loadNamespaceMetadata(Namespace ns) {
default Map<String, String> loadNamespaceMetadata(Namespace ns) {
return new HashMap<>();
}

Expand All @@ -152,7 +191,7 @@ default Map<String, Partition> getPartitions(IcebergTable icebergTable, long sna
Table nativeTable = icebergTable.getNativeTable();
Map<String, Partition> partitionMap = Maps.newHashMap();
PartitionsTable partitionsTable = (PartitionsTable) MetadataTableUtils.
createMetadataTableInstance(nativeTable, org.apache.iceberg.MetadataTableType.PARTITIONS);
createMetadataTableInstance(nativeTable, MetadataTableType.PARTITIONS);
TableScan tableScan = partitionsTable.newScan();
if (snapshotId != -1) {
tableScan = tableScan.useSnapshot(snapshotId);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -306,7 +306,7 @@ public void createView(CreateViewStmt stmt) throws DdlException {
}

ConnectorViewDefinition viewDefinition = ConnectorViewDefinition.fromCreateViewStmt(stmt);
icebergCatalog.createView(viewDefinition, stmt.isReplace());
icebergCatalog.createView(catalogName, viewDefinition, stmt.isReplace());
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.starrocks.catalog.Database;
import com.starrocks.common.Config;
Expand All @@ -41,6 +42,8 @@
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.hive.HiveCatalog;
import org.apache.iceberg.view.View;
import org.apache.iceberg.view.ViewBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

Expand All @@ -52,6 +55,7 @@
import java.util.stream.Collectors;

import static com.starrocks.connector.ConnectorTableId.CONNECTOR_ID_GENERATOR;
import static com.starrocks.connector.iceberg.IcebergApiConverter.convertDbNameToNamespace;
import static com.starrocks.connector.iceberg.IcebergCatalogProperties.HIVE_METASTORE_TIMEOUT;
import static com.starrocks.connector.iceberg.IcebergCatalogProperties.HIVE_METASTORE_URIS;
import static com.starrocks.connector.iceberg.IcebergCatalogProperties.ICEBERG_METASTORE_URIS;
Expand Down Expand Up @@ -93,6 +97,12 @@ public IcebergHiveCatalog(String name, Configuration conf, Map<String, String> p
delegate = (HiveCatalog) CatalogUtil.loadCatalog(HiveCatalog.class.getName(), name, copiedProperties, conf);
}

// Test-only constructor: wraps an existing HiveCatalog instead of loading one
// through CatalogUtil, so unit tests can inject a mock/stubbed delegate.
@VisibleForTesting
public IcebergHiveCatalog(HiveCatalog hiveCatalog, Configuration conf) {
    this.delegate = hiveCatalog;
    this.conf = conf;
}

@Override
public IcebergCatalogType getIcebergCatalogType() {
return IcebergCatalogType.HIVE_CATALOG;
Expand Down Expand Up @@ -201,6 +211,26 @@ public void renameTable(String dbName, String tblName, String newTblName) throws
delegate.renameTable(TableIdentifier.of(dbName, tblName), TableIdentifier.of(dbName, newTblName));
}

// Hive-backed Iceberg catalogs support views natively; expose the delegate's
// view builder so IcebergCatalog.createViewDefault can drive the creation.
@Override
public ViewBuilder getViewBuilder(TableIdentifier identifier) {
    return delegate.buildView(identifier);
}

// Drops an Iceberg view through the underlying HiveCatalog. The database
// name may be dotted, hence the namespace conversion.
@Override
public boolean dropView(String dbName, String viewName) {
    Namespace namespace = convertDbNameToNamespace(dbName);
    TableIdentifier viewId = TableIdentifier.of(namespace, viewName);
    return delegate.dropView(viewId);
}

// Loads an Iceberg view definition from the underlying HiveCatalog.
@Override
public View getView(String dbName, String viewName) {
    TableIdentifier viewId = TableIdentifier.of(convertDbNameToNamespace(dbName), viewName);
    return delegate.loadView(viewId);
}

// Snapshots the delegate's namespace (database) properties into an
// immutable copy so callers cannot mutate catalog-owned state.
@Override
public Map<String, String> loadNamespaceMetadata(Namespace ns) {
    Map<String, String> metadata = delegate.loadNamespaceMetadata(ns);
    return ImmutableMap.copyOf(metadata);
}

@Override
public void deleteUncommittedDataFiles(List<String> fileLocations) {
if (fileLocations.isEmpty()) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@
import com.starrocks.common.MetaNotFoundException;
import com.starrocks.connector.ConnectorViewDefinition;
import com.starrocks.connector.exception.StarRocksConnectorException;
import com.starrocks.connector.iceberg.IcebergApiConverter;
import com.starrocks.connector.iceberg.IcebergCatalog;
import com.starrocks.connector.iceberg.IcebergCatalogType;
import com.starrocks.connector.iceberg.cost.IcebergMetricsReporter;
Expand Down Expand Up @@ -54,7 +53,6 @@
import java.util.Map;
import java.util.stream.Collectors;

import static com.google.common.base.Preconditions.checkArgument;
import static com.starrocks.connector.ConnectorTableId.CONNECTOR_ID_GENERATOR;
import static com.starrocks.connector.iceberg.IcebergApiConverter.convertDbNameToNamespace;
import static com.starrocks.connector.iceberg.IcebergCatalogProperties.ICEBERG_CUSTOM_PROPERTIES_PREFIX;
Expand Down Expand Up @@ -226,24 +224,8 @@ public void renameTable(String dbName, String tblName, String newTblName) throws
}

@Override
public boolean createView(ConnectorViewDefinition definition, boolean replace) {
Schema schema = IcebergApiConverter.toIcebergApiSchema(definition.getColumns());
Namespace ns = convertDbNameToNamespace(definition.getDatabaseName());
ViewBuilder viewBuilder = delegate.buildView(TableIdentifier.of(ns, definition.getViewName()));
viewBuilder = viewBuilder.withSchema(schema)
.withQuery("starrocks", definition.getInlineViewDef())
.withDefaultNamespace(ns)
.withDefaultCatalog(definition.getCatalogName())
.withProperties(buildProperties(definition))
.withLocation(defaultTableLocation(ns, definition.getViewName()));

if (replace) {
viewBuilder.createOrReplace();
} else {
viewBuilder.create();
}

return true;
public ViewBuilder getViewBuilder(TableIdentifier identifier) {
return delegate.buildView(identifier);
}

@Override
Expand Down Expand Up @@ -279,20 +261,7 @@ public String toString() {
}

@Override
public String defaultTableLocation(Namespace ns, String tableName) {
Map<String, String> properties = delegate.loadNamespaceMetadata(ns);
String databaseLocation = properties.get(LOCATION_PROPERTY);
checkArgument(databaseLocation != null, "location must be set for %s.%s", ns, tableName);

if (databaseLocation.endsWith("/")) {
return databaseLocation + tableName;
} else {
return databaseLocation + "/" + tableName;
}
}

@Override
public Map<String, Object> loadNamespaceMetadata(Namespace ns) {
public Map<String, String> loadNamespaceMetadata(Namespace ns) {
return ImmutableMap.copyOf(delegate.loadNamespaceMetadata(ns));
}

Expand Down
Loading
Loading