// Copyright 2021-present StarRocks, Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.starrocks.connector.jdbc;

import com.google.common.collect.ImmutableSet;
import com.starrocks.catalog.PrimitiveType;
import com.starrocks.catalog.ScalarType;
import com.starrocks.catalog.Type;
import com.starrocks.connector.exception.StarRocksConnectorException;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

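/**
 * Schema resolver for the ClickHouse JDBC catalog: lists schemas and tables through
 * JDBC {@link java.sql.DatabaseMetaData} and maps the column types reported by the
 * ClickHouse driver to StarRocks types.
 */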
public class ClickhouseSchemaResolver extends JDBCSchemaResolver {
    Map<String, String> properties;

    public static final Set<String> SUPPORTED_TABLE_TYPES = new HashSet<>(
            Arrays.asList("LOG TABLE", "MEMORY TABLE", "TEMPORARY TABLE", "VIEW", "DICTIONARY", "SYSTEM TABLE",
                    "REMOTE TABLE", "TABLE"));

    public ClickhouseSchemaResolver(Map<String, String> properties) {
        this.properties = properties;
    }

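    // Lists user databases, skipping ClickHouse's built-in INFORMATION_SCHEMA and system schemas.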
    @Override
    public Collection<String> listSchemas(Connection connection) {
        try (ResultSet resultSet = connection.getMetaData().getSchemas()) {
            ImmutableSet.Builder<String> schemaNames = ImmutableSet.builder();
            while (resultSet.next()) {
                String schemaName = resultSet.getString("TABLE_SCHEM");
                // skip internal schemas
                if (!schemaName.equalsIgnoreCase("INFORMATION_SCHEMA") && !schemaName.equalsIgnoreCase("system")) {
                    schemaNames.add(schemaName);
                }
            }
            return schemaNames.build();
        } catch (SQLException e) {
            throw new StarRocksConnectorException(e.getMessage());
        }
    }

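    // The optional catalog property "table_types" (a comma-separated list, e.g. "TABLE,VIEW") restricts
    // which table types are listed; when it is absent, all supported table types are requested.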
    @Override
    public ResultSet getTables(Connection connection, String dbName) throws SQLException {
        String tableTypes = properties.get("table_types");
        if (tableTypes != null) {
            String[] tableTypesArray = tableTypes.split(",");
            if (tableTypesArray.length == 0) {
                throw new StarRocksConnectorException("table_types should be a comma-separated list of table " +
                        "types, e.g. 'TABLE,VIEW'. Currently supported table types include: " +
                        String.join(",", SUPPORTED_TABLE_TYPES));
            }

            for (String tt : tableTypesArray) {
                if (!SUPPORTED_TABLE_TYPES.contains(tt)) {
                    throw new StarRocksConnectorException("Unsupported table type found: " + tt +
                            ". Currently supported table types include: " + String.join(",", SUPPORTED_TABLE_TYPES));
                }
            }
            return connection.getMetaData().getTables(connection.getCatalog(), dbName, null, tableTypesArray);
        }
        return connection.getMetaData().getTables(connection.getCatalog(), dbName, null,
                SUPPORTED_TABLE_TYPES.toArray(new String[0]));
    }

    @Override
    public ResultSet getColumns(Connection connection, String dbName, String tblName) throws SQLException {
        return connection.getMetaData().getColumns(connection.getCatalog(), dbName, tblName, "%");
    }

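    // Maps the JDBC type reported by the ClickHouse driver to a StarRocks type.
    // Types without an explicit mapping fall through to UNKNOWN_TYPE.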
    @Override
    public Type convertColumnType(int dataType, String typeName, int columnSize, int digits) {
        PrimitiveType primitiveType;
        switch (dataType) {
            case Types.TINYINT:
                primitiveType = PrimitiveType.TINYINT;
                break;
            case Types.SMALLINT:
                primitiveType = PrimitiveType.SMALLINT;
                break;
            case Types.INTEGER:
                primitiveType = PrimitiveType.INT;
                break;
            case Types.BIGINT:
                primitiveType = PrimitiveType.BIGINT;
                break;
            case Types.NUMERIC:
                primitiveType = PrimitiveType.LARGEINT;
                break;
            case Types.FLOAT:
                primitiveType = PrimitiveType.FLOAT;
                break;
            case Types.DOUBLE:
                primitiveType = PrimitiveType.DOUBLE;
                break;
            case Types.BOOLEAN:
                primitiveType = PrimitiveType.BOOLEAN;
                break;
            case Types.VARCHAR:
                return ScalarType.createVarcharType(65533);
            case Types.DATE:
                primitiveType = PrimitiveType.DATE;
                break;
            case Types.TIMESTAMP:
                primitiveType = PrimitiveType.DATETIME;
                break;
            case Types.DECIMAL:
                // typeName looks like "Decimal(P, S)", where P is the precision and S is the scale.
                String[] precisionAndScale =
                        typeName.replace("Decimal", "").replace("(", "")
                                .replace(")", "").replace(" ", "")
                                .split(",");
                if (precisionAndScale.length != 2) {
                    // Should not happen: fail fast when precision and scale cannot be parsed from the type name.
                    throw new StarRocksConnectorException(
                            "Cannot extract precision and scale from Decimal typename:" + typeName);
                } else {
                    int precision = Integer.parseInt(precisionAndScale[0]);
                    int scale = Integer.parseInt(precisionAndScale[1]);
                    return ScalarType.createUnifiedDecimalType(precision, scale);
                }
            default:
                primitiveType = PrimitiveType.UNKNOWN_TYPE;
                break;
        }
        return ScalarType.createType(primitiveType);
    }
}