diff --git a/source/constructs/api/catalog/crud.py b/source/constructs/api/catalog/crud.py
index c192294b..dbbd7d52 100644
--- a/source/constructs/api/catalog/crud.py
+++ b/source/constructs/api/catalog/crud.py
@@ -41,7 +41,7 @@ def get_catalog_column_level_classification_by_database(
         if not query:
             break
         for item in query:
-            key_name = f'{item.table_name}_{item.column_name}'
+            key_name = f'{item.table_name}.{item.column_name}'.replace(".","_")
             results[key_name] = item
         page += 1
     return results
diff --git a/source/constructs/api/catalog/service.py b/source/constructs/api/catalog/service.py
index 2285cd2b..2c490be8 100644
--- a/source/constructs/api/catalog/service.py
+++ b/source/constructs/api/catalog/service.py
@@ -687,7 +687,7 @@ def __query_job_result_by_athena(
     # Select result
     select_sql = (
         (
-            """SELECT table_name,column_name,cast(identifiers as json) as identifiers_str,CASE WHEN sample_data is NULL then '' else array_join(sample_data, \'|\') end as sample_str, privacy, table_size, s3_location
+            """SELECT table_name,column_name,cast(identifiers as json) as identifiers_str,CASE WHEN sample_data is NULL then '' else array_join(sample_data, \'|\') end as sample_str, privacy, table_size, s3_location, location
             FROM %s
             WHERE account_id='%s'
             AND region='%s'
@@ -838,8 +838,10 @@ def sync_job_detection_result(
             column_sample_data = __get_athena_column_value(row["Data"][3], "str")
             privacy = int(__get_athena_column_value(row["Data"][4], "int"))
             table_size = int(__get_athena_column_value(row["Data"][5], "int"))
             column_path = __get_athena_column_value(row["Data"][6], "str")
+            location = __get_athena_column_value(row["Data"][7], "str")
             table_size_dict[table_name] = table_size
+            table_size_dict[location] = table_size
             if table_name in table_column_dict:
                 table_column_dict[table_name].append(column_name)
             else:
@@ -908,8 +910,14 @@ def sync_job_detection_result(
                 continue
             row_count += table_size
             catalog_table = None
-            if table_name in database_catalog_table_dict:
-                catalog_table = database_catalog_table_dict[table_name]
+
+            tmp_database_catalog_table_dict = {}
+            for key, value in database_catalog_table_dict.items():
+                new_key = key.replace(".", "_")
+                tmp_database_catalog_table_dict[new_key] = value
+
+            if table_name in tmp_database_catalog_table_dict:
+                catalog_table = tmp_database_catalog_table_dict[table_name]
                 logger.debug(
                     "sync_job_detection_result - RESET ADDITIONAL COLUMNS : " + json.dumps(table_column_dict[table_name]))
             if table_name not in table_privacy_dict:
diff --git a/source/constructs/api/common/enum.py b/source/constructs/api/common/enum.py
index f34a6468..ea164f27 100644
--- a/source/constructs/api/common/enum.py
+++ b/source/constructs/api/common/enum.py
@@ -130,6 +130,7 @@ class MessageEnum(Enum):
     SOURCE_BATCH_CREATE_LIMIT_ERR = {1260: "Batch operation limit exceeded, please ensure that a maximum of 100 data sources are created at a time."}
     SOURCE_BATCH_SHEET_NOT_FOUND = {1261: "Sheet [OriginTemplate] not found in the Excel file"}
     SOURCE_BATCH_SHEET_NO_CONTENT = {1262: "There is no relevant data in sheet [OriginTemplate], please add data according to the format."}
+    SOURCE_BATCH_SECURITY_GROUP_NOT_CONFIG = {1263: "Admin account doesn't have a Security group named SDPS-CustomDB"}
 
     # label
     LABEL_EXIST_FAILED = {1611: "Cannot create duplicated label"}
diff --git a/source/constructs/api/data_source/service.py b/source/constructs/api/data_source/service.py
index 00fa2102..2511e6ea 100644
--- a/source/constructs/api/data_source/service.py
+++ b/source/constructs/api/data_source/service.py
@@ -2725,7 +2725,7 @@ def batch_create(file: UploadFile = File(...)):
     created_jdbc_list = []
     account_set = set()
     # Check if the file is an Excel file
-    if not file.filename.endswith('.xlsx'):
+    if not file.filename.endswith('.xlsx') and not file.filename.endswith('.xlsm'):
         raise BizException(MessageEnum.SOURCE_BATCH_CREATE_FORMAT_ERR.get_code(),
                            MessageEnum.SOURCE_BATCH_CREATE_FORMAT_ERR.get_msg())
     # Read the Excel file
@@ -2774,11 +2774,21 @@ def batch_create(file: UploadFile = File(...)):
     # Query network info
     if account_set:
         account_info = list(account_set)[0].split("/")
-        network = query_account_network(AccountInfo(account_provider_id=account_info[0], account_id=account_info[1], region=account_info[2])) \
-            .get('vpcs', [])[0]
-        vpc_id = network.get('vpcId')
-        subnets = [subnet.get('subnetId') for subnet in network.get('subnets')]
-        security_group_id = network.get('securityGroups', [])[0].get('securityGroupId')
+        networks = query_account_network(AccountInfo(account_provider_id=account_info[0], account_id=account_info[1], region=account_info[2])) \
+            .get('vpcs', [])
+        security_group_id = None
+        for network in networks:
+            for securityGroup in network.get('securityGroups',[]):
+                if securityGroup.get("securityGroupName") == const.SECURITY_GROUP_JDBC:
+                    # vpc_id = network.get('vpcId')
+                    subnets = [subnet.get('subnetId') for subnet in network.get('subnets')]
+                    security_group_id = securityGroup.get("securityGroupId")
+                    break
+            if security_group_id:
+                break
+        if not security_group_id:
+            raise BizException(MessageEnum.SOURCE_BATCH_SECURITY_GROUP_NOT_CONFIG.get_code(),
+                               MessageEnum.SOURCE_BATCH_SECURITY_GROUP_NOT_CONFIG.get_msg())
         created_jdbc_list = __map_network_jdbc(created_jdbc_list, subnets, security_group_id)
         batch_result = asyncio.run(batch_add_conn_jdbc(created_jdbc_list))
         result = {f"{item[0]}/{item[1]}/{item[2]}/{item[3]}": f"{item[4]}/{item[5]}" for item in batch_result}
diff --git a/source/portal/src/pages/data-source-connection/componments/JDBCConnection.tsx b/source/portal/src/pages/data-source-connection/componments/JDBCConnection.tsx
index 7cf2b198..c008281f 100644
--- a/source/portal/src/pages/data-source-connection/componments/JDBCConnection.tsx
+++ b/source/portal/src/pages/data-source-connection/componments/JDBCConnection.tsx
@@ -549,7 +549,8 @@ const JDBCConnection: React.FC = (
       username:
jdbcConnectionData.new.master_username, password: jdbcConnectionData.new.password, secret_id: jdbcConnectionData.new.secret, - ssl_verify_cert: jdbcConnectionData.new.jdbc_enforce_ssl === "true" ? true: false + ssl_verify_cert: + jdbcConnectionData.new.jdbc_enforce_ssl === 'true' ? true : false, }; try { const res: any = await queryJdbcDatabases(requestParam); @@ -866,22 +867,20 @@ const JDBCConnection: React.FC = ( {credential === 'secret' && ( - - - changeSecret(detail.selectedOption) + // setSecretItem(detail.selectedOption) + } + options={secretOption} + /> + + {/* {props.providerId !== 1 && ( */} +
-
- )} + + {/* )} */}
)} {credential === 'password' && ( + gridDefinition={[ + { colspan: 4 }, + { colspan: 5 }, + { colspan: 3 }, + ]} + > = ( }} /> - {/* */} - {props.providerId !== 1 && ( -
- + + {/* {props.providerId !== 1 && ( */} +
+
- )} - {/*
*/} + {/* )} */} + )} = ( alertMsg(t('successUpdate'), 'success'); props.setShowModal(false); } catch (error) { - if(error instanceof Error){ + if (error instanceof Error) { alertMsg(error.message, 'error'); - } else if(error instanceof String){ + } else if (error instanceof String) { alertMsg(error.toString(), 'error'); } else { alertMsg(error as string, 'error'); @@ -510,7 +510,8 @@ const JDBCConnectionEdit: React.FC = ( username: jdbcConnectionData.master_username, password: jdbcConnectionData.password, secret_id: jdbcConnectionData.secret, - ssl_verify_cert: jdbcConnectionData.jdbc_enforce_ssl === "true" ? true: false + ssl_verify_cert: + jdbcConnectionData.jdbc_enforce_ssl === 'true' ? true : false, }; try { const res: any = await queryJdbcDatabases(requestParam); @@ -774,41 +775,43 @@ const JDBCConnectionEdit: React.FC = ( {credential === 'secret' && ( - - - changeSecret(detail.selectedOption) + // setSecretItem(detail.selectedOption) + } + options={secretOption} + /> + + {/* {props.providerId !== 1 && ( */} +
+ +
+ {/* )} */}
)} {credential === 'password' && ( - + = ( }} /> - {props.providerId !== 1 && ( -
- -
- )} + {/* {props.providerId !== 1 && ( */} +
+ +
+ {/* )} */}
)}