Skip to content

Commit

Permalink
bugfix: catalog row_count is 0
Browse files Browse the repository at this point in the history
  • Loading branch information
530051970 committed May 13, 2024
1 parent a8bc0ad commit 1c4c1f3
Show file tree
Hide file tree
Showing 3 changed files with 20 additions and 7 deletions.
6 changes: 4 additions & 2 deletions source/constructs/api/catalog/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -687,7 +687,7 @@ def __query_job_result_by_athena(
# Select result
select_sql = (
(
"""SELECT table_name,column_name,cast(identifiers as json) as identifiers_str,CASE WHEN sample_data is NULL then '' else array_join(sample_data, \'|\') end as sample_str, privacy, table_size, s3_location
"""SELECT table_name,column_name,cast(identifiers as json) as identifiers_str,CASE WHEN sample_data is NULL then '' else array_join(sample_data, \'|\') end as sample_str, privacy, table_size, s3_location, location
FROM %s
WHERE account_id='%s'
AND region='%s'
Expand Down Expand Up @@ -838,8 +838,10 @@ def sync_job_detection_result(
column_sample_data = __get_athena_column_value(row["Data"][3], "str")
privacy = int(__get_athena_column_value(row["Data"][4], "int"))
table_size = int(__get_athena_column_value(row["Data"][5], "int"))
column_path = __get_athena_column_value(row["Data"][6], "str")
column_path = __get_athena_column_value(row["Data"][6], "str"),
location = __get_athena_column_value(row["Data"][7], "str"),
table_size_dict[table_name] = table_size
table_size_dict[location] = table_size
if table_name in table_column_dict:
table_column_dict[table_name].append(column_name)
else:
Expand Down
1 change: 1 addition & 0 deletions source/constructs/api/common/enum.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,6 +130,7 @@ class MessageEnum(Enum):
SOURCE_BATCH_CREATE_LIMIT_ERR = {1260: "Batch operation limit exceeded, please ensure that a maximum of 100 data sources are created at a time."}
SOURCE_BATCH_SHEET_NOT_FOUND = {1261: "Sheet [OriginTemplate] not found in the Excel file"}
SOURCE_BATCH_SHEET_NO_CONTENT = {1262: "There is no relevant data in sheet [OriginTemplate], please add data according to the format."}
# Grammar fix in the user-facing message: "an Security" -> "a Security".
SOURCE_BATCH_SECURITY_GROUP_NOT_CONFIG = {1263: "Admin account doesn't have a Security group named SDPS-CustomDB"}
# label
LABEL_EXIST_FAILED = {1611: "Cannot create duplicated label"}

Expand Down
20 changes: 15 additions & 5 deletions source/constructs/api/data_source/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -2774,11 +2774,21 @@ def batch_create(file: UploadFile = File(...)):
# Query network info for the (single) account/region shared by the batch.
if account_set:
    account_info = list(account_set)[0].split("/")
    networks = query_account_network(
        AccountInfo(account_provider_id=account_info[0],
                    account_id=account_info[1],
                    region=account_info[2])
    ).get('vpcs', [])
    # Search every VPC for the dedicated JDBC security group
    # (const.SECURITY_GROUP_JDBC, i.e. SDPS-CustomDB) rather than
    # assuming vpcs[0] is the configured one.
    security_group_id = None
    subnets = []
    for network in networks:
        for security_group in network.get('securityGroups', []):
            if security_group.get("securityGroupName") == const.SECURITY_GROUP_JDBC:
                # Use .get(..., []) so a VPC with no subnets listed does not
                # raise TypeError while iterating.
                subnets = [subnet.get('subnetId') for subnet in network.get('subnets', [])]
                security_group_id = security_group.get("securityGroupId")
                break
        if security_group_id:
            break
    # Fail loudly with a dedicated error code when the required security
    # group is not configured in the admin account.
    if not security_group_id:
        raise BizException(MessageEnum.SOURCE_BATCH_SECURITY_GROUP_NOT_CONFIG.get_code(),
                           MessageEnum.SOURCE_BATCH_SECURITY_GROUP_NOT_CONFIG.get_msg())
    created_jdbc_list = __map_network_jdbc(created_jdbc_list, subnets, security_group_id)
batch_result = asyncio.run(batch_add_conn_jdbc(created_jdbc_list))
result = {f"{item[0]}/{item[1]}/{item[2]}/{item[3]}": f"{item[4]}/{item[5]}" for item in batch_result}
Expand Down

0 comments on commit 1c4c1f3

Please sign in to comment.