From 0fc8cc737f39e53f0bf2828d6adb53270ecd780d Mon Sep 17 00:00:00 2001
From: Monnerat Marc swisstopo
Date: Thu, 27 Jun 2024 20:33:22 +0200
Subject: [PATCH 1/3] encoding for JSON file (but why is it cp1252?)

---
 SymbolsFilter.pyt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/SymbolsFilter.pyt b/SymbolsFilter.pyt
index f33fdcd..622a4d1 100644
--- a/SymbolsFilter.pyt
+++ b/SymbolsFilter.pyt
@@ -178,7 +178,7 @@ def save_to_files(output_path, filtered, drop_null=True, engine=None):
     try:
         data = filtered  # results["layers"]

-        with open(output_path.replace(".xlsx", ".json"), "w", encoding="utf-8") as f:
+        with open(output_path.replace(".xlsx", ".json"), "w", encoding="windows-1252") as f:
             # Serialize the data and write it to the file
             json.dump(filtered, f, ensure_ascii=False, indent=4)
     except Exception as e:
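The first patch switches the JSON export from utf-8 to windows-1252, and the commit message itself questions the choice. The standalone sketch below is not part of the patch series (file names and sample strings are invented); it only illustrates what the encoding argument changes once json.dump is called with ensure_ascii=False: umlauts such as those in the layer labels fit into cp1252, while any character outside that code page raises a UnicodeEncodeError, which save_to_files would then catch in its except block.

import json

labels = {"labels": ["Frühes Pleistozän", "gesichert, Decken trennend"]}

# Umlauts are representable in windows-1252, so this succeeds.
with open("rules_cp1252.json", "w", encoding="windows-1252") as f:
    json.dump(labels, f, ensure_ascii=False, indent=4)

# A character outside the cp1252 code page (an arrow, purely illustrative)
# cannot be written once ensure_ascii=False passes it to the cp1252 codec.
try:
    with open("rules_cp1252.json", "w", encoding="windows-1252") as f:
        json.dump({"label": "Moräne \u2192 See"}, f, ensure_ascii=False)
except UnicodeEncodeError as exc:
    print(f"Not representable in cp1252: {exc}")

# utf-8, the value the patch replaces, can encode both examples.
with open("rules_utf8.json", "w", encoding="utf-8") as f:
    json.dump(labels, f, ensure_ascii=False, indent=4)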
Skipping" + ) logger.error(f" column are not valid: {columns}") continue if gdf is None: @@ -397,13 +399,13 @@ class SymbolFilter: ) except Exception as e: logger.error( - f"Error while getting dataframe fro layer {layername}: {e}" + f"Error while getting dataframe from layer {layername}: {e}" ) continue feat_total = str(len(gdf)) messages.addMessage( - f"{feat_total : >10} objects in selected feature".encode("cp1252") + f"{feat_total : >10} objects in selected extent".encode("cp1252") ) complex_filter_criteria = get_complex_filter_criteria( @@ -420,7 +422,7 @@ class SymbolFilter: results = {} for label, criteria in complex_filter_criteria: - logger.info(f"\nApplying criteria: {label}, {criteria}") + logger.debug(f"\nApplying criteria: {label}, {criteria}") # Start with a True series to filter filter_expression = pd.Series([True] * len(df), index=df.index) @@ -435,12 +437,6 @@ class SymbolFilter: # Apply the final filter to the DataFrame filtered_df = df[filter_expression] - """results[label] = { - "count": len(filtered_df), - "rows": filtered_df.to_json(orient='records') , # filtered_df, - "criteria": criteria, - }""" - count = len(filtered_df) if count > 0: @@ -458,7 +454,6 @@ class SymbolFilter: logger.info(f"Count: {result['count']}") logger.info("Matching Rows:") logger.info(result["rows"])""" - logger.info("---") filtered[layername] = results @@ -466,6 +461,7 @@ class SymbolFilter: # TODO: encoding issue save_to_files(output_path, filtered, drop_null=True, engine=None) + messages.addMessage("Done.") return diff --git a/SymbolsFilter.pyt.xml b/SymbolsFilter.pyt.xml index 9d4efc6..56b902d 100644 --- a/SymbolsFilter.pyt.xml +++ b/SymbolsFilter.pyt.xml @@ -1,2 +1,2 @@ -20240625103715001.0TRUE202406271428011500000005000ItemDescriptionc:\program files\arcgis\pro\Resources\Help\gpSymbolsFilterCleaning up symbols without any features in a given extentsymbolsrulesArcToolbox Toolbox20240626 +20240625103715001.0TRUE202406271604221500000005000ItemDescriptionc:\program files\arcgis\pro\Resources\Help\gpSymbolsFilterCleaning up symbols without any features in a given extentsymbolsrulesArcToolbox Toolbox20240626 diff --git a/layer_symbols_rules.json b/layer_symbols_rules.json index 414acdd..1f1fdd6 100644 --- a/layer_symbols_rules.json +++ b/layer_symbols_rules.json @@ -16674,7 +16674,7 @@ "headings": [ "TTEC_STATUS", "TTEC_META_STA", - null + "TTEC_LIM_TYP" ], "labels": [ "gesichert, Decken trennend", @@ -17158,7 +17158,7 @@ "" ], "headings": [ - null + "" ], "labels": [ "Pliozän", @@ -17237,7 +17237,7 @@ "Seebodesediment" ], "headings": [ - null + "SEEBODESEDIMENT" ], "labels": [ "Seebodensediment, Holozän", From 755bfec032c027a3e9ff7df0404d1abbbdc26536 Mon Sep 17 00:00:00 2001 From: Monnerat Marc swisstopo Date: Thu, 27 Jun 2024 21:15:38 +0200 Subject: [PATCH 3/3] counting objects --- SymbolsFilter.pyt | 34 ++++++++++++++-------- SymbolsFilter.pyt.xml | 2 +- layer_symbols_rules.json | 62 ---------------------------------------- 3 files changed, 23 insertions(+), 75 deletions(-) diff --git a/SymbolsFilter.pyt b/SymbolsFilter.pyt index a89a5ef..f7b7d3c 100644 --- a/SymbolsFilter.pyt +++ b/SymbolsFilter.pyt @@ -156,8 +156,12 @@ def get_complex_filter_criteria(labels, values, columns): def convert_columns(df, columns_to_convert): # Check if conversion is possible and convert - try: - for col in columns_to_convert: + + for col in columns_to_convert: + if col is None or col == "": + logger.warning(f"Not converting column: {col}") + continue + try: if ( df[col] .dropna() @@ -166,10 +170,10 @@ def 
From 755bfec032c027a3e9ff7df0404d1abbbdc26536 Mon Sep 17 00:00:00 2001
From: Monnerat Marc swisstopo
Date: Thu, 27 Jun 2024 21:15:38 +0200
Subject: [PATCH 3/3] counting objects

---
 SymbolsFilter.pyt        | 34 ++++++++++++++--------
 SymbolsFilter.pyt.xml    |  2 +-
 layer_symbols_rules.json | 62 ----------------------------------------
 3 files changed, 23 insertions(+), 75 deletions(-)

diff --git a/SymbolsFilter.pyt b/SymbolsFilter.pyt
index a89a5ef..f7b7d3c 100644
--- a/SymbolsFilter.pyt
+++ b/SymbolsFilter.pyt
@@ -156,8 +156,12 @@ def get_complex_filter_criteria(labels, values, columns):

 def convert_columns(df, columns_to_convert):
     # Check if conversion is possible and convert
-    try:
-        for col in columns_to_convert:
+
+    for col in columns_to_convert:
+        if col is None or col == "":
+            logger.warning(f"Not converting column: {col}")
+            continue
+        try:
             if (
                 df[col]
                 .dropna()
@@ -166,10 +170,10 @@ def convert_columns(df, columns_to_convert):
             ):
                 # Fill NaN values with 0 (or another specific value) before conversion
                 df[col] = df[col].fillna(0).astype(int)
-    except KeyError as ke:
-        logger.error(f"Key error while converting column {col}: {ke}")
-    except Exception as e:
-        logger.error(f"Unknown error: {e}")
+        except KeyError as ke:
+            logger.error(f"Key error while converting column {col}: {ke}")
+        except Exception as e:
+            logger.error(f"Unknown error: {e}")

     return df

@@ -381,8 +385,9 @@
                 df = arcgis_table_to_df("TOPGIS_GC.GC_BED_FORM_ATT")
                 gdf = gdf.merge(df, left_on="FORM_ATT", right_on="UUID")

-                # TODO
-                if not "toto" in layername:  # "Quelle" in layername:
+                # TODO Attribut SEEBODEN???
+                if not "Deposits_Chrono" in layername:  # "Quelle" in layername:
+                    features_rules_sum = 0
                     if columns is None or any(col is None for col in columns):
                         messages.addErrorMessage(
                             f" column are not valid: {columns}. Skipping"
@@ -404,10 +409,6 @@
                         continue

                     feat_total = str(len(gdf))
-                    messages.addMessage(
-                        f"{feat_total : >10} objects in selected extent".encode("cp1252")
-                    )
-
                     complex_filter_criteria = get_complex_filter_criteria(
                         labels, values, columns
                     )
@@ -438,6 +439,7 @@
                         filtered_df = df[filter_expression]

                         count = len(filtered_df)
+                        features_rules_sum += count

                         if count > 0:
                             count_str = str(count)
@@ -456,6 +458,14 @@
                             logger.info(result["rows"])"""

                     filtered[layername] = results
+                    messages.addMessage(
+                        f" ----------\n{feat_total : >10} in selected extent (with query_defn)".encode(
+                            "cp1252"
+                        )
+                    )
+                    messages.addMessage(
+                        f"{features_rules_sum : >10} in classes".encode("cp1252")
+                    )

             messages.addMessage(f"---- Saving results to {output_path} ----------")

diff --git a/SymbolsFilter.pyt.xml b/SymbolsFilter.pyt.xml
index 56b902d..0480e4d 100644
--- a/SymbolsFilter.pyt.xml
+++ b/SymbolsFilter.pyt.xml
@@ -1,2 +1,2 @@
-20240625103715001.0TRUE202406271604221500000005000ItemDescriptionc:\program files\arcgis\pro\Resources\Help\gpSymbolsFilterCleaning up symbols without any features in a given extentsymbolsrulesArcToolbox Toolbox20240626
+20240625103715001.0TRUE202406272111471500000005000ItemDescriptionc:\program files\arcgis\pro\Resources\Help\gpSymbolsFilterCleaning up symbols without any features in a given extentsymbolsrulesArcToolbox Toolbox20240626
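The third patch's main code change above moves the try/except in convert_columns inside the loop, so one problematic column no longer aborts conversion of the remaining ones, and it skips None or empty column names up front. Below is a standalone sketch of the same pattern; the function name, the toy DataFrame, and the integral-value test in the if condition are assumptions for illustration, since the patch only shows the lines surrounding that condition.

import logging
import pandas as pd

logger = logging.getLogger(__name__)

def convert_integer_like_columns(df, columns_to_convert):
    # Per-column try/except: a bad column is logged and skipped, the rest still convert.
    for col in columns_to_convert:
        if col is None or col == "":
            logger.warning(f"Not converting column: {col}")
            continue
        try:
            # Cast only if every non-null value is a whole number.
            if df[col].dropna().apply(lambda x: float(x).is_integer()).all():
                df[col] = df[col].fillna(0).astype(int)
        except KeyError as ke:
            logger.error(f"Key error while converting column {col}: {ke}")
        except Exception as e:
            logger.error(f"Unknown error: {e}")
    return df

df = pd.DataFrame({"TTEC_STATUS": [1.0, 2.0, None], "RATIO": [0.5, 1.5, 2.0]})
df = convert_integer_like_columns(df, ["TTEC_STATUS", "RATIO", None, "MISSING"])
print(df.dtypes)  # TTEC_STATUS becomes int, RATIO stays float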
diff --git a/layer_symbols_rules.json b/layer_symbols_rules.json
index 1f1fdd6..d8ce971 100644
--- a/layer_symbols_rules.json
+++ b/layer_symbols_rules.json
@@ -17152,68 +17152,6 @@
             ],
         "dataSource": "Instance=GCOVERP,Database Platform=Oracle,Version=SDE.DEFAULT (Traditional),Authentication Type=Operating System Authentication,Feature Dataset=TOPGIS_GC.GC_ROCK_BODIES,Dataset=TOPGIS_GC.GC_LINEAR_OBJECTS"
     },
-    "Unco Deposits_Chrono": {
-        "renderer": {
-            "headings_alias": [
-                ""
-            ],
-            "headings": [
-                ""
-            ],
-            "labels": [
-                "Pliozän",
-                "Holozän",
-                "Frühes Pleistozän",
-                "Mittleres Pleistozän",
-                "Spätes Pleistozän",
-                "Pleistozän"
-            ],
-            "values": [
-                [
-                    [
-                        "15001014"
-                    ]
-                ],
-                [
-                    [
-                        "15001004"
-                    ]
-                ],
-                [
-                    [
-                        "15001009"
-                    ]
-                ],
-                [
-                    [
-                        "15001007"
-                    ]
-                ],
-                [
-                    [
-                        "15001006"
-                    ]
-                ],
-                [
-                    [
-                        "15001005"
-                    ]
-                ]
-            ]
-        },
-        "query_defn": [
-            {
-                "name": ""
-            },
-            {
-                "sql": "RUNC_LITHO IN (15101019, 15101021, 15101024, 15101027, 15101028, 15101029, 15101030, 15101031, 15101032, 15101033, 15101034, 15101046, 15101047, 15101048, 15101049, 15101055, 15101058)"
-            },
-            {
-                "isActive": true
-            }
-        ],
-        "dataSource": "Instance=GCOVERP,Database Platform=Oracle,Version=SDE.DEFAULT (Traditional),Authentication Type=Operating System Authentication,Feature Dataset=TOPGIS_GC.GC_ROCK_BODIES,Dataset=TOPGIS_GC.GC_UNCO_DESPOSIT"
-    },
     "Unco Deposits_Litho_>20000": {
         "renderer": {
             "type": "SimpleRenderer"