Exporting symbols rules dynamically #15

Open · wants to merge 3 commits into develop
47 changes: 46 additions & 1 deletion README.md
@@ -1,9 +1,54 @@
# lg-symbols-filter
# ArcGIS Pro (GOTOP) Symbols Counter

ESRI ArcGIS Pro toolbox to filter out symbols that are not currently used in a given extent of GeoCover.

## Install

### Python environment and dependencies

This toolbox requires the additional Python module `geopandas`. Because the default `ArcGIS Pro` conda environment cannot be modified, the module has to be installed into a cloned environment.

Clone the default `arcgispro-py3` environment:

Project > Package Manager. Install the environment on the `D:\` drive, for example `D:\conda\envs\CLONE`.

Switch to this new environment and restart ArcGIS Pro.

Add the missing `geopandas` package:

Project > Package Manager > Add package

![Add package](images/symbolfilter-add-package-red.png)

The environment is ready.
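
To verify that the cloned environment is active and that `geopandas` can be imported, a quick check from the ArcGIS Pro Python window helps (a minimal sketch, not part of the toolbox):

```python
# Sanity check for the cloned environment (run in the ArcGIS Pro Python window).
import sys
import geopandas

print(sys.executable)         # should point to the cloned environment, e.g. D:\conda\envs\CLONE
print(geopandas.__version__)  # only succeeds if geopandas was added to the clone
```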

### Install the Python toolbox

Download the code of the latest release from https://github.com/procrastinatio/lg-symbols-filter/releases and unzip it into a directory accessible to ESRI ArcGIS Pro.
In ArcGIS Pro, open the Catalog pane and add the `SymbolsFilter.pyt` file to the Toolboxes section.
The toolbox contains a single tool: SymbolFilter.

## Usage

Open or create an ArcGIS Pro project with the GeoCover layers (GOTOP - Production (.lyrx)):

\\v0t0020a.adr.admin.ch\lg\01_PRODUKTION\GIS\TOPGIS\NEPRO\GoTOP\Lyrx_P

Add the Mapsheet layer and select one of its polygons.

![Toolbox in Catalog](images/symbolfilter-catalog.png)

Two input parameters:

* The name of the layer containing the perimeter of interest (only one feature must be selected)
* The file containing the symbol definitions (static for the moment)

Output:

* The name of the Excel file in which to write the results

![Toolbox input dialog](images/symbolfilter-dialog.png)

Run the tool with the Run button (at the bottom of the dialog).

![Dialog box](images/symbolfilter-messages.png)
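
The tool can also be run from the ArcGIS Pro Python window once the toolbox has been imported. The snippet below is only a sketch: the paths are placeholders, and the generated function name assumes the usual `<tool>_<alias>` pattern with the `Geocover` alias declared in `SymbolsFilter.pyt.xml`.

```python
# Hypothetical scripted run; adjust the paths to your installation.
import arcpy

arcpy.ImportToolbox(r"D:\tools\lg-symbols-filter\SymbolsFilter.pyt")

# First argument: layer holding the selected perimeter feature.
# Second argument: Excel file the results are written to.
arcpy.SymbolFilter_Geocover("Mapsheet", r"D:\output\filtered_symbols.xlsx")
```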
195 changes: 111 additions & 84 deletions SymbolsFilter.pyt
@@ -12,6 +12,7 @@ import shutil

import helpers
import arcpy_logger
import exporter

import importlib

@@ -26,6 +27,7 @@ except ImportError:

importlib.reload(helpers) # force reload of the module
importlib.reload(arcpy_logger)
importlib.reload(exporter)

sys.dont_write_bytecode = True

@@ -48,7 +50,7 @@ DEFAULT_FILTERED_SYMBOL_FILE = os.path.join(
)"""


log_level = "WARNING"
log_level = "WARN"

# set logging level
if isinstance(log_level, str):
@@ -65,6 +67,10 @@ ah = arcpy_logger.ArcpyHandler()
ah.setFormatter(log_frmt)
logger.addHandler(ah)

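# Also write the log to a file next to the toolbox so runs can be reviewed later.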
fileHandler = logging.FileHandler("{0}/{1}.log".format(toolbox_dir, "SymbolsFilter"))
fileHandler.setFormatter(log_frmt)
logger.addHandler(fileHandler)


def convert_to_int(x):
if x == "<Null>" or x is None:
@@ -123,10 +129,10 @@ def get_query_defn(data):


def get_columns(renderer, layername):
columns = renderer.get("headings")
columns = renderer.get("fields")

if columns is None or None in columns:
logger.warning(f"No headings found for {layername}: {columns}")
logger.warning(f"No fields found for {layername}: {columns}")
else:
columns = list(map(get_last_element, columns))
return columns
@@ -180,7 +186,7 @@ def convert_columns(df, columns_to_convert):

def save_to_files(output_path, filtered, drop_null=True, engine=None):
try:
data = filtered # results["layers"]
data = filtered

with open(
output_path.replace(".xlsx", ".json"), "w", encoding="windows-1252"
@@ -270,16 +276,7 @@ class SymbolFilter:
direction="Input",
)

# Second parameter
param1 = arcpy.Parameter(
displayName="Symbol rules JSON file",
name="in_file",
datatype="DEFile",
parameterType="Required",
direction="Input",
)

param2 = arcpy.Parameter(
displayName="Output File (.xlsx)",
name="out_file",
datatype="DEFile",
@@ -288,10 +285,9 @@
)

param0.values = "Mapsheet"
param1.values = DEFAULT_SYMBOL_RULES_JSON
param2.values = DEFAULT_FILTERED_SYMBOL_FILE
param1.values = DEFAULT_FILTERED_SYMBOL_FILE

params = [param0, param1, param2]
params = [param0, param1]
return params

def isLicensed(self):
@@ -312,18 +308,21 @@
def execute(self, parameters, messages):
"""The source code of the tool."""

from helpers import arcgis_table_to_df # Twice imported
from helpers import arcgis_table_to_df

inLayer = parameters[0].valueAsText
inSymbolsFile = parameters[1].valueAsText
output_path = parameters[2].valueAsText
output_path = parameters[1].valueAsText
spatial_filter = None
filtered = {}
dataset = None
drop = True

arcpy.env.workspace = setup_connection(toolbox_dir)

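# Make sure the directory for the output file exists before results are written there.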
output_dir = os.path.dirname(output_path)
if not os.path.isdir(output_dir):
os.makedirs(output_dir)

try:
# Read the mask file (shapefile or GeoJSON)
spatial_filter = helpers.get_selected_features(inLayer)
@@ -339,16 +338,46 @@
)
raise arcpy.ExecuteError

# Get the current project and active map
aprx = arcpy.mp.ArcGISProject("CURRENT")
active_map = aprx.activeMap
# List all layers in the active map
layers = active_map.listLayers()

rules_dict = {}

messages.addMessage(f"##### EXTRACTING RULES #####")

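# Export the symbol rules dynamically from every feature layer in the active map;
# the perimeter layer (inLayer) and non-feature layers are skipped.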
for layer in layers:
if layer.name == inLayer or not layer.isFeatureLayer:
continue
layername = layer.name
try:
messages.addMessage(f"Getting symbols rules for '{layername}''")
attributes = exporter.rules_exporter(layer)
rules_dict[layername] = attributes
except Exception as e:
logger.error(f"Cannot get symbols rules for {layername}: {e}")
raise arcpy.ExecuteError
messages.addMessage(f"Writting rules to {DEFAULT_SYMBOL_RULES_JSON}")
with open(DEFAULT_SYMBOL_RULES_JSON, "w", encoding="utf-8") as f:
json.dump(rules_dict, f, ensure_ascii=False, indent=4)
del rules_dict

try:
with open(inSymbolsFile, "r") as f:
layers = json.load(f)
with open(DEFAULT_SYMBOL_RULES_JSON, "r") as f:
rules_dict = json.load(f)
except IOError as e:
messages.addErrorMessage(f"Cannot open {inSymbolsFile}")
messages.addErrorMessage(f"Cannot open {DEFAULT_SYMBOL_RULES_JSON}")
raise arcpy.ExecuteError

for layername in layers.keys():
messages.addMessage(f"##### FILTERING SYMBOLS WITH RULES #####")

for layername in rules_dict.keys():
messages.addMessage(f"--- {layername} ---".encode("cp1252"))
data = layers.get(layername)
logger.info(f"--- {layername} ---")

data = rules_dict.get(layername)

dataset = get_dataset(data)
renderer = get_renderer(data)
@@ -360,9 +389,12 @@
feature_class_path = dataset

# headers
values = []
labels = []
columns = get_columns(renderer, layername)
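# Gather the renderer's symbol-class values and labels, including those nested in groups.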
values = renderer.get("values")
labels = renderer.get("labels")
for grp in renderer.get("groups", []):
values += grp.get("values", [])
labels += grp.get("labels", [])

sql = get_query_defn(data)
messages.addMessage(f" sql={sql}")
@@ -386,79 +418,74 @@
df = arcgis_table_to_df("TOPGIS_GC.GC_BED_FORM_ATT")
gdf = gdf.merge(df, left_on="FORM_ATT", right_on="UUID")

# TODO Attribut SEEBODEN???
if not "Deposits_Chrono" in layername: # "Quelle" in layername:
features_rules_sum = 0
if columns is None or any(col is None for col in columns):
messages.addErrorMessage(
f"<null> column are not valid: {columns}. Skipping"
features_rules_sum = 0
if columns is None or any(col is None for col in columns):
messages.addErrorMessage(
f"<null> column are not valid: {columns}. Skipping"
)
logger.error(f"<null> column are not valid: {columns}")
continue
if gdf is None:
try:
gdf = arcgis_table_to_df(
feature_class_path,
input_fields=["OBJECTID"] + columns,
spatial_filter=spatial_filter,
query=sql,
)
except Exception as e:
logger.error(
f"Error while getting dataframe from layer {layername}: {e}"
)
logger.error(f"<null> column are not valid: {columns}")
continue
if gdf is None:
try:
gdf = arcgis_table_to_df(
feature_class_path,
input_fields=["OBJECTID"] + columns,
spatial_filter=spatial_filter,
query=sql,
)
except Exception as e:
logger.error(
f"Error while getting dataframe from layer {layername}: {e}"
)
continue
feat_total = str(len(gdf))

complex_filter_criteria = get_complex_filter_criteria(
labels, values, columns
)
feat_total = str(len(gdf))

df = gdf
complex_filter_criteria = get_complex_filter_criteria(
labels, values, columns
)

columns_to_convert = columns
df = gdf

df = convert_columns(df, columns_to_convert)
columns_to_convert = columns

# Store counts and rows for each complex filter criterion
results = {}
df = convert_columns(df, columns_to_convert)

for label, criteria in complex_filter_criteria:
logger.debug(f"\nApplying criteria: {label}, {criteria}")
# Store counts and rows for each complex filter criterion
results = {}

# Start with a True series to filter
filter_expression = pd.Series([True] * len(df), index=df.index)
for label, criteria in complex_filter_criteria:
logger.debug(f"\nApplying criteria: {label}, {criteria}")

for column, value in criteria:
# Update the filter expression for each (column, value) pair
filter_expression &= df[column] == value
logger.debug(f"Filter status for ({column} == {value}):")
logger.debug(filter_expression)
logger.debug(f"Matching rows count: {filter_expression.sum()}")
# Start with a True series to filter
filter_expression = pd.Series([True] * len(df), index=df.index)

# Apply the final filter to the DataFrame
filtered_df = df[filter_expression]
for column, value in criteria:
# Update the filter expression for each (column, value) pair
filter_expression &= df[column] == value
logger.debug(f"Filter status for ({column} == {value}):")
logger.debug(filter_expression)
logger.debug(f"Matching rows count: {filter_expression.sum()}")

count = len(filtered_df)
features_rules_sum += count
# Apply the final filter to the DataFrame
filtered_df = df[filter_expression]

if count > 0:
count_str = str(count)
messages.addMessage(
f"{count_str : >10} {label}".encode("cp1252")
)
results[label] = count
count = len(filtered_df)
features_rules_sum += count

if count > 0:
count_str = str(count)
messages.addMessage(f"{count_str : >10} {label}".encode("cp1252"))
results[label] = count

filtered[layername] = results
messages.addMessage(
f" ----------\n{feat_total : >10} in selected extent (with query_defn)".encode(
"cp1252"
)
)
messages.addMessage(
f"{features_rules_sum : >10} in classes".encode("cp1252")
filtered[layername] = results
messages.addMessage(
f" ----total------\n{feat_total : >10} in selected extent (with query_defn)".encode(
"cp1252"
)
)
messages.addMessage(
f"{features_rules_sum : >10} in classes".encode("cp1252")
)

messages.addMessage(f"---- Saving results to {output_path} ----------")

2 changes: 1 addition & 1 deletion SymbolsFilter.pyt.xml
@@ -1,2 +1,2 @@
<?xml version="1.0"?>
<metadata xml:lang="en"><Esri><CreaDate>20240625</CreaDate><CreaTime>10371500</CreaTime><ArcGISFormat>1.0</ArcGISFormat><SyncOnce>TRUE</SyncOnce><ModDate>20240627</ModDate><ModTime>213554</ModTime><scaleRange><minScale>150000000</minScale><maxScale>5000</maxScale></scaleRange><ArcGISProfile>ItemDescription</ArcGISProfile></Esri><toolbox name="SymbolsFilter" alias="Geocover"><arcToolboxHelpPath>c:\program files\arcgis\pro\Resources\Help\gp</arcToolboxHelpPath><toolsets/></toolbox><dataIdInfo><idCitation><resTitle>SymbolsFilter</resTitle></idCitation><idPurp>Cleaning up symbols without any features in a given extent</idPurp><searchKeys><keyword>symbols</keyword><keyword>rules</keyword></searchKeys></dataIdInfo><distInfo><distributor><distorFormat><formatName>ArcToolbox Toolbox</formatName></distorFormat></distributor></distInfo><mdHrLv><ScopeCd value="005"></ScopeCd></mdHrLv><mdDateSt Sync="TRUE">20240626</mdDateSt></metadata>
<metadata xml:lang="en"><Esri><CreaDate>20240625</CreaDate><CreaTime>10371500</CreaTime><ArcGISFormat>1.0</ArcGISFormat><SyncOnce>TRUE</SyncOnce><ModDate>20240702</ModDate><ModTime>135349</ModTime><scaleRange><minScale>150000000</minScale><maxScale>5000</maxScale></scaleRange><ArcGISProfile>ItemDescription</ArcGISProfile></Esri><toolbox name="SymbolsFilter" alias="Geocover"><arcToolboxHelpPath>c:\program files\arcgis\pro\Resources\Help\gp</arcToolboxHelpPath><toolsets/></toolbox><dataIdInfo><idCitation><resTitle>SymbolsFilter</resTitle></idCitation><idPurp>Cleaning up symbols without any features in a given extent</idPurp><searchKeys><keyword>symbols</keyword><keyword>rules</keyword></searchKeys></dataIdInfo><distInfo><distributor><distorFormat><formatName>ArcToolbox Toolbox</formatName></distorFormat></distributor></distInfo><mdHrLv><ScopeCd value="005"></ScopeCd></mdHrLv><mdDateSt Sync="TRUE">20240626</mdDateSt></metadata>