diff --git a/.github/workflows/cicd.yaml b/.github/workflows/cicd.yaml
index 95cb134e..23d95f81 100644
--- a/.github/workflows/cicd.yaml
+++ b/.github/workflows/cicd.yaml
@@ -40,8 +40,8 @@ jobs:
       - name: Example inference run via Docker with default config and checkpoint
         run: >
           docker run
-          -v /var/data/cicd/CICD_github_assets/myria3d_V3.4.0/inputs/:/inputs/
-          -v /var/data/cicd/CICD_github_assets/myria3d_V3.4.0/outputs/:/outputs/
+          -v /var/data/cicd/CICD_github_assets/myria3d_V3.5.0/inputs/:/inputs/
+          -v /var/data/cicd/CICD_github_assets/myria3d_V3.5.0/outputs/:/outputs/
           --ipc=host
           --shm-size=2gb
           myria3d
@@ -54,14 +54,14 @@ jobs:
       - name: Example inference run via Docker with inference-time subtiles overlap to smooth-out results.
         run: >
           docker run
-          -v /var/data/cicd/CICD_github_assets/myria3d_V3.4.0/inputs/:/inputs/
-          -v /var/data/cicd/CICD_github_assets/myria3d_V3.4.0/outputs/:/outputs/
+          -v /var/data/cicd/CICD_github_assets/myria3d_V3.5.0/inputs/:/inputs/
+          -v /var/data/cicd/CICD_github_assets/myria3d_V3.5.0/outputs/:/outputs/
           --ipc=host
           --shm-size=2gb
           myria3d
           python run.py
           --config-path /inputs/
-          --config-name proto151_V2.0_epoch_100_Myria3DV3.1.0_predict_config_V3.4.0
+          --config-name proto151_V2.0_epoch_100_Myria3DV3.1.0_predict_config_V3.5.0
           predict.ckpt_path=/inputs/proto151_V2.0_epoch_100_Myria3DV3.1.0.ckpt
           predict.src_las=/inputs/792000_6272000_subset_buildings.las
           predict.output_dir=/outputs/
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 681213c6..5bed9094 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,8 @@
 # CHANGELOG
 
+### 3.5.0
+- Remove the option to extract circular (disk-shaped) patches, since it was never used.
+
 ### 3.4.12
 - Remove COPC datasets and dataloaders since they were abandonned and never used.
 
diff --git a/configs/datamodule/hdf5_datamodule.yaml b/configs/datamodule/hdf5_datamodule.yaml
index c39147a3..10fac67f 100755
--- a/configs/datamodule/hdf5_datamodule.yaml
+++ b/configs/datamodule/hdf5_datamodule.yaml
@@ -19,7 +19,6 @@ pre_filter:
 
 tile_width: 1000
 subtile_width: 50
-subtile_shape: "square" # "square" or "disk"
 subtile_overlap_train: 0
 subtile_overlap_predict: "${predict.subtile_overlap}"
 
diff --git a/docs/source/apidoc/default_config.yml b/docs/source/apidoc/default_config.yml
index acee234b..a6530417 100644
--- a/docs/source/apidoc/default_config.yml
+++ b/docs/source/apidoc/default_config.yml
@@ -119,7 +119,6 @@ datamodule:
     min_num_nodes: 50
   tile_width: 1000
   subtile_width: 50
-  subtile_shape: square
   subtile_overlap_train: 0
   subtile_overlap_predict: ${predict.subtile_overlap}
   batch_size: 2
diff --git a/myria3d/pctl/datamodule/hdf5.py b/myria3d/pctl/datamodule/hdf5.py
index 0248ef4d..72925343 100644
--- a/myria3d/pctl/datamodule/hdf5.py
+++ b/myria3d/pctl/datamodule/hdf5.py
@@ -11,7 +11,6 @@
 from myria3d.pctl.dataset.hdf5 import HDF5Dataset
 from myria3d.pctl.dataset.iterable import InferenceDataset
 from myria3d.pctl.dataset.utils import (
-    SHAPE_TYPE,
     get_las_paths_by_split_dict,
     pre_filter_below_n_points,
 )
@@ -34,13 +33,13 @@ def __init__(
         pre_filter: Optional[Callable[[Data], bool]] = pre_filter_below_n_points,
         tile_width: Number = 1000,
         subtile_width: Number = 50,
-        subtile_shape: SHAPE_TYPE = "square",
         subtile_overlap_train: Number = 0,
         subtile_overlap_predict: Number = 0,
         batch_size: int = 12,
         num_workers: int = 1,
         prefetch_factor: int = 2,
         transforms: Optional[Dict[str, TRANSFORMS_LIST]] = None,
+        **kwargs,
     ):
         self.split_csv_path = split_csv_path
         self.data_dir = data_dir
@@ -53,7 +52,6 @@ def __init__(
 
         self.tile_width = tile_width
         self.subtile_width = subtile_width
-        self.subtile_shape = subtile_shape
         self.subtile_overlap_train = subtile_overlap_train
         self.subtile_overlap_predict = subtile_overlap_predict
 
@@ -134,7 +132,6 @@ def dataset(self) -> HDF5Dataset:
             tile_width=self.tile_width,
             subtile_width=self.subtile_width,
             subtile_overlap_train=self.subtile_overlap_train,
-            subtile_shape=self.subtile_shape,
             pre_filter=self.pre_filter,
             train_transform=self.train_transform,
             eval_transform=self.eval_transform,
@@ -174,7 +171,6 @@ def _set_predict_data(self, las_file_to_predict):
             transform=self.predict_transform,
             tile_width=self.tile_width,
             subtile_width=self.subtile_width,
-            subtile_shape=self.subtile_shape,
             subtile_overlap=self.subtile_overlap_predict,
         )
 
diff --git a/myria3d/pctl/dataset/hdf5.py b/myria3d/pctl/dataset/hdf5.py
index b63960a8..a6cbbfbe 100644
--- a/myria3d/pctl/dataset/hdf5.py
+++ b/myria3d/pctl/dataset/hdf5.py
@@ -12,7 +12,6 @@
 
 from myria3d.pctl.dataset.utils import (
     LAS_PATHS_BY_SPLIT_DICT_TYPE,
-    SHAPE_TYPE,
     SPLIT_TYPE,
     pre_filter_below_n_points,
     split_cloud_into_samples,
@@ -34,7 +33,6 @@ def __init__(
         tile_width: Number = 1000,
         subtile_width: Number = 50,
         subtile_overlap_train: Number = 0,
-        subtile_shape: SHAPE_TYPE = "square",
         pre_filter=pre_filter_below_n_points,
         train_transform: List[Callable] = None,
         eval_transform: List[Callable] = None,
@@ -48,7 +46,6 @@ def __init__(
             points_pre_transform (Callable): Function to turn pdal points into a pyg Data object.
             tile_width (Number, optional): width of a LAS tile. Defaults to 1000.
             subtile_width (Number, optional): effective width of a subtile (i.e. receptive field). Defaults to 50.
-            subtile_shape (SHAPE_TYPE, optional): Shape of subtile could be either "square" or "disk". Defaults to "square".
             subtile_overlap_train (Number, optional): Overlap for data augmentation of train set. Defaults to 0.
             pre_filter (_type_, optional): Function to filter out specific subtiles. Defaults to None.
             train_transform (List[Callable], optional): Transforms to apply to a sample for training. Defaults to None.
@@ -64,7 +61,6 @@ def __init__(
         self.tile_width = tile_width
         self.subtile_width = subtile_width
         self.subtile_overlap_train = subtile_overlap_train
-        self.subtile_shape = subtile_shape
 
         self.hdf5_file_path = hdf5_file_path
 
@@ -85,7 +81,6 @@ def __init__(
                 hdf5_file_path,
                 tile_width,
                 subtile_width,
-                subtile_shape,
                 pre_filter,
                 subtile_overlap_train,
                 points_pre_transform,
@@ -202,7 +197,6 @@ def create_hdf5(
     hdf5_file_path: str,
     tile_width: Number = 1000,
     subtile_width: Number = 50,
-    subtile_shape: SHAPE_TYPE = "square",
     pre_filter: Optional[Callable[[Data], bool]] = pre_filter_below_n_points,
     subtile_overlap_train: Number = 0,
     points_pre_transform: Callable = lidar_hd_pre_transform,
@@ -218,7 +212,6 @@ def create_hdf5(
         hdf5_file_path (str): path to HDF5 dataset,
         tile_width (Number, optional): width of a LAS tile. 1000 by default,
         subtile_width: (Number, optional): effective width of a subtile (i.e. receptive field). 50 by default,
-        subtile_shape (SHAPE_TYPE, optional): Shape of subtile could be either "square" or "disk". "square" by default ,
         pre_filter: Function to filter out specific subtiles. "pre_filter_below_n_points" by default,
         subtile_overlap_train (Number, optional): Overlap for data augmentation of train set. 0 by default,
         points_pre_transform (Callable): Function to turn pdal points into a pyg Data object.
@@ -253,7 +246,6 @@ def create_hdf5(
                     las_path,
                     tile_width,
                     subtile_width,
-                    subtile_shape,
                     subtile_overlap,
                 )
             ):
diff --git a/myria3d/pctl/dataset/iterable.py b/myria3d/pctl/dataset/iterable.py
index 6abdda9d..07904c53 100644
--- a/myria3d/pctl/dataset/iterable.py
+++ b/myria3d/pctl/dataset/iterable.py
@@ -7,7 +7,6 @@
 from torch_geometric.data import Data
 
 from myria3d.pctl.dataset.utils import (
-    SHAPE_TYPE,
     pre_filter_below_n_points,
     split_cloud_into_samples,
 )
@@ -26,7 +25,6 @@ def __init__(
         tile_width: Number = 1000,
         subtile_width: Number = 50,
         subtile_overlap: Number = 0,
-        subtile_shape: SHAPE_TYPE = "square",
     ):
         self.las_file = las_file
 
@@ -36,7 +34,6 @@ def __init__(
 
         self.tile_width = tile_width
         self.subtile_width = subtile_width
-        self.subtile_shape = subtile_shape
         self.subtile_overlap = subtile_overlap
 
     def __iter__(self):
@@ -48,7 +45,6 @@ def get_iterator(self):
             self.las_file,
             self.tile_width,
             self.subtile_width,
-            self.subtile_shape,
             self.subtile_overlap,
         ):
             sample_data = self.points_pre_transform(sample_points)
diff --git a/myria3d/pctl/dataset/utils.py b/myria3d/pctl/dataset/utils.py
index 6b2e5960..60d03d6e 100644
--- a/myria3d/pctl/dataset/utils.py
+++ b/myria3d/pctl/dataset/utils.py
@@ -1,6 +1,5 @@
 import glob
 import json
-import math
 from pathlib import Path
 import subprocess as sp
 from numbers import Number
@@ -12,7 +11,6 @@
 from scipy.spatial import cKDTree
 
 SPLIT_TYPE = Union[Literal["train"], Literal["val"], Literal["test"]]
-SHAPE_TYPE = Union[Literal["disk"], Literal["square"]]
 LAS_PATHS_BY_SPLIT_DICT_TYPE = Dict[SPLIT_TYPE, List[str]]
 
 # commons
@@ -104,7 +102,6 @@ def split_cloud_into_samples(
     las_path: str,
     tile_width: Number,
     subtile_width: Number,
-    shape: SHAPE_TYPE,
     subtile_overlap: Number = 0,
 ):
     """Split LAS point cloud into samples.
@@ -112,8 +109,7 @@ def split_cloud_into_samples(
     Args:
         las_path (str): path to raw LAS file
         tile_width (Number): width of input LAS file
-        subtile_width (Number): width of receptive field ; may be increased for coverage in case of disk shape.
-        shape: "disk" or "square"
+        subtile_width (Number): width of receptive field.
         subtile_overlap (Number, optional): overlap between adjacent tiles. Defaults to 0.
 
     Yields:
@@ -127,11 +123,6 @@ def split_cloud_into_samples(
     for center in XYs:
         radius = subtile_width // 2  # Square receptive field.
         minkowski_p = np.inf
-        if shape == "disk":
-            # Disk receptive field.
-            # Adapt radius to have complete coverage of the data, with a slight overlap between samples.
-            minkowski_p = 2
-            radius = radius * math.sqrt(2)
         sample_idx = np.array(kd_tree.query_ball_point(center, r=radius, p=minkowski_p))
         if not len(sample_idx):
             # no points in this receptive fields
diff --git a/package_metadata.yaml b/package_metadata.yaml
index 807c8bee..a4dc0530 100644
--- a/package_metadata.yaml
+++ b/package_metadata.yaml
@@ -1,4 +1,4 @@
-__version__: "3.4.11"
+__version__: "3.5.0"
 __name__: "myria3d"
 __url__: "https://github.com/IGNF/myria3d"
 __description__: "Deep Learning for the Semantic Segmentation of Aerial Lidar Point Clouds"
diff --git a/run.py b/run.py
index 3d56cad1..341a99ee 100755
--- a/run.py
+++ b/run.py
@@ -22,7 +22,7 @@
 
 TASK_NAME_DETECTION_STRING = "task.task_name="
 DEFAULT_DIRECTORY = "trained_model_assets/"
-DEFAULT_CONFIG_FILE = "proto151_V2.0_epoch_100_Myria3DV3.1.0_predict_config_V3.4.0.yaml"
+DEFAULT_CONFIG_FILE = "proto151_V2.0_epoch_100_Myria3DV3.1.0_predict_config_V3.5.0.yaml"
 DEFAULT_CHECKPOINT = "proto151_V2.0_epoch_100_Myria3DV3.1.0.ckpt"
 DEFAULT_ENV = "placeholder.env"
 
@@ -96,7 +96,6 @@ def launch_hdf5(config: DictConfig):
         hdf5_file_path=config.datamodule.get("hdf5_file_path"),
         tile_width=config.datamodule.get("tile_width"),
         subtile_width=config.datamodule.get("subtile_width"),
-        subtile_shape=config.datamodule.get("subtile_shape"),
         pre_filter=hydra.utils.instantiate(config.datamodule.get("pre_filter")),
         subtile_overlap_train=config.datamodule.get("subtile_overlap_train"),
         points_pre_transform=hydra.utils.instantiate(
diff --git a/trained_model_assets/proto151_V2.0_epoch_100_Myria3DV3.1.0_predict_config_V3.4.0.yaml b/trained_model_assets/proto151_V2.0_epoch_100_Myria3DV3.1.0_predict_config_V3.5.0.yaml
similarity index 99%
rename from trained_model_assets/proto151_V2.0_epoch_100_Myria3DV3.1.0_predict_config_V3.4.0.yaml
rename to trained_model_assets/proto151_V2.0_epoch_100_Myria3DV3.1.0_predict_config_V3.5.0.yaml
index acd15868..009293d6 100644
--- a/trained_model_assets/proto151_V2.0_epoch_100_Myria3DV3.1.0_predict_config_V3.4.0.yaml
+++ b/trained_model_assets/proto151_V2.0_epoch_100_Myria3DV3.1.0_predict_config_V3.5.0.yaml
@@ -132,7 +132,6 @@ datamodule:
     min_num_nodes: 1
   tile_width: 1000
   subtile_width: 50
-  subtile_shape: square
   subtile_overlap_train: 0
   subtile_overlap_predict: ${predict.subtile_overlap}
   batch_size: 10
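
A minimal usage sketch (not part of the patch) of the simplified sampling API after this change: split_cloud_into_samples now always uses a square receptive field and no longer takes a shape argument. The signature is taken from the hunks above; the LAS path is a placeholder and the widths mirror the default config values.

# Hypothetical post-3.5.0 usage sketch: square subtiles only, no `shape` argument.
# The LAS path below is a placeholder; widths mirror the default config.
from myria3d.pctl.dataset.utils import split_cloud_into_samples

LAS_PATH = "/path/to/some_tile.las"  # placeholder input file
TILE_WIDTH = 1000                    # width of the input LAS tile
SUBTILE_WIDTH = 50                   # receptive field width
SUBTILE_OVERLAP = 0                  # overlap between adjacent subtiles

# Each yielded item corresponds to one square subtile of the point cloud.
n_subtiles = sum(
    1
    for _ in split_cloud_into_samples(
        LAS_PATH,
        TILE_WIDTH,
        SUBTILE_WIDTH,
        subtile_overlap=SUBTILE_OVERLAP,
    )
)
print(f"Extracted {n_subtiles} square subtiles of width {SUBTILE_WIDTH}.")

Note also that the datamodule __init__ now accepts **kwargs, so an extra datamodule key left over in an older Hydra config (such as the removed subtile_shape) is absorbed rather than raising a TypeError, presumably to keep older predict configs usable.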