diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 7ff42ec7de..c20d0dfe66 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -5,9 +5,9 @@ jobs:
   lint:
     name: Check style (lint)
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
       - run: sudo rm /etc/apt/sources.list.d/*.list
       - run: sudo apt update
       - run: sudo apt-get install libpq-dev
@@ -16,9 +16,9 @@ jobs:
   gitattributes:
     name: Check style (git-attributes)
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
       - run: sudo rm /etc/apt/sources.list.d/*.list
       - run: sudo apt update
       - run: sudo apt-get install libpq-dev
@@ -27,7 +27,7 @@ jobs:
   test-py:
     name: Test Python
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     services:
       # Label used to access the service container
       postgres:
@@ -52,7 +52,7 @@ jobs:
           - '3.11'
           - '3.12'
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
       - name: Set up Python v${{ matrix.python-version }}
         uses: actions/setup-python@v5
         with:
@@ -76,9 +76,9 @@ jobs:
   test-fed-data:
     name: Check federal data definitions
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
       - run: sudo rm /etc/apt/sources.list.d/*.list
       - run: sudo apt update
       - run: sudo apt-get install xsltproc
@@ -93,9 +93,9 @@ jobs:
   doc:
     name: Make and deploy documentation
     if: github.ref == 'refs/heads/master'
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
       - name: Make documentation
         run: |
           sudo rm /etc/apt/sources.list.d/*.list
@@ -103,14 +103,14 @@ jobs:
           sudo apt update
           sudo apt-get install libpq-dev
           make doc-html
       - name: Deploy 🚀
-        uses: JamesIves/github-pages-deploy-action@v4.6.3
+        uses: JamesIves/github-pages-deploy-action@v4.6.8
         with:
           branch: gh-pages # The branch the action should deploy to.
           folder: doc/build/html # The folder the action should deploy.
 
   build-and-publish:
     name: Build and publish Python 🐍 distributions 📦 to PyPI
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     needs: [lint, gitattributes]
     steps:
       - uses: actions/checkout@master
diff --git a/.github/workflows/daily_check.yaml b/.github/workflows/daily_check.yaml
index 72d7c7abcc..5145f33a22 100644
--- a/.github/workflows/daily_check.yaml
+++ b/.github/workflows/daily_check.yaml
@@ -6,9 +6,9 @@ on:
 jobs:
   test-fed-data:
     name: Check federal data definitions
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
       - run: sudo rm /etc/apt/sources.list.d/*.list
       - run: sudo apt update
       - run: sudo apt-get install xsltproc
@@ -22,7 +22,7 @@ jobs:
   test-py:
     name: Test Python
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     services:
       # Label used to access the service container
       postgres:
@@ -47,7 +47,7 @@ jobs:
           - python3.11
          - python3.12
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
       - run: sudo rm /etc/apt/sources.list.d/*.list
       - run: sudo apt update
       - run: sudo apt-get install libpq-dev
diff --git a/CHANGES.rst b/CHANGES.rst
index 38f62b2b09..3271c0acae 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -5,6 +5,17 @@ Notes:
 ------
 - This python package specifies the version numbers only of directly imported python packages. This approach may result in a build failure of older versions of the project if incompatibilities arise between imported packages over time. The build process of the master branch is regularly tested in an automatic process.
 
+master
+------
+- Support Oereblex API version 1.2.5 via geolink-formatter 2.0.6 (#2081)
+
+2.5.4
+-----
+- New parameter expected_toc_length to define a default number of table of contents (TOC) pages (#2042)
+- Add timeout in address source (#2043)
+- Optimize legend entries retrieval (#2050)
+- Library upgrades (waitress, sqlalchemy, psycopg2, urllib3)
+
 2.5.3
 -----
 - Provide a general WMS verify certificate option
diff --git a/Dockerfile b/Dockerfile
index 8edd46b9f1..69ad332aa0 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.12.5-bullseye
+FROM python:3.13.0-bullseye
 
 ENV DEBIAN_FRONTEND=noninteractive
diff --git a/dev/config/pyramid_oereb.yml.mako b/dev/config/pyramid_oereb.yml.mako
index 1dc7ed1b75..774c652d44 100644
--- a/dev/config/pyramid_oereb.yml.mako
+++ b/dev/config/pyramid_oereb.yml.mako
@@ -85,9 +85,18 @@ pyramid_oereb:
     # Will make an estimation of the total length of the Table of Content (TOC) and control that the page
     # numbering in the output pdf is consistent with TOC numbering. If it is known that the TOC is very long and
     # could run over more than one page, it is preferred to set this to true. The drawback is that it might need
-    # more time to generate the PDF. If set to false, it will assume that only one TOC page exists, and this can
-    # lead to wrong numbering in the TOC.
-    compute_toc_pages: true
+    # more time to generate the PDF. If set to false, the expected_toc_length setting below will be used. If that
+    # is not set either, a single TOC page is assumed, which can lead to wrong numbering in the TOC; this is then
+    # fixed by a second PDF extract call, at the cost of performance.
+    compute_toc_pages: false
+    # If the computed estimate of the number of TOC pages is unreliable for your setup, you can instead
+    # specify a default number of TOC pages here. For most cantons the number of TOC pages is fairly constant,
+    # unless a real estate is affected by either no restrictions or a very large number of them.
+    # In both cases (computing an estimate or setting a default for the number of TOC pages) the exact number
+    # of TOC pages is extracted from the created PDF, and if it differs from the expected value the PDF is
+    # created a second time with the correct page numbers.
+    # Note that if "compute_toc_pages" is set to true, "expected_toc_length" is not taken into account.
+    expected_toc_length: 2
     # Specify any additional URL parameters that the print shall use for WMS calls
     wms_url_params:
       TRANSPARENT: 'true'
@@ -171,7 +180,7 @@ pyramid_oereb:
       # OEREBlex host
       host: https://oereblex.sg.ch
       # geoLink schema version
-      version: 1.2.2
+      version: 1.2.5
       # Pass schema version in URL
       pass_version: true
       # Enable/disable XML validation
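
For reference, the two settings above drive the following selection logic in the mapfish_print renderer (see the mapfish_print.py changes later in this diff). A minimal standalone sketch; the helper name is hypothetical and the TocPages estimate is passed in as a callable rather than imported:

    def choose_nb_toc_pages(print_config, compute_estimate):
        """Pick the number of TOC pages the first print request should assume.

        compute_estimate is a zero-argument callable, e.g. wrapping
        TocPages(extract_as_dict).getNbPages() in the real renderer.
        """
        if print_config.get('compute_toc_pages', False):
            # Estimate the TOC length from the extract content itself.
            return compute_estimate()
        expected = print_config.get('expected_toc_length')
        if expected and int(expected) > 0:
            # Trust the configured default.
            return int(expected)
        # Fall back to a single TOC page; a mismatch later triggers a second print call.
        return 1

    # With the settings shown above (compute_toc_pages: false, expected_toc_length: 2):
    assert choose_nb_toc_pages({'compute_toc_pages': False, 'expected_toc_length': 2}, lambda: 3) == 2

Whatever value is chosen here, the rendered PDF is inspected afterwards and only re-requested if the real TOC length differs.
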
diff --git a/doc/source/changes.rst b/doc/source/changes.rst
index 40fa311cb0..940fc55b50 100644
--- a/doc/source/changes.rst
+++ b/doc/source/changes.rst
@@ -6,6 +6,22 @@ Changes/Hints for migration
 This chapter will give you hints on how to handle version migration, in particular regarding what you
 may need to adapt in your project configuration, database etc. when upgrading to a new version.
 
+master
+------
+- Support for Oereblex API version 1.2.5 added via geolink-formatter 2.0.6 (#2081). The Oereblex schema version used to download Oereblex XMLs can be set in the file pyramid_oereb.yaml.
+
+Version 2.5.4
+-------------
+Feature and maintenance release:
+
+* New parameter 'expected_toc_length' allows defining a default number of table of contents (TOC) pages,
+  avoiding a second call for the PDF extract in most cases. This value may be set if most of the PDF
+  extracts have the same number of TOC pages. It complements the 'compute_toc_pages' parameter. If the
+  latter is set to true, 'expected_toc_length' is ignored.
+* Add timeout in address source (#2043)
+* Optimize legend entries retrieval (#2050)
+* Library upgrades (waitress, sqlalchemy, psycopg2, urllib3)
+
 Version 2.5.3
 -------------
 Feature and maintenance release:
@@ -92,7 +108,7 @@ Maintenance release:
 * Add support for newest oereblex API (via geolink-formatter, #1703)
 * Various minor library upgrades (SQLAlchemy, geoalchemy, psycopg2, pypdf)
-
+
 Version 2.4.2
 -------------
@@ -165,7 +181,7 @@ Version 2.2.4
 -------------
 New functionality for configuring tolerance (optional):
-* Support tolerance per geometry type (#1603). See example definitions in the example project configuration file.
+* Support tolerance per geometry type (#1603). See example definitions in the example project configuration file.
 * Library updates (#1604)
 
 .. _changes-version-2.2.3:
diff --git a/pyproject.toml b/pyproject.toml
index dcf901b44b..779f4a0834 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "pyramid_oereb"
-version = "2.5.3"
+version = "2.5.4"
 description = "pyramid_oereb, extension for pyramid web frame work to provide a basic server part for the oereb project"
 classifiers=[
@@ -35,22 +35,22 @@ Changelog = "https://github.com/openoereb/pyramid_oereb/blob/master/CHANGES.rst"
 
 [project.optional-dependencies]
 # Dependencies listed in "recommend" must be included in "no-version" without explicit version number
 recommend = [
-    "pypdf==4.3.1",
+    "pypdf==5.1.0",
     "filetype==1.2.0",
     "geoalchemy2==0.15.2",
     "pyramid==2.0.2",
     "pyramid-debugtoolbar==4.12.1",
-    "qrcode==7.4.2",
+    "qrcode==8.0",
     "image==1.5.33",
     "shapely==2.0.6",
-    "SQLAlchemy==2.0.32",
+    "SQLAlchemy==2.0.36",
     "pyaml-env==1.2.1",
-    "urllib3==2.2.2",
-    "waitress==3.0.0",
+    "urllib3==2.2.3",
+    "waitress==3.0.1",
     "pyreproj==3.0.0",
     "mako-render==0.1.0",
     "requests==2.32.3",
-    "geolink-formatter==2.0.5",
+    "geolink-formatter==2.0.6",
     "pyconizer==0.1.4",
     "c2cwsgiutils[standard]==6.0.8"]
 no-version = [
@@ -75,21 +75,21 @@ testing = [
     "jsonschema==4.23.0",
     "lxml==5.3.0",
-    "pytest==8.3.2",
-    "pytest-cov==5.0.0",
+    "pytest==8.3.3",
+    "pytest-cov==6.0.0",
     "pytest-ordering==0.6",
     "requests-mock==1.12.1",
     "responses==0.25.3",
-    "webtest==3.0.0",
-    "pillow==10.4.0"]
+    "webtest==3.0.1",
+    "pillow==11.0.0"]
 dev = [
     "flake8==7.1.1",
     "Flake8-pyproject==1.2.3",
     "pyflakes==3.2.0",
     "pycodestyle==2.12.1",
     "Sphinx==7.4.7",
-    "sphinx_rtd_theme==2.0.0",
-    "psycopg2==2.9.9",
+    "sphinx_rtd_theme==3.0.1",
+    "psycopg2==2.9.10",
     "mccabe==0.7.0",
     "c2c.template==2.4.2",
     "yappi"]
diff --git a/pyramid_oereb/contrib/data_sources/interlis_2_3/sources/plr.py b/pyramid_oereb/contrib/data_sources/interlis_2_3/sources/plr.py
index 0306a86b29..21cb0edfe9 100644
--- a/pyramid_oereb/contrib/data_sources/interlis_2_3/sources/plr.py
+++ b/pyramid_oereb/contrib/data_sources/interlis_2_3/sources/plr.py
@@ -470,7 +470,21 @@ def collect_related_geometries_by_real_estate(self, session, real_estate):
             .selectinload(self.models.MultilingualUri.localised_uri)
         ).all()
 
-    def collect_legend_entries_by_bbox(self, session, bbox, law_status):
+    def get_legend_entries_from_db(self, session, legend_entry_ids):
+        """
+        Retrieves the legend entries for a list of t_id-values.
+
+        Args:
+            session (sqlalchemy.orm.Session): The requested clean session instance ready for use
+            legend_entry_ids (list): list of the legend entry ids
+
+        Returns:
+            list: the query result represented as a list.
+        """
+        return session.query(self.legend_entry_model).filter(
+            self.legend_entry_model.t_id.in_(legend_entry_ids)).all()
+
+    def collect_legend_entries_by_bbox(self, session, bbox):
         """
         Extracts all legend entries in the topic which have spatial relation with the passed bounding
         box of visible extent.
@@ -478,12 +492,11 @@ def collect_legend_entries_by_bbox(self, session, bbox, law_status):
 
         Args:
             session (sqlalchemy.orm.Session): The requested clean session instance ready for use
             bbox (shapely.geometry.base.BaseGeometry): The bbox to search the records.
-            law_status (str): String of the law status for which the legend entries should be queried.
 
         Returns:
             list: The result of the related geometries unique by the public law restriction id and law status
         """
-        distinct_legend_entry_ids = []
+        # Select the legend entries of all plr within bbox
         geometries = session.query(self._model_).filter(
             or_(
                 self._model_.point.ST_Intersects(from_shape(bbox, srid=Config.get('srid'))),
@@ -493,13 +506,29 @@ def collect_legend_entries_by_bbox(self, session, bbox, law_status):
         ).options(
             selectinload(self.models.Geometry.public_law_restriction)
         ).all()
+
+        # Compile a list of unique legend entry ids for each law status
+        legend_entry_ids = dict()
         for geometry in geometries:
-            if geometry.public_law_restriction.legend_entry_id not in distinct_legend_entry_ids\
-                    and geometry.public_law_restriction.law_status == law_status:
-                distinct_legend_entry_ids.append(geometry.public_law_restriction.legend_entry_id)
+            if geometry.public_law_restriction.law_status not in legend_entry_ids:
+                legend_entry_ids[geometry.public_law_restriction.law_status] = {
+                    geometry.public_law_restriction.legend_entry_id
+                }
+            else:
+                legend_entry_ids[geometry.public_law_restriction.law_status].add(
+                    geometry.public_law_restriction.legend_entry_id
+                )
 
-        return session.query(self.legend_entry_model).filter(
-            self.legend_entry_model.t_id.in_((distinct_legend_entry_ids))).all()
+        # Retrieve legend entries
+        legend_entries_from_db = []
+        for law_status in legend_entry_ids:
+            legend_entries_from_db.append(
+                [
+                    self.get_legend_entries_from_db(session, list(legend_entry_ids[law_status])),
+                    law_status
+                ]
+            )
+
+        return legend_entries_from_db
 
     def read(self, params, real_estate, bbox):
         """
@@ -537,20 +566,9 @@ def read(self, params, real_estate, bbox):
             else:
                 # We found spatially related elements. This means we need to extract the actual plr
                 # information related to the found geometries.
-                law_status_of_geometry = []
-                # get distinct values of law_status for all geometries found
-                for geometry in geometry_results:
-                    if (geometry.public_law_restriction.law_status not in law_status_of_geometry):
-                        law_status_of_geometry.append(geometry.public_law_restriction.law_status)
-                legend_entries_from_db = []
                 # get legend_entries per law_status
-                for law_status in law_status_of_geometry:
-                    legend_entry_with_law_status = [
-                        self.collect_legend_entries_by_bbox(session, bbox, law_status),
-                        law_status
-                    ]
-                    legend_entries_from_db.append(legend_entry_with_law_status)
+                legend_entries_from_db = self.collect_legend_entries_by_bbox(session, bbox)
 
                 self.records = []
                 for geometry_result in geometry_results:
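
The rewritten collect_legend_entries_by_bbox above replaces the old one-query-per-law-status pattern with a single pass over the geometries that groups legend entry ids by law status and then fetches each group once. A condensed sketch of that grouping step, using collections.defaultdict instead of the explicit if/else; the function name and plain objects are illustrative only:

    from collections import defaultdict

    def group_legend_entry_ids(geometries):
        """Group legend entry ids of the geometries found in the bbox by law status.

        Each geometry only needs to expose public_law_restriction.law_status and
        public_law_restriction.legend_entry_id, as the ORM objects above do.
        """
        ids_per_status = defaultdict(set)
        for geometry in geometries:
            plr = geometry.public_law_restriction
            # A set removes duplicate legend entry ids per law status.
            ids_per_status[plr.law_status].add(plr.legend_entry_id)
        return ids_per_status

With the ids grouped this way, one get_legend_entries_from_db() call per law status replaces the previous pattern of re-scanning the bbox once per law status; the standard data source below applies the same change.
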
+ """ + + return session.query(self.legend_entry_model).filter( + self.legend_entry_model.id.in_(legend_entry_ids)).all() + + def collect_legend_entries_by_bbox(self, session, bbox): """ Extracts all legend entries in the topic which have spatial relation with the passed bounding box of visible extent. @@ -622,22 +637,39 @@ def collect_legend_entries_by_bbox(self, session, bbox, law_status): Args: session (sqlalchemy.orm.Session): The requested clean session instance ready for use bbox (shapely.geometry.base.BaseGeometry): The bbox to search the records. - law_status (str): String of the law status for which the legend entries should be queried. Returns: list: The result of the related geometries unique by the public law restriction id and law status """ - distinct_legend_entry_ids = [] + # Select the legend entries of all plr within bbox geometries = self.handle_collection(session, bbox).options( selectinload(self.models.Geometry.public_law_restriction) ).all() + + # Compile a list of unique legend entry ids for each law status + legend_entry_ids = dict() for geometry in geometries: - if geometry.public_law_restriction.legend_entry_id not in distinct_legend_entry_ids \ - and geometry.public_law_restriction.law_status == law_status: - distinct_legend_entry_ids.append(geometry.public_law_restriction.legend_entry_id) - return session.query(self.legend_entry_model).filter( - self.legend_entry_model.id.in_((distinct_legend_entry_ids))).all() + if geometry.public_law_restriction.law_status not in legend_entry_ids: + legend_entry_ids[geometry.public_law_restriction.law_status] = { + geometry.public_law_restriction.legend_entry_id + } + else: + legend_entry_ids[geometry.public_law_restriction.law_status].add( + geometry.public_law_restriction.legend_entry_id + ) + + # Retrieve legend entries + legend_entries_from_db = [] + for law_status in legend_entry_ids: + legend_entries_from_db.append( + [ + self.get_legend_entries_from_db(session, list(legend_entry_ids[law_status])), + law_status + ] + ) + + return legend_entries_from_db def read(self, params, real_estate, bbox): # pylint: disable=W:0221 """ @@ -674,20 +706,8 @@ def read(self, params, real_estate, bbox): # pylint: disable=W:0221 # We found spatially related elements. This means we need to extract the actual plr # information related to the found geometries. 
-            law_status_of_geometry = []
-            # get distinct values of law_status for all geometries found
-            for geometry in geometry_results:
-                if (geometry.public_law_restriction.law_status not in law_status_of_geometry):
-                    law_status_of_geometry.append(geometry.public_law_restriction.law_status)
-
-            legend_entries_from_db = []
             # get legend_entries per law_status
-            for law_status in law_status_of_geometry:
-                legend_entry_with_law_status = [
-                    self.collect_legend_entries_by_bbox(session, bbox, law_status),
-                    law_status
-                ]
-                legend_entries_from_db.append(legend_entry_with_law_status)
+            legend_entries_from_db = self.collect_legend_entries_by_bbox(session, bbox)
 
             self.records = []
             for geometry_result in geometry_results:
diff --git a/pyramid_oereb/contrib/print_proxy/mapfish_print/mapfish_print.py b/pyramid_oereb/contrib/print_proxy/mapfish_print/mapfish_print.py
index 62d92210a8..bc976a3707 100644
--- a/pyramid_oereb/contrib/print_proxy/mapfish_print/mapfish_print.py
+++ b/pyramid_oereb/contrib/print_proxy/mapfish_print/mapfish_print.py
@@ -73,12 +73,20 @@ def __call__(self, value, system):
         extract_as_dict = self._render(extract_record, value[1])
         feature_geometry = mapping(extract_record.real_estate.limit)
 
+        print_config = Config.get('print', {})
+
+        if print_config.get('compute_toc_pages', False):
+            extract_as_dict['nbTocPages'] = TocPages(extract_as_dict).getNbPages()
+        else:
+            if print_config.get('expected_toc_length') and int(print_config.get('expected_toc_length')) > 0:
+                extract_as_dict['nbTocPages'] = print_config.get('expected_toc_length')
+            else:
+                extract_as_dict['nbTocPages'] = 1
+
         # set the global_datetime variable so that it can be used later for the archive
         self.set_global_datetime(extract_as_dict['CreationDate'])
         self.convert_to_printable_extract(extract_as_dict, feature_geometry)
 
-        print_config = Config.get('print', {})
-
         extract_as_dict['Display_RealEstate_SubunitOfLandRegister'] = print_config.get(
             'display_real_estate_subunit_of_land_register', True
         )
@@ -97,7 +105,7 @@ def __call__(self, value, system):
             extract_as_dict['nbTocPages'] = 1
 
         spec = {
-            'layout': Config.get('print', {})['template_name'],
+            'layout': print_config['template_name'],
             'outputFormat': 'pdf',
             'lang': self._language,
             'attributes': extract_as_dict,
@@ -108,37 +116,41 @@ def __call__(self, value, system):
         if self._request.GET.get('getspec', 'no') != 'no':
             response.headers['Content-Type'] = 'application/json; charset=UTF-8'
             return json.dumps(spec, sort_keys=True, indent=4)
-        pdf_url = urlparse.urljoin(Config.get('print', {})['base_url'] + '/', 'buildreport.pdf')
-        pdf_headers = Config.get('print', {})['headers']
+        pdf_url = urlparse.urljoin(print_config['base_url'] + '/', 'buildreport.pdf')
+        pdf_headers = print_config['headers']
         print_result = requests.post(
             pdf_url,
             headers=pdf_headers,
             data=json.dumps(spec)
         )
         try:
-            if Config.get('print', {}).get('compute_toc_pages', False):
-                with io.BytesIO() as pdf:
-                    pdf.write(print_result.content)
-                    pdf_reader = PdfReader(pdf)
-                    x = []
-                    for i in range(len(pdf_reader.outline)):
-                        if isinstance(pdf_reader.outline[i], list):
-                            x.append(pdf_reader.outline[i][0]['/Page']['/StructParents'])
-                        else:
-                            x.append(pdf_reader.outline[i]['/Page']['/StructParents'])
-                    try:
-                        true_nb_of_toc = min(x)-1
-                    except ValueError:
-                        true_nb_of_toc = 1
-
-                    if true_nb_of_toc != extract_as_dict['nbTocPages']:
-                        log.warning('nbTocPages in result pdf: {} are not equal to the one predicted : {}, request new pdf'.format(true_nb_of_toc,extract_as_dict['nbTocPages'])) # noqa
-                        extract_as_dict['nbTocPages'] = true_nb_of_toc
-                        print_result = requests.post(
-                            pdf_url,
-                            headers=pdf_headers,
-                            data=json.dumps(spec)
-                        )
+            log.debug('Validation of the TOC length with compute_toc_pages set to {} and expected_toc_length set to {}'.format(print_config.get('compute_toc_pages'), print_config.get('expected_toc_length'))) # noqa
+            with io.BytesIO() as pdf:
+                pdf.write(print_result.content)
+                pdf_reader = PdfReader(pdf)
+                x = []
+                for i in range(len(pdf_reader.outline)):
+                    if isinstance(pdf_reader.outline[i], list):
+                        x.append(pdf_reader.outline[i][0]['/Page']['/StructParents'])
+                    else:
+                        x.append(pdf_reader.outline[i]['/Page']['/StructParents'])
+                try:
+                    true_nb_of_toc = min(x)-1
+                except ValueError:
+                    true_nb_of_toc = 1
+
+                log.debug('True number of TOC pages is {}, expected number was {}'.format(true_nb_of_toc, extract_as_dict['nbTocPages'])) # noqa
+                if true_nb_of_toc != extract_as_dict['nbTocPages']:
+                    log.warning('nbTocPages in result pdf: {} are not equal to the one predicted : {}, request new pdf'.format(true_nb_of_toc,extract_as_dict['nbTocPages'])) # noqa
+                    log.debug('Secondary PDF extract call STARTED')
+                    extract_as_dict['nbTocPages'] = true_nb_of_toc
+                    print_result = requests.post(
+                        pdf_url,
+                        headers=pdf_headers,
+                        data=json.dumps(spec)
+                    )
+                    log.debug('Secondary PDF extract call to fix TOC pages number DONE')
+
         except PdfReadError as e:
             err_msg = 'a problem occurred while generating the pdf file'
             log.error(err_msg + ': ' + str(e))
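
The TOC validation above can be read as a small standalone helper. The sketch below mirrors the heuristic used by the patched renderer (the smallest '/StructParents' value among the outline targets marks the first page after the TOC); the function name is illustrative, not part of the patch:

    import io

    from pypdf import PdfReader

    def toc_pages_in_pdf(pdf_bytes):
        """Derive the number of TOC pages from a rendered extract (heuristic)."""
        reader = PdfReader(io.BytesIO(pdf_bytes))
        struct_parents = []
        for item in reader.outline:
            # Outline entries may be nested lists; inspect the first child then.
            if isinstance(item, list):
                struct_parents.append(item[0]['/Page']['/StructParents'])
            else:
                struct_parents.append(item['/Page']['/StructParents'])
        try:
            return min(struct_parents) - 1
        except ValueError:
            # No outline at all: fall back to one TOC page, as the renderer does.
            return 1

If the value obtained this way differs from the nbTocPages sent with the first print request, the renderer updates the spec and issues one more buildreport.pdf request, as shown in the diff above.
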
+ "models.theme.model_factory_integer_pk", + "schema_name": "land_use_plans" + } + } + } + + # @pytest.fixture # def interlis_real_estate(): # theme = ThemeRecord('code', dict(), 100) @@ -286,3 +302,107 @@ def test_related_geometries(processor_data, pyramid_oereb_test_config, interlis_ assert len(extract_raw.real_estate.public_law_restrictions) == nb_results extract = processor.plr_tolerance_check(extract_raw) assert len(extract.real_estate.public_law_restrictions) == nb_results + + +def mock_session_object_query_geometries(items_list): + class PublicLawRestrictionTest(): + def __init__(self, law_status, legend_entry_id): + self.law_status = law_status + self.legend_entry_id = legend_entry_id + + class GeometryTest(): + def __init__(self, public_law_restriction): + self.public_law_restriction = public_law_restriction + + geometries = [] + for item in items_list: + geometries.append(GeometryTest(PublicLawRestrictionTest(item[0], item[1]))) + + class AllTest(): + def all(): + return iter(geometries) + + class DistinctTest(): + def __init__(self): + pass + + def options(arg2): + return AllTest + + class FilterTest(): + def __init__(self): + pass + + def distinct(arg2): + return DistinctTest + + class QueryTest(): + def __init__(self): + pass + + def filter(arg3): + return FilterTest + + class SessionTest(): + def __init__(self): + pass + + def query(arg1): + return QueryTest + + return SessionTest + + +def get_return_vals_of_get_legend_entries_from_db(arg1, arg2, list_of_ids): + return_value = [] + for id in list_of_ids: + return_value.append((id, )) + return return_value + + +@pytest.mark.parametrize('idx,items_list', [ + (0, [ + ["inForce", 1], + ["changeWithoutPreEffect", 1], + ["changeWithoutPreEffect", 2], + ["inForce", 3], + ["inForce", 4], + ["inForce", 3], + ["changeWithoutPreEffect", 6], + ["inForce", 7], + ["changeWithoutPreEffect", 2], + ["inForce", 9], + ["changeWithoutPreEffect", 7] + ]), + (1, [ + ["inForce", 1], + ["inForce", 3], + ["inForce", 4], + ["inForce", 3], + ["inForce", 7], + ["inForce", 9], + ]) +]) +def test_collect_legend_entries_by_bbox(idx, items_list, plr_source_params): + with ( + patch.object( + DatabaseSource, + 'get_legend_entries_from_db', + get_return_vals_of_get_legend_entries_from_db + ) + ): + source = DatabaseSource(**plr_source_params) + result = source.collect_legend_entries_by_bbox( + mock_session_object_query_geometries(items_list), + Polygon(((0., 0.), (0., 1.), (1., 1.), (1., 0.), (0., 0.)))) + + if idx == 0: + assert len(result) == 2 + assert sorted([x[0] for x in result if x[1] == 'inForce'][0]) == \ + [(1, ), (3, ), (4, ), (7, ), (9, )] + assert sorted([x[0] for x in result if x[1] == 'changeWithoutPreEffect'][0]) == \ + [(1, ), (2, ), (6, ), (7, )] + if idx == 1: + assert len(result) == 1 + assert sorted([x[0] for x in result if x[1] == 'inForce'][0]) == \ + [(1, ), (3, ), (4, ), (7, ), (9, )] diff --git a/tests/contrib.data_sources.standard/sources/test_plr.py b/tests/contrib.data_sources.standard/sources/test_plr.py index 33fe945136..8e4a8c7713 100644 --- a/tests/contrib.data_sources.standard/sources/test_plr.py +++ b/tests/contrib.data_sources.standard/sources/test_plr.py @@ -1247,3 +1247,90 @@ def test_handle_collection(plr_source_params, all_plr_result_session, real_estat ST_GeomFromWKB(%(ST_GeomFromWKB_1)s, %(ST_GeomFromWKB_2)s) ) '''.replace('\n', '').replace(' ', '') + + +def mock_return_value_handle_collection(items_list): + class PublicLawRestrictionTest(): + def __init__(self, law_status, legend_entry_id): + self.law_status = 
+            self.legend_entry_id = legend_entry_id
+
+    class GeometryTest():
+        def __init__(self, public_law_restriction):
+            self.public_law_restriction = public_law_restriction
+
+    geometries = []
+    for item in items_list:
+        geometries.append(GeometryTest(PublicLawRestrictionTest(item[0], item[1])))
+
+    class AllTest():
+        def __init__(arg1, arg2):
+            pass
+
+        def all(arg3):
+            return iter(geometries)
+
+    class OptionsTest():
+        def __init__(self, arg1, arg2):
+            pass
+        options = AllTest
+
+    return OptionsTest
+
+
+def get_return_vals_of_get_legend_entries_from_db(arg1, arg2, list_of_ids):
+    return_value = []
+    for id in list_of_ids:
+        return_value.append((id, ))
+    return return_value
+
+
+@pytest.mark.parametrize('idx,items_list', [
+    (0, [
+        ["inForce", 1],
+        ["changeWithoutPreEffect", 1],
+        ["changeWithoutPreEffect", 2],
+        ["inForce", 3],
+        ["inForce", 4],
+        ["inForce", 3],
+        ["changeWithoutPreEffect", 6],
+        ["inForce", 7],
+        ["changeWithoutPreEffect", 2],
+        ["inForce", 9],
+        ["changeWithoutPreEffect", 7]
+    ]),
+    (1, [
+        ["inForce", 1],
+        ["inForce", 3],
+        ["inForce", 4],
+        ["inForce", 3],
+        ["inForce", 7],
+        ["inForce", 9],
+    ])
+])
+def test_collect_legend_entries_by_bbox(idx, items_list, plr_source_params):
+    with (
+        patch.object(
+            DatabaseSource,
+            'handle_collection',
+            mock_return_value_handle_collection(items_list)
+        ),
+        patch.object(
+            DatabaseSource,
+            'get_legend_entries_from_db',
+            get_return_vals_of_get_legend_entries_from_db
+        )
+    ):
+        source = DatabaseSource(**plr_source_params)
+        result = source.collect_legend_entries_by_bbox("", "")
+
+        if idx == 0:
+            assert len(result) == 2
+            assert sorted([x[0] for x in result if x[1] == 'inForce'][0]) == \
+                [(1, ), (3, ), (4, ), (7, ), (9, )]
+            assert sorted([x[0] for x in result if x[1] == 'changeWithoutPreEffect'][0]) == \
+                [(1, ), (2, ), (6, ), (7, )]
+        if idx == 1:
+            assert len(result) == 1
+            assert sorted([x[0] for x in result if x[1] == 'inForce'][0]) == \
+                [(1, ), (3, ), (4, ), (7, ), (9, )]
diff --git a/tests/contrib.print_proxy.mapfish_print/resources/test_config.yml b/tests/contrib.print_proxy.mapfish_print/resources/test_config.yml
index 3b1dd144ee..027758c227 100644
--- a/tests/contrib.print_proxy.mapfish_print/resources/test_config.yml
+++ b/tests/contrib.print_proxy.mapfish_print/resources/test_config.yml
@@ -4,6 +4,8 @@ pyramid_oereb:
     wms_url_params:
       TRANSPARENT: 'true'
       OTHERCUSTOM: 'myvalue'
+    compute_toc_pages: false
+    expected_toc_length: 2
 
   theme:
     source:
diff --git a/tests/contrib.print_proxy.mapfish_print/test_mapfish_print_configuration.py b/tests/contrib.print_proxy.mapfish_print/test_mapfish_print_configuration.py
index a2ff8d4aae..9066a8050b 100644
--- a/tests/contrib.print_proxy.mapfish_print/test_mapfish_print_configuration.py
+++ b/tests/contrib.print_proxy.mapfish_print/test_mapfish_print_configuration.py
@@ -100,3 +100,14 @@ def test_default_wms_url_param_config(DummyRenderInfo):
     config = renderer.get_wms_url_params()
     # Do the check for this test. Value should be the default setting.
     assert config == {'TRANSPARENT': 'true'}
+
+
+def test_toc_pages_default_config():
+    Config._config = None
+    Config.init('./tests/contrib.print_proxy.mapfish_print/resources/test_config.yml', 'pyramid_oereb')
+    compute_toc_pages = Config.get('print', {}).get('compute_toc_pages')
+    expected_toc_length = Config.get('print', {}).get('expected_toc_length')
+
+    assert isinstance(compute_toc_pages, bool)
+    assert bool(compute_toc_pages) is False
+    assert expected_toc_length == 2
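
The expected values in the two test_collect_legend_entries_by_bbox tests above follow directly from deduplicating the parametrized items per law status. A short walk-through of the first case in plain Python, independent of the fixtures:

    items_list = [
        ["inForce", 1], ["changeWithoutPreEffect", 1], ["changeWithoutPreEffect", 2],
        ["inForce", 3], ["inForce", 4], ["inForce", 3], ["changeWithoutPreEffect", 6],
        ["inForce", 7], ["changeWithoutPreEffect", 2], ["inForce", 9],
        ["changeWithoutPreEffect", 7],
    ]

    groups = {}
    for law_status, legend_entry_id in items_list:
        groups.setdefault(law_status, set()).add(legend_entry_id)

    assert groups == {
        'inForce': {1, 3, 4, 7, 9},
        'changeWithoutPreEffect': {1, 2, 6, 7},
    }
    # collect_legend_entries_by_bbox therefore returns two [entries, law_status] pairs,
    # and the mocked get_legend_entries_from_db turns each id into a (id, ) tuple,
    # which is what the sorted(...) assertions in the tests compare against.
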