
Commit 64ab511

Merge pull request #140 from podaac/release/2.3.0
Release/2.3.0
2 parents: 79e5e9f + d24867b

14 files changed: +3062 -2222 lines

.github/workflows/build-pipeline.yml

Lines changed: 36 additions & 17 deletions
@@ -4,7 +4,7 @@ name: Build
 on:
   # Triggers the workflow on push events
   push:
-    branches: [ develop, release/**, main, feature/** ]
+    branches: [ develop, release/**, main, feature/**, issue/**, issues/**, dependabot/** ]

   # Allows you to run this workflow manually from the Actions tab
   workflow_dispatch:
@@ -36,7 +36,10 @@ jobs:
           echo "pyproject_name=$(poetry version | awk '{print $1}')" >> $GITHUB_ENV
       - name: Bump pre-alpha version
         # If triggered by push to a feature branch
-        if: ${{ startsWith(github.ref, 'refs/heads/feature/') }}
+        if: |
+          ${{ startsWith(github.ref, 'refs/heads/issue') }} ||
+          ${{ startsWith(github.ref, 'refs/heads/dependabot/') }} ||
+          ${{ startsWith(github.ref, 'refs/heads/feature/') }}
         run: |
           new_ver="${{ steps.get-version.outputs.current_version }}+$(git rev-parse --short ${GITHUB_SHA})"
           poetry version $new_ver
@@ -97,6 +100,17 @@ jobs:
             -Dsonar.projectName=l2ss-py
             -Dsonar.projectVersion=${{ env.software_version }}
             -Dsonar.python.version=3.7,3.8,3.9
+      - name: Run Snyk as a blocking step
+        uses: snyk/actions/python-3.8@master
+        env:
+          SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
+        with:
+          command: test
+          args: >
+            --org=${{ secrets.SNYK_ORG_ID }}
+            --project-name=${{ github.repository }}
+            --severity-threshold=high
+            --fail-on=all
       - name: Run Snyk on Python
         uses: snyk/actions/python-3.8@master
         env:
@@ -160,6 +174,7 @@ jobs:
           name: python-artifact
           path: dist/*
       - name: Publish to test.pypi.org
+        id: pypi-test-publish
         if: |
           github.ref == 'refs/heads/develop' ||
           startsWith(github.ref, 'refs/heads/release')
@@ -170,34 +185,43 @@ jobs:
           poetry publish -r testpypi
       - name: Publish to pypi.org
         if: ${{ github.ref == 'refs/heads/main' }}
+        id: pypi-publish
         env:
           POETRY_PYPI_TOKEN_PYPI: ${{secrets.POETRY_PYPI_TOKEN_PYPI}}
         run: |
           poetry publish
       - name: Log in to the Container registry
-        if: ${{ !startsWith(github.ref, 'refs/heads/feature') }}
+        if: |
+          steps.pypi-test-publish.conclusion == 'success' ||
+          steps.pypi-publish.conclusion == 'success'
         uses: docker/login-action@v1
         with:
           registry: ${{ env.REGISTRY }}
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Extract metadata (tags, labels) for Docker
-        if: ${{ !startsWith(github.ref, 'refs/heads/feature') }}
+        if: |
+          steps.pypi-test-publish.conclusion == 'success' ||
+          steps.pypi-publish.conclusion == 'success'
         id: meta
-        uses: docker/metadata-action@v3
+        uses: docker/metadata-action@v4
         with:
           images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
           tags: |
             type=semver,pattern={{version}},value=${{ env.software_version }}
             type=raw,value=${{ env.venue }}
       - name: Wait for package
-        if: ${{ !startsWith(github.ref, 'refs/heads/feature') }}
+        if: |
+          steps.pypi-test-publish.conclusion == 'success' ||
+          steps.pypi-publish.conclusion == 'success'
         run: |
           pip install tenacity
           ${GITHUB_WORKSPACE}/.github/workflows/wait-for-pypi.py ${{env.pyproject_name}}[harmony]==${{ env.software_version }}
       - name: Build and push Docker image
-        if: ${{ !startsWith(github.ref, 'refs/heads/feature') }}
-        uses: docker/build-push-action@v2
+        if: |
+          steps.pypi-test-publish.conclusion == 'success' ||
+          steps.pypi-publish.conclusion == 'success'
+        uses: docker/build-push-action@v3
         with:
           context: .
           file: docker/Dockerfile
@@ -208,21 +232,16 @@ jobs:
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
       - name: Run Snyk on Docker Image
-        if: ${{ !startsWith(github.ref, 'refs/heads/feature') }}
+        if: |
+          steps.pypi-test-publish.conclusion == 'success' ||
+          steps.pypi-publish.conclusion == 'success'
         # Snyk can be used to break the build when it detects vulnerabilities.
         # In this case we want to upload the issues to GitHub Code Scanning
         continue-on-error: true
         uses: snyk/actions/docker@master
         env:
           SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
         with:
-          image: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.software_version }}
+          image: ${{ steps.meta.outputs.tags[0] }}
           args: >
             --severity-threshold=high
-            --file=./docker/Dockerfile
-            --sarif-file-output=docker.sarif
-      - name: Upload result to GitHub Code Scanning
-        if: ${{ !startsWith(github.ref, 'refs/heads/feature') }}
-        uses: github/codeql-action/upload-sarif@v2
-        with:
-          sarif_file: ./
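The "Wait for package" step above installs tenacity and calls .github/workflows/wait-for-pypi.py to block until the just-published version is installable before the Docker image is built. The script itself is not part of this diff; the following is only a rough, hypothetical Python sketch of that kind of tenacity-based polling. The PyPI JSON endpoint, retry count, wait interval, and argument handling are assumptions, not code taken from the repository.

# Hypothetical sketch only -- the real wait-for-pypi.py is not shown in this diff.
# Endpoint, retry count, and wait interval below are assumptions.
import sys
import urllib.request

from tenacity import retry, stop_after_attempt, wait_fixed


@retry(stop=stop_after_attempt(30), wait=wait_fixed(10))
def assert_released(package: str, version: str) -> None:
    """Raise (and retry) until the requested version is visible on PyPI."""
    url = f"https://pypi.org/pypi/{package}/{version}/json"
    urllib.request.urlopen(url)  # raises HTTPError 404 until the release is indexed


if __name__ == "__main__":
    # Example: python wait_for_pypi_sketch.py l2ss-py 2.3.0
    assert_released(sys.argv[1], sys.argv[2])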

CHANGELOG.md

Lines changed: 16 additions & 0 deletions
@@ -12,6 +12,20 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ### Fixed
 ### Security

+
+## [2.3.0]
+### Added
+- [issue/126](https://github.com/podaac/l2ss-py/issues/126): Added flexibility to variable subsetting
+  for variables to not have leading slash in the front
+- [issue/136](https://github.com/podaac/l2ss-py/issues/136): Added type annotations throughout the package code
+### Changed
+### Deprecated
+### Removed
+### Fixed
+- PODAAC-5065: integration with SMAP_RSS_L2_SSS_V5, fix way xarray open granules that have `seconds since 2000-1-1 0:0:0 0` as a time unit.
+- [issue/127](https://github.com/podaac/l2ss-py/issues/127): Fixed bug when subsetting variables in grouped datasets. Variable names passed to `subset` will now have `/` replaced by `GROUP_DELIM` so they can be located in flattened datasets
+### Security
+
 ## [2.2.0]
 ### Added
 ### Changed
@@ -40,6 +54,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - [issue/105](https://github.com/podaac/l2ss-py/issues/105): Added function to convert np object to python native objects.
 ### Security

+
 ## [2.0.0]
 ### Added
 - [issue/98](https://github.com/podaac/l2ss-py/issues/98): Added logic to handle time decoding for he5 tai93 files. Changed the min and max inputs to tai93 format and compared to the time format in the file
@@ -51,6 +66,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - **Breaking Change** [issue/99](https://github.com/podaac/l2ss-py/issues/99): Removed support for python 3.7
 ### Fixed
 - [issue/95](https://github.com/podaac/l2ss-py/issues/95): Fix non variable subsets for OMI since variables are not in the same group as the lat lon variables
+
 ### Security

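To make the issue/126 and issue/127 entries above concrete: variable names may now be given with or without a leading slash, and `/` group separators are mapped to the delimiter used when grouped datasets are flattened. The snippet below is only an illustration of that normalization, not l2ss-py's actual implementation; the GROUP_DELIM value, helper name, and example group path are assumptions.

# Illustration of the renaming described in issue/126 and issue/127 above.
# GROUP_DELIM value, function name, and example path are assumptions.
GROUP_DELIM = "__"


def normalize_variable_name(name: str, group_delim: str = GROUP_DELIM) -> str:
    """Accept names with or without a leading slash and map group
    separators to the delimiter used in the flattened dataset."""
    if not name.startswith("/"):           # issue/126: leading slash is optional
        name = "/" + name
    return name.replace("/", group_delim)  # issue/127: '/' -> GROUP_DELIM


# Both forms resolve to the same flattened name, e.g. "__data_20__longitude":
print(normalize_variable_name("data_20/longitude"))
print(normalize_variable_name("/data_20/longitude"))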

cmr/ops_associations.txt

Lines changed: 2 additions & 0 deletions
@@ -53,3 +53,5 @@ C2254232941-POCLOUD
 C2251464384-POCLOUD
 C2247621105-POCLOUD
 C2152045877-POCLOUD
+C1940471193-POCLOUD
+C2205121315-POCLOUD

podaac/subsetter/dimension_cleanup.py

Lines changed: 6 additions & 3 deletions
@@ -11,11 +11,14 @@
 ======================
 Functions which improve upon existing netCDF4 library existing functions
 """
-
 import collections
+from typing import List, Tuple
+
+import netCDF4 as nc
+import xarray as xr


-def remove_duplicate_dims(nc_dataset):
+def remove_duplicate_dims(nc_dataset: nc.Dataset) -> Tuple[nc.Dataset, List[str]]:
     """
     xarray cannot read netCDF4 datasets with duplicate dimensions.
     Function goes through a dataset to catch any variables with duplicate dimensions.
@@ -67,7 +70,7 @@ def remove_duplicate_dims(nc_dataset):
     return nc_dataset, dup_new_varnames


-def rename_dup_vars(dataset, rename_vars):
+def rename_dup_vars(dataset: xr.Dataset, rename_vars: List[str]) -> xr.Dataset:
     """
     NetCDF4 rename function raises and HDF error for variable in S5P files with duplicate dimensions
     This method will use xarray to rename the variables
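A minimal usage sketch for the two newly annotated helpers, based only on the signatures and docstrings shown above; the file name, open mode, and the xarray wrapping step are placeholder assumptions rather than code from the repository.

# Usage sketch only; file name, mode, and the xarray wrapping are assumptions.
import netCDF4 as nc
import xarray as xr

from podaac.subsetter.dimension_cleanup import remove_duplicate_dims, rename_dup_vars

# Catch variables that use the same dimension twice and collect the names
# of the duplicates that were copied under new names.
nc_dataset = nc.Dataset("granule.nc", mode="r+")
nc_dataset, dup_new_varnames = remove_duplicate_dims(nc_dataset)

# Open the cleaned dataset with xarray, then let rename_dup_vars perform the
# renaming that netCDF4 itself cannot do for these variables (see docstring).
ds = xr.open_dataset(xr.backends.NetCDF4DataStore(nc_dataset))
ds = rename_dup_vars(ds, dup_new_varnames)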
