diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..d70b554 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,18 @@ +[run] +relative_files = true +omit = + */tests/* + */*_check.py + */__init__.py + */conftest.py + */.tox/* + */.venv/* + */.eggs/* + */docs/* + */build/* + */dist/* + */.git/* + */.github/* + */.vscode/* + */.pytest_cache/* + */migrations/* diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2131eab..fda1548 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,11 +29,32 @@ jobs: echo ${{ runner.os }} ${{ matrix.os }} python -m pip install --upgrade pip pip install -r requirements.txt -# - name: pre-commit -# if: runner.os == 'Linux' # we do not need static code analyzes on each OS, but we want to repeat it on Linux only for each Python version in matrix -# run: | -# pip install flake8 pylint pre-commit -# pre-commit run --all-files - name: Test run: | - python -m pytest --verbose + python -m pytest -vv --junitxml=pytest.xml --cov-report=term-missing:skip-covered --cov=src tests/ + + - name: Upload coverage data to coveralls.io + if: ${{ matrix.python-version == 3.11 && matrix.platform == 'ubuntu-latest' }} + run: coveralls --service=github + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Upload Coverage to Codecov + if: ${{ matrix.python-version == 3.11 && matrix.platform == 'ubuntu-latest' }} + uses: codecov/codecov-action@v2 + + - name: Coverage comment + id: coverage_comment + if: ${{ matrix.python-version == 3.11 && matrix.platform == 'ubuntu-latest' }} + uses: py-cov-action/python-coverage-comment-action@v3 + with: + GITHUB_TOKEN: ${{ github.token }} + MINIMUM_GREEN: 85 + MINIMUM_ORANGE: 70 + + - name: Store Pull Request comment to be posted + uses: actions/upload-artifact@v3 + if: steps.coverage_comment.outputs.COMMENT_FILE_WRITTEN == 'true' + with: + name: python-coverage-comment-action + path: python-coverage-comment-action.txt diff --git a/.github/workflows/static.yml 
b/.github/workflows/static.yml new file mode 100644 index 0000000..6efd90f --- /dev/null +++ b/.github/workflows/static.yml @@ -0,0 +1,26 @@ +# build docs from docstrings and markdown-file in folder docs +# publish them as github pages +# +#note for myself +# +# Do not forget to set git branch `gh-pages` as source for github pages. +# This branch auto-updated by `mkdocs gh-deploy` +# +name: static source code checks +on: + push: + branches: + - main + - master +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v3 + with: + python-version: '3.11' + - run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + - run: pre-commit run --verbose --all-files diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..9982420 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,45 @@ +exclude: | + (?x)( + tests/| + docs/| + core/| + venv/| + __about__.py + ) +repos: + - repo: https://github.com/astral-sh/ruff-pre-commit + # Ruff version. + rev: v0.1.11 + hooks: + # Run the linter. + - id: ruff + args: [ --fix ] + # Run the formatter. + - id: ruff-format + + - repo: local + hooks: + - id: mypy + name: MyPy + entry: | + mypy + --ignore-missing-imports + --install-types + --non-interactive + --strict + --implicit-reexport + --warn-unused-ignores + --cache-fine-grained + --no-namespace-packages + files: \.py$ + language: python + + - id: pylint + name: Pylint + entry: | + bash -c 'PYTHONPATH=. 
pylint \ + --max-line-length=99 \ + --ignore-imports=yes \ + "$@"' -- + files: \.py$ + language: python diff --git a/.ruff.toml b/.ruff.toml new file mode 100644 index 0000000..7e42192 --- /dev/null +++ b/.ruff.toml @@ -0,0 +1 @@ +line-length = 99 diff --git a/README.md b/README.md index bd6c29e..539abb5 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,5 @@ [![Build Status](https://github.com/andgineer/ambientweather_livedata/workflows/ci/badge.svg)](https://github.com/andgineer/ambientweather_livedata/actions) +[![Coverage](https://raw.githubusercontent.com/andgineer/ambientweather_livedata/python-coverage-comment-action-data/badge.svg)](https://htmlpreview.github.io/?https://github.com/andgineer/ambientweather_livedata/blob/python-coverage-comment-action-data/htmlcov/index.html) # Extract data from Ambient Weather stations Python3 library that extracts information from [Ambient Weather stations](https://www.ambientweather.com/). @@ -31,4 +32,7 @@ Example: battery=inSensor.battery )) +## Coverage report +* [Codecov](https://app.codecov.io/gh/andgineer/ambientweather_livedata/tree/master/src) +* [Coveralls](https://coveralls.io/github/andgineer/ambientweather_livedata) diff --git a/conftest.py b/conftest.py index 9398455..83cf3bd 100644 --- a/conftest.py +++ b/conftest.py @@ -10,4 +10,4 @@ import sys -sys.path.append(os.path.join(os.path.dirname(__file__), 'src')) +sys.path.append(os.path.join(os.path.dirname(__file__), "src")) diff --git a/requirements.txt b/requirements.txt index f7e9a6e..a7539df 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,12 @@ lxml requests -# dev + +# test pytest +pytest-cov +coveralls + +# lint +pre-commit +pylint +mypy diff --git a/src/ambientweather.py b/src/ambientweather.py index b42e9c1..baab294 100644 --- a/src/ambientweather.py +++ b/src/ambientweather.py @@ -1,35 +1,39 @@ +"""Extract data from Ambient Weather stations.""" +from typing import List, Tuple + +from datetime import datetime import requests from lxml 
import html -from datetime import datetime -TITLE = 'LiveData' # HTML live data page title +TITLE = "LiveData" # HTML live data page title +TIMEOUT = 5 # seconds -class SensorData(object): - """ - time - temp - humidity - abs_press - rel_press - battery ('Normal') - """ +class SensorData: + """Sensor data object.""" + + time: datetime + temp: float + humidity: float + abs_press: float + rel_press: float + battery: List[str] # ('Normal') + + def parse(self, live_data_html: bytes) -> Tuple["SensorData", "SensorData"]: + """Extract sensor's data from html (LiveData.html from your ObserverIP). - def parse(self, live_data_html): - """ - Extract sensor's data from html (LiveData.html from your ObserverIP) Returns touple with (sensor1, sensor2 -> SensorData) """ tree = html.fromstring(live_data_html) - title = tree.xpath('//title/text()') + title = tree.xpath("//title/text()") if title[0] != TITLE: - raise ValueError(f'Wrong html page. Good one have to have title {TITLE}') + raise ValueError(f"Wrong html page. Expected page with title {TITLE}") in_sensor = SensorData() time_str = tree.xpath('//input[@name="CurrTime"]/@value')[0] - in_sensor.time = datetime.strptime(time_str, '%H:%M %m/%d/%Y') + in_sensor.time = datetime.strptime(time_str, "%H:%M %m/%d/%Y") in_sensor.temp = float(tree.xpath('//input[@name="inTemp"]/@value')[0]) in_sensor.humidity = float(tree.xpath('//input[@name="inHumi"]/@value')[0]) in_sensor.abs_press = float(tree.xpath('//input[@name="AbsPress"]/@value')[0]) @@ -46,12 +50,12 @@ def parse(self, live_data_html): return in_sensor, out_sensor - def get(self, url): + def get(self, url: str) -> Tuple["SensorData", "SensorData"]: """ Load ObserverIP live data page from the URL and parse it Returns touple with (sensor1, sensor2 -> SensorData) """ - page = requests.get(url).content + page = requests.get(url, timeout=TIMEOUT).content return self.parse(page)