Skip to content

Commit

Permalink
https://github.com/andgineer/ambientweather_livedata/issues/1
Browse files Browse the repository at this point in the history
coverage in CI
pre-commit
  • Loading branch information
andreisorokin-astrazeneca committed Jan 16, 2024
1 parent 6db15dd commit 1d262d2
Show file tree
Hide file tree
Showing 9 changed files with 154 additions and 27 deletions.
18 changes: 18 additions & 0 deletions .coveragerc
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
[run]
relative_files = true
omit =
*/tests/*
*/*_check.py
*/__init__.py
*/conftest.py
*/.tox/*
*/.venv/*
*/.eggs/*
*/docs/*
*/build/*
*/dist/*
*/.git/*
*/.github/*
*/.vscode/*
*/.pytest_cache/*
*/migrations/*
33 changes: 27 additions & 6 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -29,11 +29,32 @@ jobs:
echo ${{ runner.os }} ${{ matrix.os }}
python -m pip install --upgrade pip
pip install -r requirements.txt
# - name: pre-commit
# if: runner.os == 'Linux' # we do not need static code analyzes on each OS, but we want to repeat it on Linux only for each Python version in matrix
# run: |
# pip install flake8 pylint pre-commit
# pre-commit run --all-files
- name: Test
run: |
python -m pytest --verbose
python -m pytest -vv --junitxml=pytest.xml --cov-report=term-missing:skip-covered --cov=src tests/
- name: Upload coverage data to coveralls.io
if: ${{ matrix.python-version == 3.11 && matrix.platform == 'ubuntu-latest' }}
run: coveralls --service=github
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

- name: Upload Coverage to Codecov
if: ${{ matrix.python-version == 3.11 && matrix.platform == 'ubuntu-latest' }}
uses: codecov/codecov-action@v2

- name: Coverage comment
id: coverage_comment
if: ${{ matrix.python-version == 3.11 && matrix.platform == 'ubuntu-latest' }}
uses: py-cov-action/python-coverage-comment-action@v3
with:
GITHUB_TOKEN: ${{ github.token }}
MINIMUM_GREEN: 85
MINIMUM_ORANGE: 70

- name: Store Pull Request comment to be posted
uses: actions/upload-artifact@v3
if: steps.coverage_comment.outputs.COMMENT_FILE_WRITTEN == 'true'
with:
name: python-coverage-comment-action
path: python-coverage-comment-action.txt
26 changes: 26 additions & 0 deletions .github/workflows/static.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# build docs from docstrings and markdown-file in folder docs
# publish them as github pages
#
#note for myself
#
# Do not forget to set git branch `gh-pages` as source for github pages.
# This branch auto-updated by `mkdocs gh-deploy`
#
name: static source code checks
on:
push:
branches:
- main
- master
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v3
with:
python-version: '3.11'
- run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- run: pre-commit run --verbose --all-files
45 changes: 45 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
exclude: |
(?x)(
tests/|
docs/|
core/|
venv/|
__about__.py
)
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.1.11
hooks:
# Run the linter.
- id: ruff
args: [ --fix ]
# Run the formatter.
- id: ruff-format

- repo: local
hooks:
- id: mypy
name: MyPy
entry: |
mypy
--ignore-missing-imports
--install-types
--non-interactive
--strict
--implicit-reexport
--warn-unused-ignores
--cache-fine-grained
--no-namespace-packages
files: \.py$
language: python

- id: pylint
name: Pylint
entry: |
bash -c 'PYTHONPATH=. pylint \
--max-line-length=99 \
--ignore-imports=yes \
"$@"' --
files: \.py$
language: python
1 change: 1 addition & 0 deletions .ruff.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
line-length = 99
4 changes: 4 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
[![Build Status](https://github.com/andgineer/ambientweather_livedata/workflows/ci/badge.svg)](https://github.com/andgineer/ambientweather_livedata/actions)
[![Coverage](https://raw.githubusercontent.com/andgineer/ambientweather_livedata/python-coverage-comment-action-data/badge.svg)](https://htmlpreview.github.io/?https://github.com/andgineer/ambientweather_livedata/blob/python-coverage-comment-action-data/htmlcov/index.html)
# Extract data from Ambient Weather stations

Python3 library that extracts information from [Ambient Weather stations](https://www.ambientweather.com/).
Expand Down Expand Up @@ -31,4 +32,7 @@ Example:
battery=inSensor.battery
))

## Coverage report
* [Codecov](https://app.codecov.io/gh/andgineer/ambientweather_livedata/tree/master/src)
* [Coveralls](https://coveralls.io/github/andgineer/ambientweather_livedata)

2 changes: 1 addition & 1 deletion conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,4 +10,4 @@
import sys


sys.path.append(os.path.join(os.path.dirname(__file__), 'src'))
sys.path.append(os.path.join(os.path.dirname(__file__), "src"))
10 changes: 9 additions & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,12 @@
lxml
requests
# dev

# test
pytest
pytest-cov
coveralls

# lint
pre-commit
pylint
mypy
42 changes: 23 additions & 19 deletions src/ambientweather.py
Original file line number Diff line number Diff line change
@@ -1,35 +1,39 @@
"""Extract data from Ambient Weather stations."""
from typing import List, Tuple

from datetime import datetime
import requests
from lxml import html
from datetime import datetime


TITLE = 'LiveData' # HTML live data page title
TITLE = "LiveData" # HTML live data page title
TIMEOUT = 5 # seconds


class SensorData(object):
"""
time
temp
humidity
abs_press
rel_press
battery ('Normal')
"""
class SensorData:
"""Sensor data object"""

time: datetime
temp: float
humidity: float
abs_press: float
rel_press: float
battery: List[str] # ('Normal')

def parse(self, live_data_html: bytes) -> Tuple["SensorData", "SensorData"]:
"""Extract sensor's data from html (LiveData.html from your ObserverIP).
def parse(self, live_data_html):
"""
Extract sensor's data from html (LiveData.html from your ObserverIP)
Returns tuple with (sensor1, sensor2 -> SensorData)
"""

tree = html.fromstring(live_data_html)
title = tree.xpath('//title/text()')
title = tree.xpath("//title/text()")
if title[0] != TITLE:
raise ValueError(f'Wrong html page. Good one have to have title {TITLE}')
raise ValueError(f"Wrong html page. Good one have to have title {TITLE}")

in_sensor = SensorData()
time_str = tree.xpath('//input[@name="CurrTime"]/@value')[0]
in_sensor.time = datetime.strptime(time_str, '%H:%M %m/%d/%Y')
in_sensor.time = datetime.strptime(time_str, "%H:%M %m/%d/%Y")
in_sensor.temp = float(tree.xpath('//input[@name="inTemp"]/@value')[0])
in_sensor.humidity = float(tree.xpath('//input[@name="inHumi"]/@value')[0])
in_sensor.abs_press = float(tree.xpath('//input[@name="AbsPress"]/@value')[0])
Expand All @@ -46,12 +50,12 @@ def parse(self, live_data_html):

return in_sensor, out_sensor

def get(self, url):
def get(self, url: str) -> Tuple["SensorData", "SensorData"]:
"""
Load ObserverIP live data page from the URL and parse it
Returns tuple with (sensor1, sensor2 -> SensorData)
"""

page = requests.get(url).content
page = requests.get(url, timeout=TIMEOUT).content
return self.parse(page)

0 comments on commit 1d262d2

Please sign in to comment.