-
Notifications
You must be signed in to change notification settings - Fork 21
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
This uses `black`, `isort` and `flake8` to check code quality, although failure is ignored until we've cleaned it up (which has begun in PR #139 against the `revamp` branch). Minimal unit testing is introduced, generating a code coverage report. The text summary is added to the Action summary page, and the more detailed HTML report is stored as an artifact for download. NOTE: The GitHub Action environment is unhappy with `uvicorn` 0.15; upgrading to the latest 0.32.x seems to work and hasn't obviously broken anything else.
- Loading branch information
Showing
9 changed files
with
1,735 additions
and
789 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,4 @@ | ||
name: "CodeQL config for Javascript" | ||
|
||
paths: | ||
- frontend/src/** |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
name: "CodeQL config for Python" |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,58 @@ | ||
name: "CodeQL" | ||
|
||
on: | ||
push: | ||
branches: [ main, 'b[0-9].[0-9]+' ] | ||
pull_request: | ||
# The branches below must be a subset of the branches above | ||
branches: [ main, 'b[0-9].[0-9]+' ] | ||
|
||
jobs: | ||
analyze: | ||
name: Analyze | ||
runs-on: ubuntu-latest | ||
permissions: | ||
actions: read | ||
contents: read | ||
security-events: write | ||
|
||
strategy: | ||
fail-fast: false | ||
matrix: | ||
language: [ 'javascript', 'python' ] | ||
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] | ||
# Learn more about CodeQL language support at https://git.io/codeql-language-support | ||
|
||
steps: | ||
- name: Checkout repository | ||
uses: actions/checkout@v3 | ||
|
||
# Initializes the CodeQL tools for scanning. | ||
- name: Initialize CodeQL | ||
uses: github/codeql-action/init@v2 | ||
with: | ||
languages: ${{ matrix.language }} | ||
config-file: ./.github/codeql/${{ matrix.language }}-config.yml | ||
# If you wish to specify custom queries, you can do so here or in a config file. | ||
# By default, queries listed here will override any specified in a config file. | ||
# Prefix the list here with "+" to use these queries and those in the config file. | ||
# queries: ./path/to/local/query, your-org/your-repo/queries@main | ||
|
||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java). | ||
# If this step fails, then you should remove it and run the build manually (see below) | ||
#- name: Autobuild | ||
# uses: github/codeql-action/autobuild@v2 | ||
|
||
# ℹ️ Command-line programs to run using the OS shell. | ||
# 📚 https://git.io/JvXDl | ||
|
||
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines | ||
# and modify them (or add more) to build your code if your project | ||
# uses a compiled language | ||
|
||
#- run: | | ||
# make bootstrap | ||
# make release | ||
|
||
- name: Perform CodeQL Analysis | ||
uses: github/codeql-action/analyze@v2 |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,47 @@ | ||
name: Python checks | ||
|
||
on: | ||
push: | ||
branches: ["main"] | ||
tags-ignore: ["**"] | ||
pull_request: | ||
|
||
env: | ||
COVERAGE: ${{ github.workspace }}/coverage | ||
|
||
jobs: | ||
build: | ||
runs-on: ubuntu-latest | ||
strategy: | ||
matrix: | ||
python-version: ["3.9.20"] | ||
|
||
steps: | ||
- uses: actions/checkout@v3 | ||
- name: Set up Python ${{ matrix.python-version }} | ||
uses: actions/setup-python@v4 | ||
with: | ||
python-version: ${{ matrix.python-version }} | ||
- name: Install dependencies | ||
run: | | ||
python -m pip install --upgrade pip | ||
pip install tox>=4.19 | ||
- name: Check for lint | ||
# Report errors but don't fail until we achieve stability! | ||
continue-on-error: true | ||
run: | | ||
cd backend | ||
tox -e format,isort,lint | ||
- name: Run unit tests | ||
run: | | ||
cd backend | ||
tox -e unit | ||
- name: Add coverage data to conversation | ||
run: cat $COVERAGE/coverage.txt >> $GITHUB_STEP_SUMMARY | ||
- name: Publish coverage data | ||
uses: actions/upload-artifact@v4 | ||
with: | ||
name: Coverage for ${{ github.event.head_commit.id }} | ||
path: ${{ env.COVERAGE }}/html | ||
if-no-files-found: warn | ||
retention-days: 30 |
Large diffs are not rendered by default.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,13 +1,17 @@ | ||
[tool.poetry] | ||
name = "openshift_perfscale_api" | ||
packages = [ | ||
{ include = "app" } | ||
] | ||
version = "0.1.1" | ||
description = "python transformer of openshift performance and scale test results" | ||
authors = ["mleader <[email protected]>"] | ||
|
||
[tool.poetry.dependencies] | ||
python = "^3.9" | ||
python = "^3.9.20" | ||
cryptography = "^3.4.8" | ||
pandas = "1.2.4" | ||
numpy = "1.26.4" | ||
vyper-config = "1.0.0" | ||
semver = "2.13.0" | ||
splunk-sdk = "2.0.1" | ||
|
@@ -16,17 +20,84 @@ httptools = "^0.2.0" | |
elasticsearch = "7.13.4" | ||
fastapi = "^0.104.1" | ||
pydantic = "2.3.0" | ||
uvicorn = "^0.14.0" | ||
uvicorn = "^0.32.0" | ||
trio = "^0.18.0" | ||
aiohttp = "^3.7.4" | ||
httpx = "^0.18.1" | ||
orjson = "^3.5.3" | ||
atlassian-python-api = "^3.41.9" | ||
python-keycloak = "^3.12.0" | ||
pytest = "^8.3.4" | ||
pytest-asyncio = "^0.24" | ||
pytest-cov = "^6.0" | ||
tox = "^4.23.2" | ||
|
||
[tool.poetry.dev-dependencies] | ||
watchgod = "^0.7" | ||
|
||
[tool.pytest.ini_options] | ||
asyncio_mode = "auto" | ||
asyncio_default_fixture_loop_scope = "function" | ||
|
||
[tool.isort] | ||
profile = "black" # black-compatible (e.g., trailing comma) | ||
known_first_party = ["app"] # separate our headers into a section | ||
multi_line_output = 3 # "hanging" indent with dedented paren | ||
force_sort_within_sections = true # don't separate import vs from | ||
order_by_type = false # sort alphabetic regardless of case | ||
|
||
[tool.tox] | ||
requires = ["tox>=4.19"] | ||
env_list = ["unit", "format", "lint", "isort"] | ||
|
||
[tool.tox.env_run_base] | ||
description = "Run test under {base_python}" | ||
base_python = ["python3.9"] | ||
deps = [ | ||
"pytest", | ||
"pytest-asyncio", | ||
"pytest-cov", | ||
"coverage", | ||
] | ||
set_env.COVERAGE = { replace = "env", name = "COVERAGE", default = "/var/tmp/{env:USER}" } | ||
allowlist_externals = ["bash", "echo", "coverage"] | ||
commands = [ | ||
["echo", "{env:COVERAGE}"], | ||
["pip", "list"], | ||
["pytest", "-s", "--cov-branch", "--cov=app", "tests"], | ||
["coverage", "html", "--directory={env:COVERAGE}/html"], | ||
["bash", "-c", "coverage report --format=markdown >{env:COVERAGE}/coverage.txt"], | ||
] | ||
|
||
[tool.tox.env.format] | ||
description = "check code format" | ||
skip_install = true | ||
deps = ["black"] | ||
commands = [["black", "--check", { replace = "posargs", default = ["app", "tests"], extend = true} ]] | ||
|
||
[tool.tox.env.isort] | ||
description = "check order of imports" | ||
skip_install = true | ||
deps = ["isort"] | ||
commands = [["isort", "--check", { replace = "posargs", default = ["app", "tests"], extend = true} ]] | ||
|
||
[tool.tox.env.lint] | ||
description = "check code" | ||
skip_install = true | ||
deps = ["flake8"] | ||
commands = [["flake8", { replace = "posargs", default = ["app", "tests"], extend = true} ]] | ||
|
||
[tool.coverage.run] | ||
branch = true | ||
cover_pylib = true | ||
data_file = "coverage.db" | ||
parallel = true | ||
relative_files = true | ||
|
||
[tool.coverage.report] | ||
include_namespace_packages = true | ||
skip_empty = true | ||
|
||
[build-system] | ||
requires = ["poetry-core>=1.0.0"] | ||
build-backend = "poetry.core.masonry.api" |
Empty file.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,110 @@ | ||
from typing import Any, Optional, Union | ||
|
||
from elasticsearch import AsyncElasticsearch | ||
|
||
|
||
class FakeAsyncElasticsearch(AsyncElasticsearch):
    """Test double for AsyncElasticsearch.

    This fake doesn't try to mimic Opensearch query and aggregation logic:
    instead, ``data`` is pre-loaded (via ``set_query`` / ``set_aggregate``)
    with a JSON response body that will be returned on an "index" match.
    Each canned response is consumed when matched, which means that any
    external call we need to mock has a single query against any one index.
    """

    hosts: Union[str, list[str]]
    args: dict[str, Any]
    closed: bool

    # Canned responses keyed by index name; each entry is returned (and
    # removed) by the first search() against that index.
    data: dict[str, Any]

    def __init__(self, hosts: Union[str, list[str]], **kwargs):
        # Deliberately do NOT call super().__init__(): we never want a
        # real connection; we only record the constructor arguments.
        self.hosts = hosts
        self.args = kwargs
        self.closed = False
        self.data = {}

    # Testing helpers to manage fake searches
    def set_query(
        self, root_index: str, data: list[dict[str, Any]], version: int = 7
    ):
        """Pre-load a canned search response for one CDM index.

        Args:
            root_index: index name without the "cdm{ver}-" prefix
            data: documents to be returned as search hits; each document
                is copied before the synthetic "cdm" version metadata is
                injected, so the caller's dicts are not mutated (the
                previous implementation wrote into the caller's objects)
            version: CDM schema version used to build the index prefix
        """
        ver = f"v{version:d}dev"
        index = f"cdm{ver}-{root_index}"
        hits = []
        for d in data:
            # Shallow copy: avoid mutating the caller's document when we
            # inject the "cdm" key.
            source = dict(d)
            source["cdm"] = {"ver": ver}
            hits.append(
                {
                    "_index": index,
                    "_id": "random_string",
                    "_score": 1.0,
                    "_source": source,
                }
            )
        self.data[index] = {
            "took": 1,
            "timed_out": False,
            "_shards": {"total": 1, "successful": 1, "skipped": 0, "failed": 0},
            "hits": {
                "total": {"value": len(data), "relation": "eq"},
                "max_score": 1.0,
                "hits": hits,
            },
        }

    # Testing helpers to manage fake aggregations
    #
    # TODO: how much Opensearch boilerplate (score, etc) can reasonably be
    # factored out into this method?
    def set_aggregate(self, index: str, data: dict[str, Any]):
        """Pre-load a canned aggregation response for one index.

        Args:
            index: full index name to match in search()
            data: the "aggregations" payload to return

        NOTE(review): the inner "hits" value here is a dict nesting another
        "total"/"hits" structure rather than a list of hit documents; this
        doesn't match the shape built by set_query — confirm it matches the
        real Opensearch aggregation response consumers expect.
        """
        self.data[index] = {
            "took": 1,
            "timed_out": False,
            "_shards": {"total": 1, "successful": 1, "skipped": 0, "failed": 0},
            "hits": {
                "total": {"value": len(data), "relation": "eq"},
                "max_score": 1.0,
                "hits": {
                    "total": {"value": 10000, "relation": "gte"},
                    "max_score": None,
                    "hits": [],
                },
            },
            "aggregations": data,
        }

    # Faked AsyncElasticsearch methods
    async def close(self):
        # Record closure so tests can assert proper cleanup.
        self.closed = True

    async def info(self, **kwargs):
        # Fake returns no cluster info.
        pass

    async def ping(self, **kwargs):
        # Always reachable.
        return True

    async def search(
        self, body=None, index=None, doc_type=None, params=None, headers=None, **kwargs
    ):
        """Return (and consume) the canned response for ``index``.

        If no response was pre-loaded for the index, return a synthetic
        "index_not_found_exception" error body with status 404.
        """
        if index in self.data:
            # Pop the canned response: each pre-loaded response serves
            # exactly one query.
            target = self.data[index]
            del self.data[index]
            return target
        return {
            "error": {
                "root_cause": [
                    {
                        "type": "index_not_found_exception",
                        "reason": f"no such index [{index}]",
                        "index": index,
                        "resource.id": index,
                        "resource.type": "index_or_alias",
                        "index_uuid": "_na_",
                    },
                ],
                "type": "index_not_found_exception",
                "reason": f"no such index [{index}]",
                "index": index,
                "resource.id": index,
                "resource.type": "index_or_alias",
                "index_uuid": "_na_",
            },
            "status": 404,
        }
Oops, something went wrong.